mirror of
				https://github.com/dani-garcia/vaultwarden.git
				synced 2025-10-30 17:58:19 +02:00 
			
		
		
		
	Merge branch 'more-clippy-checks' of https://github.com/BlackDex/vaultwarden into BlackDex-more-clippy-checks
This commit is contained in:
		| @@ -427,7 +427,7 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     user_to_edit.atype = new_type as i32; | ||||
|     user_to_edit.atype = new_type; | ||||
|     user_to_edit.save(&conn).await | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -913,8 +913,8 @@ async fn save_attachment( | ||||
|     // In the v2 API, the attachment record has already been created, | ||||
|     // so the size limit needs to be adjusted to account for that. | ||||
|     let size_adjust = match &attachment { | ||||
|         None => 0,                     // Legacy API | ||||
|         Some(a) => a.file_size as i64, // v2 API | ||||
|         None => 0,                         // Legacy API | ||||
|         Some(a) => i64::from(a.file_size), // v2 API | ||||
|     }; | ||||
|  | ||||
|     let size_limit = if let Some(ref user_uuid) = cipher.user_uuid { | ||||
| @@ -1496,7 +1496,7 @@ pub enum CipherSyncType { | ||||
| impl CipherSyncData { | ||||
|     pub async fn new(user_uuid: &str, ciphers: &Vec<Cipher>, sync_type: CipherSyncType, conn: &DbConn) -> Self { | ||||
|         // Generate a list of Cipher UUID's to be used during a query filter with an eq_any. | ||||
|         let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.to_string()).collect::<Vec<String>>().await; | ||||
|         let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.clone()).collect::<Vec<String>>().await; | ||||
|  | ||||
|         let mut cipher_folders: HashMap<String, String> = HashMap::new(); | ||||
|         let mut cipher_favorites: HashSet<String> = HashSet::new(); | ||||
| @@ -1518,7 +1518,7 @@ impl CipherSyncData { | ||||
|         // Generate a list of Cipher UUID's containing a Vec with one or more Attachment records | ||||
|         let mut cipher_attachments: HashMap<String, Vec<Attachment>> = HashMap::new(); | ||||
|         for attachment in Attachment::find_all_by_ciphers(&cipher_uuids, conn).await { | ||||
|             cipher_attachments.entry(attachment.cipher_uuid.to_string()).or_default().push(attachment); | ||||
|             cipher_attachments.entry(attachment.cipher_uuid.clone()).or_default().push(attachment); | ||||
|         } | ||||
|  | ||||
|         // Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's | ||||
| @@ -1530,14 +1530,14 @@ impl CipherSyncData { | ||||
|         // Generate a HashMap with the Organization UUID as key and the UserOrganization record | ||||
|         let user_organizations: HashMap<String, UserOrganization> = | ||||
|             stream::iter(UserOrganization::find_by_user(user_uuid, conn).await) | ||||
|                 .map(|uo| (uo.org_uuid.to_string(), uo)) | ||||
|                 .map(|uo| (uo.org_uuid.clone(), uo)) | ||||
|                 .collect() | ||||
|                 .await; | ||||
|  | ||||
|         // Generate a HashMap with the User_Collections UUID as key and the CollectionUser record | ||||
|         let user_collections: HashMap<String, CollectionUser> = | ||||
|             stream::iter(CollectionUser::find_by_user(user_uuid, conn).await) | ||||
|                 .map(|uc| (uc.collection_uuid.to_string(), uc)) | ||||
|                 .map(|uc| (uc.collection_uuid.clone(), uc)) | ||||
|                 .collect() | ||||
|                 .await; | ||||
|  | ||||
|   | ||||
| @@ -761,7 +761,7 @@ pub async fn emergency_request_timeout_job(pool: DbPool) { | ||||
|         for mut emer in emergency_access_list { | ||||
|             if emer.recovery_initiated_at.is_some() | ||||
|                 && Utc::now().naive_utc() | ||||
|                     >= emer.recovery_initiated_at.unwrap() + Duration::days(emer.wait_time_days as i64) | ||||
|                     >= emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days)) | ||||
|             { | ||||
|                 emer.status = EmergencyAccessStatus::RecoveryApproved as i32; | ||||
|                 emer.save(&conn).await.expect("Cannot save emergency access on job"); | ||||
| @@ -812,7 +812,7 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) { | ||||
|         for mut emer in emergency_access_list { | ||||
|             if (emer.recovery_initiated_at.is_some() | ||||
|                 && Utc::now().naive_utc() | ||||
|                     >= emer.recovery_initiated_at.unwrap() + Duration::days((emer.wait_time_days as i64) - 1)) | ||||
|                     >= emer.recovery_initiated_at.unwrap() + Duration::days((i64::from(emer.wait_time_days)) - 1)) | ||||
|                 && (emer.last_notification_at.is_none() | ||||
|                     || (emer.last_notification_at.is_some() | ||||
|                         && Utc::now().naive_utc() >= emer.last_notification_at.unwrap() + Duration::days(1))) | ||||
|   | ||||
| @@ -95,7 +95,7 @@ async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, c | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| async fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> { | ||||
| fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> { | ||||
|     let data_val = if data.Type == SendType::Text as i32 { | ||||
|         data.Text | ||||
|     } else if data.Type == SendType::File as i32 { | ||||
| @@ -117,7 +117,7 @@ async fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> { | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc()).await; | ||||
|     let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc()); | ||||
|     send.user_uuid = Some(user_uuid); | ||||
|     send.notes = data.Notes; | ||||
|     send.max_access_count = match data.MaxAccessCount { | ||||
| @@ -171,7 +171,7 @@ async fn post_send(data: JsonUpcase<SendData>, headers: Headers, conn: DbConn, n | ||||
|         err!("File sends should use /api/sends/file") | ||||
|     } | ||||
|  | ||||
|     let mut send = create_send(data, headers.user.uuid).await?; | ||||
|     let mut send = create_send(data, headers.user.uuid)?; | ||||
|     send.save(&conn).await?; | ||||
|     nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn).await).await; | ||||
|  | ||||
| @@ -211,7 +211,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbCo | ||||
|         None => SIZE_525_MB, | ||||
|     }; | ||||
|  | ||||
|     let mut send = create_send(model, headers.user.uuid).await?; | ||||
|     let mut send = create_send(model, headers.user.uuid)?; | ||||
|     if send.atype != SendType::File as i32 { | ||||
|         err!("Send content is not a file"); | ||||
|     } | ||||
|   | ||||
| @@ -139,7 +139,7 @@ pub async fn validate_totp_code( | ||||
|     // The amount of steps back and forward in time | ||||
|     // Also check if we need to disable time drifted TOTP codes. | ||||
|     // If that is the case, we set the steps to 0 so only the current TOTP is valid. | ||||
|     let steps = !CONFIG.authenticator_disable_time_drift() as i64; | ||||
|     let steps = i64::from(!CONFIG.authenticator_disable_time_drift()); | ||||
|  | ||||
|     // Get the current system time in UNIX Epoch (UTC) | ||||
|     let current_time = chrono::Utc::now(); | ||||
| @@ -154,7 +154,7 @@ pub async fn validate_totp_code( | ||||
|         let generated = totp_custom::<Sha1>(30, 6, &decoded_secret, time); | ||||
|  | ||||
|         // Check if the given code equals the generated one and if the time_step is larger than the one last used. | ||||
|         if generated == totp_code && time_step > twofactor.last_used as i64 { | ||||
|         if generated == totp_code && time_step > i64::from(twofactor.last_used) { | ||||
|             // If the step does not equals 0 the time is drifted either server or client side. | ||||
|             if step != 0 { | ||||
|                 warn!("TOTP Time drift detected. The step offset is {}", step); | ||||
| @@ -165,7 +165,7 @@ pub async fn validate_totp_code( | ||||
|             twofactor.last_used = time_step as i32; | ||||
|             twofactor.save(conn).await?; | ||||
|             return Ok(()); | ||||
|         } else if generated == totp_code && time_step <= twofactor.last_used as i64 { | ||||
|         } else if generated == totp_code && time_step <= i64::from(twofactor.last_used) { | ||||
|             warn!("This TOTP or a TOTP code within {} steps back or forward has already been used!", steps); | ||||
|             err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); | ||||
|         } | ||||
|   | ||||
| @@ -147,7 +147,7 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, | ||||
|         verify_yubikey_otp(yubikey.to_owned()).map_res("Invalid Yubikey OTP provided")?; | ||||
|     } | ||||
|  | ||||
|     let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (&x[..12]).to_owned()).collect(); | ||||
|     let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect(); | ||||
|  | ||||
|     let yubikey_metadata = YubikeyMetadata { | ||||
|         Keys: yubikey_ids, | ||||
|   | ||||
| @@ -51,7 +51,7 @@ static CLIENT: Lazy<Client> = Lazy::new(|| { | ||||
|  | ||||
|     // Reuse the client between requests | ||||
|     let client = get_reqwest_client_builder() | ||||
|         .cookie_provider(cookie_store.clone()) | ||||
|         .cookie_provider(Arc::clone(&cookie_store)) | ||||
|         .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) | ||||
|         .default_headers(default_headers.clone()); | ||||
|  | ||||
| @@ -77,7 +77,7 @@ static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+ | ||||
| static ICON_BLACKLIST_REGEX: Lazy<dashmap::DashMap<String, Regex>> = Lazy::new(dashmap::DashMap::new); | ||||
|  | ||||
| async fn icon_redirect(domain: &str, template: &str) -> Option<Redirect> { | ||||
|     if !is_valid_domain(domain).await { | ||||
|     if !is_valid_domain(domain) { | ||||
|         warn!("Invalid domain: {}", domain); | ||||
|         return None; | ||||
|     } | ||||
| @@ -123,7 +123,7 @@ async fn icon_google(domain: String) -> Option<Redirect> { | ||||
| async fn icon_internal(domain: String) -> Cached<(ContentType, Vec<u8>)> { | ||||
|     const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); | ||||
|  | ||||
|     if !is_valid_domain(&domain).await { | ||||
|     if !is_valid_domain(&domain) { | ||||
|         warn!("Invalid domain: {}", domain); | ||||
|         return Cached::ttl( | ||||
|             (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), | ||||
| @@ -144,7 +144,7 @@ async fn icon_internal(domain: String) -> Cached<(ContentType, Vec<u8>)> { | ||||
| /// | ||||
| /// This does some manual checks and makes use of Url to do some basic checking. | ||||
| /// domains can't be larger than 63 characters (not counting multiple subdomains) according to the RFCs, but we limit the total size to 255. | ||||
| async fn is_valid_domain(domain: &str) -> bool { | ||||
| fn is_valid_domain(domain: &str) -> bool { | ||||
|     const ALLOWED_CHARS: &str = "_-."; | ||||
|  | ||||
|     // If parsing the domain fails using Url, it will not work with reqwest. | ||||
| @@ -278,6 +278,7 @@ mod tests { | ||||
|  | ||||
| use cached::proc_macro::cached; | ||||
| #[cached(key = "String", convert = r#"{ domain.to_string() }"#, size = 16, time = 60)] | ||||
| #[allow(clippy::unused_async)] // This is needed because cached causes a false-positive here. | ||||
| async fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|     if CONFIG.icon_blacklist_non_global_ips() { | ||||
|         if let Ok(s) = lookup_host((domain, 0)).await { | ||||
| @@ -304,7 +305,7 @@ async fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|             // Generate the regex to store into the Lazy Static HashMap. | ||||
|             let blacklist_regex = Regex::new(&blacklist).unwrap(); | ||||
|             let is_match = blacklist_regex.is_match(domain); | ||||
|             ICON_BLACKLIST_REGEX.insert(blacklist.to_string(), blacklist_regex); | ||||
|             ICON_BLACKLIST_REGEX.insert(blacklist.clone(), blacklist_regex); | ||||
|  | ||||
|             is_match | ||||
|         }; | ||||
| @@ -326,7 +327,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> { | ||||
|     } | ||||
|  | ||||
|     if let Some(icon) = get_cached_icon(&path).await { | ||||
|         let icon_type = match get_icon_type(&icon).await { | ||||
|         let icon_type = match get_icon_type(&icon) { | ||||
|             Some(x) => x, | ||||
|             _ => "x-icon", | ||||
|         }; | ||||
| @@ -416,7 +417,7 @@ impl Icon { | ||||
|     } | ||||
| } | ||||
|  | ||||
| async fn get_favicons_node( | ||||
| fn get_favicons_node( | ||||
|     dom: InfallibleTokenizer<StringReader<'_>, FaviconEmitter>, | ||||
|     icons: &mut Vec<Icon>, | ||||
|     url: &url::Url, | ||||
| @@ -468,7 +469,7 @@ async fn get_favicons_node( | ||||
|                 } else { | ||||
|                     "" | ||||
|                 }; | ||||
|                 let priority = get_icon_priority(full_href.as_str(), sizes).await; | ||||
|                 let priority = get_icon_priority(full_href.as_str(), sizes); | ||||
|                 icons.push(Icon::new(priority, full_href.to_string())); | ||||
|             } | ||||
|         }; | ||||
| @@ -512,7 +513,7 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|                     tld = domain_parts.next_back().unwrap(), | ||||
|                     base = domain_parts.next_back().unwrap() | ||||
|                 ); | ||||
|                 if is_valid_domain(&base_domain).await { | ||||
|                 if is_valid_domain(&base_domain) { | ||||
|                     let sslbase = format!("https://{base_domain}"); | ||||
|                     let httpbase = format!("http://{base_domain}"); | ||||
|                     debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); | ||||
| @@ -523,7 +524,7 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|             // When the domain is not an IP, and has fewer than 2 dots, try to add www. in front of it. | ||||
|             } else if is_ip.is_err() && domain.matches('.').count() < 2 { | ||||
|                 let www_domain = format!("www.{domain}"); | ||||
|                 if is_valid_domain(&www_domain).await { | ||||
|                 if is_valid_domain(&www_domain) { | ||||
|                     let sslwww = format!("https://{www_domain}"); | ||||
|                     let httpwww = format!("http://{www_domain}"); | ||||
|                     debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); | ||||
| @@ -555,7 +556,7 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|         let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec(); | ||||
|  | ||||
|         let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default()).infallible(); | ||||
|         get_favicons_node(dom, &mut iconlist, &url).await; | ||||
|         get_favicons_node(dom, &mut iconlist, &url); | ||||
|     } else { | ||||
|         // Add the default favicon.ico to the list with just the given domain | ||||
|         iconlist.push(Icon::new(35, format!("{ssldomain}/favicon.ico"))); | ||||
| @@ -603,12 +604,12 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err | ||||
| /// | ||||
| /// # Example | ||||
| /// ``` | ||||
| /// priority1 = get_icon_priority("http://example.com/path/to/a/favicon.png", "32x32").await; | ||||
| /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "").await; | ||||
| /// priority1 = get_icon_priority("http://example.com/path/to/a/favicon.png", "32x32"); | ||||
| /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", ""); | ||||
| /// ``` | ||||
| async fn get_icon_priority(href: &str, sizes: &str) -> u8 { | ||||
| fn get_icon_priority(href: &str, sizes: &str) -> u8 { | ||||
|     // Check if there is a dimension set | ||||
|     let (width, height) = parse_sizes(sizes).await; | ||||
|     let (width, height) = parse_sizes(sizes); | ||||
|  | ||||
|     // Check if there is a size given | ||||
|     if width != 0 && height != 0 { | ||||
| @@ -650,11 +651,11 @@ async fn get_icon_priority(href: &str, sizes: &str) -> u8 { | ||||
| /// | ||||
| /// # Example | ||||
| /// ``` | ||||
| /// let (width, height) = parse_sizes("64x64").await; // (64, 64) | ||||
| /// let (width, height) = parse_sizes("x128x128").await; // (128, 128) | ||||
| /// let (width, height) = parse_sizes("32").await; // (0, 0) | ||||
| /// let (width, height) = parse_sizes("64x64"); // (64, 64) | ||||
| /// let (width, height) = parse_sizes("x128x128"); // (128, 128) | ||||
| /// let (width, height) = parse_sizes("32"); // (0, 0) | ||||
| /// ``` | ||||
| async fn parse_sizes(sizes: &str) -> (u16, u16) { | ||||
| fn parse_sizes(sizes: &str) -> (u16, u16) { | ||||
|     let mut width: u16 = 0; | ||||
|     let mut height: u16 = 0; | ||||
|  | ||||
| @@ -698,7 +699,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { | ||||
|                     // Also check if the size is at least 67 bytes, which seems to be the smallest PNG I could create | ||||
|                     if body.len() >= 67 { | ||||
|                         // Check if the icon type is allowed, else try an icon from the list. | ||||
|                         icon_type = get_icon_type(&body).await; | ||||
|                         icon_type = get_icon_type(&body); | ||||
|                         if icon_type.is_none() { | ||||
|                             debug!("Icon from {} data:image uri, is not a valid image type", domain); | ||||
|                             continue; | ||||
| @@ -716,7 +717,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { | ||||
|                     buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) | ||||
|  | ||||
|                     // Check if the icon type is allowed, else try an icon from the list. | ||||
|                     icon_type = get_icon_type(&buffer).await; | ||||
|                     icon_type = get_icon_type(&buffer); | ||||
|                     if icon_type.is_none() { | ||||
|                         buffer.clear(); | ||||
|                         debug!("Icon from {}, is not a valid image type", icon.href); | ||||
| @@ -751,7 +752,7 @@ async fn save_icon(path: &str, icon: &[u8]) { | ||||
|     } | ||||
| } | ||||
|  | ||||
| async fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { | ||||
| fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { | ||||
|     match bytes { | ||||
|         [137, 80, 78, 71, ..] => Some("png"), | ||||
|         [0, 0, 1, 0, ..] => Some("x-icon"), | ||||
|   | ||||
| @@ -87,11 +87,11 @@ impl Device { | ||||
|             nbf: time_now.timestamp(), | ||||
|             exp: (time_now + *DEFAULT_VALIDITY).timestamp(), | ||||
|             iss: JWT_LOGIN_ISSUER.to_string(), | ||||
|             sub: user.uuid.to_string(), | ||||
|             sub: user.uuid.clone(), | ||||
|  | ||||
|             premium: true, | ||||
|             name: user.name.to_string(), | ||||
|             email: user.email.to_string(), | ||||
|             name: user.name.clone(), | ||||
|             email: user.email.clone(), | ||||
|             email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), | ||||
|  | ||||
|             orgowner, | ||||
| @@ -99,8 +99,8 @@ impl Device { | ||||
|             orguser, | ||||
|             orgmanager, | ||||
|  | ||||
|             sstamp: user.security_stamp.to_string(), | ||||
|             device: self.uuid.to_string(), | ||||
|             sstamp: user.security_stamp.clone(), | ||||
|             device: self.uuid.clone(), | ||||
|             scope, | ||||
|             amr: vec!["Application".into()], | ||||
|         }; | ||||
|   | ||||
| @@ -45,7 +45,7 @@ pub enum SendType { | ||||
| } | ||||
|  | ||||
| impl Send { | ||||
|     pub async fn new(atype: i32, name: String, data: String, akey: String, deletion_date: NaiveDateTime) -> Self { | ||||
|     pub fn new(atype: i32, name: String, data: String, akey: String, deletion_date: NaiveDateTime) -> Self { | ||||
|         let now = Utc::now().naive_utc(); | ||||
|  | ||||
|         Self { | ||||
|   | ||||
| @@ -171,7 +171,7 @@ impl User { | ||||
|     pub fn set_stamp_exception(&mut self, route_exception: Vec<String>) { | ||||
|         let stamp_exception = UserStampException { | ||||
|             routes: route_exception, | ||||
|             security_stamp: self.security_stamp.to_string(), | ||||
|             security_stamp: self.security_stamp.clone(), | ||||
|             expire: (Utc::now().naive_utc() + Duration::minutes(2)).timestamp(), | ||||
|         }; | ||||
|         self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); | ||||
|   | ||||
							
								
								
									
										30
									
								
								src/main.rs
									
									
									
									
									
								
							
							
						
						
									
										30
									
								
								src/main.rs
									
									
									
									
									
								
							| @@ -1,6 +1,30 @@ | ||||
| #![forbid(unsafe_code)] | ||||
| #![warn(rust_2018_idioms)] | ||||
| #![warn(rust_2021_compatibility)] | ||||
| #![forbid(unsafe_code, non_ascii_idents)] | ||||
| #![deny( | ||||
|     rust_2018_idioms, | ||||
|     rust_2021_compatibility, | ||||
|     noop_method_call, | ||||
|     pointer_structural_match, | ||||
|     trivial_casts, | ||||
|     trivial_numeric_casts, | ||||
|     unused_import_braces, | ||||
|     clippy::cast_lossless, | ||||
|     clippy::clone_on_ref_ptr, | ||||
|     clippy::equatable_if_let, | ||||
|     clippy::float_cmp_const, | ||||
|     clippy::inefficient_to_string, | ||||
|     clippy::linkedlist, | ||||
|     clippy::macro_use_imports, | ||||
|     clippy::manual_assert, | ||||
|     clippy::match_wildcard_for_single_variants, | ||||
|     clippy::mem_forget, | ||||
|     clippy::string_add_assign, | ||||
|     clippy::string_to_string, | ||||
|     clippy::unnecessary_join, | ||||
|     clippy::unnecessary_self_imports, | ||||
|     clippy::unused_async, | ||||
|     clippy::verbose_file_reads, | ||||
|     clippy::zero_sized_map_values | ||||
| )] | ||||
| #![cfg_attr(feature = "unstable", feature(ip))] | ||||
| // The recursion_limit is mainly triggered by the json!() macro. | ||||
| // The more key/value pairs there are the more recursion occurs. | ||||
|   | ||||
							
								
								
									
										14
									
								
								src/util.rs
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								src/util.rs
									
									
									
									
									
								
							| @@ -300,7 +300,7 @@ impl Fairing for BetterLogging { | ||||
| // | ||||
| use std::{ | ||||
|     fs::{self, File}, | ||||
|     io::{Read, Result as IOResult}, | ||||
|     io::Result as IOResult, | ||||
|     path::Path, | ||||
| }; | ||||
|  | ||||
| @@ -309,11 +309,7 @@ pub fn file_exists(path: &str) -> bool { | ||||
| } | ||||
|  | ||||
| pub fn read_file(path: &str) -> IOResult<Vec<u8>> { | ||||
|     let mut contents: Vec<u8> = Vec::new(); | ||||
|  | ||||
|     let mut file = File::open(Path::new(path))?; | ||||
|     file.read_to_end(&mut contents)?; | ||||
|  | ||||
|     let contents = fs::read(Path::new(path))?; | ||||
|     Ok(contents) | ||||
| } | ||||
|  | ||||
| @@ -326,11 +322,7 @@ pub fn write_file(path: &str, content: &[u8]) -> Result<(), crate::error::Error> | ||||
| } | ||||
|  | ||||
| pub fn read_file_string(path: &str) -> IOResult<String> { | ||||
|     let mut contents = String::new(); | ||||
|  | ||||
|     let mut file = File::open(Path::new(path))?; | ||||
|     file.read_to_string(&mut contents)?; | ||||
|  | ||||
|     let contents = fs::read_to_string(Path::new(path))?; | ||||
|     Ok(contents) | ||||
| } | ||||
|  | ||||
|   | ||||
		Reference in New Issue
	
	Block a user