Fix failing large note imports

When importing into Vaultwarden (or Bitwarden), notes larger than 10,000 encrypted characters are invalid. For one, such notes are not compatible with Bitwarden, and some clients tend to break on very large notes.

We already added a check for this limit when adding a single cipher, but during an import that check could fail partway through and leave a partially imported vault. Bitwarden validates the data before actually running it through the import process and generates a special error message which tells the user which items are invalid.

This PR adds that validation check and returns the same kind of error.
Fixes #3048
BlackDex
2023-01-01 15:09:10 +01:00
parent 988d24927e
commit 6be26f0a38
4 changed files with 42 additions and 3 deletions


@@ -205,7 +205,7 @@ pub struct CipherData {
     */
     pub Type: i32,
     pub Name: String,
-    Notes: Option<String>,
+    pub Notes: Option<String>,
     Fields: Option<Value>,
     // Only one of these should exist, depending on type
@@ -542,6 +542,12 @@ async fn post_ciphers_import(
     let data: ImportData = data.into_inner().data;

+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     // Read and create the folders
     let mut folders: Vec<_> = Vec::new();
     for folder in data.Folders.into_iter() {


@@ -7,7 +7,7 @@ mod organizations;
 mod sends;
 pub mod two_factor;

-pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType};
+pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
 pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
 pub use events::{event_cleanup_job, log_event, log_user_event};
 pub use sends::purge_sends;


@@ -1378,6 +1378,12 @@ async fn post_org_import(
     let data: ImportData = data.into_inner().data;
     let org_id = query.organization_id;

+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     let mut collections = Vec::new();
     for coll in data.Collections {
         let collection = Collection::new(org_id.clone(), coll.Name);
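
The validate_notes helper called in both hunks above is added in the fourth changed file (the Cipher model), whose diff is not shown here. As a rough sketch of what such a pre-validation can look like — assuming the 10,000-character limit from the commit message, the CipherData struct made public in the first hunk, and Vaultwarden's EmptyResult alias and err_json! error macro — it would collect every offending item and reject the whole import with a Bitwarden-style validation error:

// Sketch only (not the exact patch), inside `impl Cipher` in the Cipher model.
pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
    let mut validation_errors = serde_json::Map::new();
    for (index, cipher) in cipher_data.iter().enumerate() {
        // Flag any encrypted note above the 10_000-character limit.
        if let Some(note) = &cipher.Notes {
            if note.len() > 10_000 {
                validation_errors.insert(
                    // Field naming mirrors Bitwarden's validation errors so the
                    // client can point the user at the offending item.
                    format!("Ciphers[{index}].Notes"),
                    serde_json::json!([
                        "The field Notes exceeds the maximum encrypted value length of 10000 characters."
                    ]),
                );
            }
        }
    }
    if !validation_errors.is_empty() {
        // err_json! is assumed to be Vaultwarden's early-return helper for JSON error bodies.
        err_json!(
            serde_json::json!({
                "message": "The model state is invalid.",
                "validationErrors": validation_errors,
                "object": "error"
            }),
            "Import validation failed"
        )
    }
    Ok(())
}

Because the check runs before any folder, collection, or cipher is written, a failed validation leaves the vault untouched instead of partially imported.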