Merge branch 'BlackDex-issue-3048'

commit 525979d5d9
4 changed files with 42 additions and 3 deletions
src/api/core/ciphers.rs

@@ -205,7 +205,7 @@ pub struct CipherData {
     pub Type: i32,
     pub Name: String,
-    Notes: Option<String>,
+    pub Notes: Option<String>,
     Fields: Option<Value>,
 
     // Only one of these should exist, depending on type
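(The added `pub` on `Notes` is what lets the new `Cipher::validate_notes` in `src/db/models/cipher.rs`, added in the last hunk below, read `cipher.Notes` from outside the `ciphers` module.)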
@@ -542,6 +542,12 @@ async fn post_ciphers_import(
     let data: ImportData = data.into_inner().data;
 
+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     // Read and create the folders
     let mut folders: Vec<_> = Vec::new();
     for folder in data.Folders.into_iter() {
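Why the call sits before folder creation: the `?` on `Cipher::validate_notes(&data.Ciphers)?` aborts the request before anything is written, matching the all-or-nothing import behavior described in the comments. A minimal, self-contained sketch of that short-circuit pattern follows; `EmptyResult` is simplified here to `Result<(), String>` (vaultwarden's real alias wraps its own `Error` type) and the helper names are illustrative — only the 10,000-character limit comes from this diff.

// Sketch of the validate-then-import short-circuit, not vaultwarden's API.
type EmptyResult = Result<(), String>;

// Mirrors the 10_000-character limit enforced by `Cipher::validate_notes`.
fn validate_notes(notes: &[Option<String>]) -> EmptyResult {
    for (index, note) in notes.iter().enumerate() {
        if let Some(n) = note {
            if n.len() > 10_000 {
                return Err(format!("Ciphers[{index}].Notes exceeds 10000 characters"));
            }
        }
    }
    Ok(())
}

fn import(notes: &[Option<String>]) -> EmptyResult {
    // `?` propagates the error immediately, so nothing below runs for a bad import.
    validate_notes(notes)?;
    // ... folders and ciphers would only be created past this point ...
    Ok(())
}

fn main() {
    assert!(import(&[Some("x".repeat(10_001))]).is_err());
    assert!(import(&[Some(String::from("short note")), None]).is_ok());
}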
src/api/core/mod.rs

@@ -7,7 +7,7 @@ mod organizations;
 mod sends;
 pub mod two_factor;
 
-pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType};
+pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
 pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
 pub use events::{event_cleanup_job, log_event, log_user_event};
 pub use sends::purge_sends;
src/api/core/organizations.rs

@@ -1378,6 +1378,12 @@ async fn post_org_import(
     let data: ImportData = data.into_inner().data;
     let org_id = query.organization_id;
 
+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     let mut collections = Vec::new();
     for coll in data.Collections {
         let collection = Collection::new(org_id.clone(), coll.Name);
src/db/models/cipher.rs

@@ -6,7 +6,7 @@ use super::{
     Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization,
 };
 
-use crate::api::core::CipherSyncData;
+use crate::api::core::{CipherData, CipherSyncData};
 
 use std::borrow::Cow;
@@ -73,6 +73,33 @@ impl Cipher {
             reprompt: None,
         }
     }
+
+    pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
+        let mut validation_errors = serde_json::Map::new();
+        for (index, cipher) in cipher_data.iter().enumerate() {
+            if let Some(note) = &cipher.Notes {
+                if note.len() > 10_000 {
+                    validation_errors.insert(
+                        format!("Ciphers[{index}].Notes"),
+                        serde_json::to_value([
+                            "The field Notes exceeds the maximum encrypted value length of 10000 characters.",
+                        ])
+                        .unwrap(),
+                    );
+                }
+            }
+        }
+        if !validation_errors.is_empty() {
+            let err_json = json!({
+                "message": "The model state is invalid.",
+                "validationErrors" : validation_errors,
+                "object": "error"
+            });
+            err_json!(err_json, "Import validation errors")
+        } else {
+            Ok(())
+        }
+    }
 }
 
 use crate::db::DbConn;
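For reference, below is a standalone sketch of the JSON error body a client would receive from the validation above. It rebuilds the same payload shape with plain serde_json; `err_json!` and `EmptyResult` are vaultwarden internals not reproduced here, and `build_validation_error` is a hypothetical helper name introduced only for this sketch.

// Standalone reproduction of the validation-error payload built by
// `validate_notes` in the hunk above. Only dependency: serde_json.
use serde_json::{json, Map, Value};

fn build_validation_error(bad_indices: &[usize]) -> Value {
    let mut validation_errors = Map::new();
    for index in bad_indices {
        validation_errors.insert(
            format!("Ciphers[{index}].Notes"),
            json!(["The field Notes exceeds the maximum encrypted value length of 10000 characters."]),
        );
    }
    json!({
        "message": "The model state is invalid.",
        "validationErrors": validation_errors,
        "object": "error"
    })
}

fn main() {
    // An import whose first and fourth ciphers have oversized notes would get:
    let body = build_validation_error(&[0, 3]);
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}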