author     Daniel García <[email protected]>   2024-06-16 21:39:30 +0200
committer  BlackDex <[email protected]>          2024-06-19 13:13:34 +0200
commit     ccdcbc89ce7054afb7e7c7910a5ef5fb721a1605 (patch)
tree       2b1923260ca72d0ce005fa01b5b5ad37ba4c99b3 /src
parent     55fdee3bf8f45ccc7a5a188ab17855ac9866a979 (diff)
Change API inputs/outputs and structs to camelCase
Diffstat (limited to 'src')
-rw-r--r--  src/api/admin.rs                              |   6
-rw-r--r--  src/api/core/accounts.rs                      | 479
-rw-r--r--  src/api/core/ciphers.rs                       | 396
-rw-r--r--  src/api/core/emergency_access.rs              | 138
-rw-r--r--  src/api/core/events.rs                        |  51
-rw-r--r--  src/api/core/folders.rs                       |  32
-rw-r--r--  src/api/core/mod.rs                           |  63
-rw-r--r--  src/api/core/organizations.rs                 | 757
-rw-r--r--  src/api/core/public.rs                        |  71
-rw-r--r--  src/api/core/sends.rs                         | 154
-rw-r--r--  src/api/core/two_factor/authenticator.rs      |  55
-rw-r--r--  src/api/core/two_factor/duo.rs                |  60
-rw-r--r--  src/api/core/two_factor/email.rs              |  70
-rw-r--r--  src/api/core/two_factor/mod.rs                |  60
-rw-r--r--  src/api/core/two_factor/protected_actions.rs  |  17
-rw-r--r--  src/api/core/two_factor/webauthn.rs           | 160
-rw-r--r--  src/api/core/two_factor/yubikey.rs            |  76
-rw-r--r--  src/api/identity.rs                           |   9
-rw-r--r--  src/api/mod.rs                                |  13
-rw-r--r--  src/db/models/attachment.rs                   |  14
-rw-r--r--  src/db/models/cipher.rs                       |  99
-rw-r--r--  src/db/models/collection.rs                   |  16
-rw-r--r--  src/db/models/emergency_access.rs             |  42
-rw-r--r--  src/db/models/folder.rs                       |   8
-rw-r--r--  src/db/models/group.rs                        |  36
-rw-r--r--  src/db/models/org_policy.rs                   |  31
-rw-r--r--  src/db/models/organization.rs                 | 207
-rw-r--r--  src/db/models/send.rs                         |  75
-rw-r--r--  src/db/models/two_factor.rs                   |  12
-rw-r--r--  src/db/models/user.rs                         |  38
-rw-r--r--  src/error.rs                                  |  18
-rw-r--r--  src/main.rs                                   |   2
-rw-r--r--  src/static/global_domains.json                | 540
-rw-r--r--  src/util.rs                                   |  36
34 files changed, 1891 insertions, 1950 deletions
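
In short: request handlers that previously took the custom JsonUpcase<T> extractor with PascalCase struct fields under #[allow(non_snake_case)] now take Rocket's plain Json<T> with snake_case fields and #[serde(rename_all = "camelCase")], and the keys of hand-built json!({ ... }) responses are lowercased to camelCase to match. A minimal sketch of the deserialization side of that pattern, using illustrative struct and field names rather than code from the repository (assumes serde with the derive feature and serde_json as dependencies):

    use serde::Deserialize;

    // With rename_all = "camelCase", the camelCase keys sent by clients
    // ("masterPasswordHash", "newEmail") map onto snake_case Rust fields,
    // so neither #[allow(non_snake_case)] nor the former JsonUpcase
    // extractor is needed.
    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct ExampleData {
        master_password_hash: String,
        new_email: String,
    }

    fn main() {
        let body = r#"{"masterPasswordHash": "hashed", "newEmail": "[email protected]"}"#;
        let data: ExampleData = serde_json::from_str(body).expect("valid camelCase JSON");
        assert_eq!(data.new_email, "[email protected]");
        println!("{data:?}");
    }

The json! response literals in the hunks below are adjusted by hand to the same camelCase convention, since serde rename attributes do not affect keys written directly inside json! macros.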
diff --git a/src/api/admin.rs b/src/api/admin.rs
index b3dc588c..a7ab60ca 100644
--- a/src/api/admin.rs
+++ b/src/api/admin.rs
@@ -265,8 +265,8 @@ fn admin_page_login() -> ApiResult<Html<String>> {
render_admin_login(None, None)
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct InviteData {
email: String,
}
@@ -475,7 +475,7 @@ async fn resend_user_invite(uuid: &str, _token: AdminToken, mut conn: DbConn) ->
}
}
-#[derive(Deserialize, Debug)]
+#[derive(Debug, Deserialize)]
struct UserOrgTypeData {
user_type: NumberOrString,
user_uuid: String,
diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs
index 812b6c7a..da787ac7 100644
--- a/src/api/core/accounts.rs
+++ b/src/api/core/accounts.rs
@@ -6,7 +6,7 @@ use serde_json::Value;
use crate::{
api::{
core::{log_user_event, two_factor::email},
- register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, JsonUpcase, Notify,
+ register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify,
PasswordOrOtpData, UpdateType,
},
auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers},
@@ -62,29 +62,29 @@ pub fn routes() -> Vec<rocket::Route> {
]
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
pub struct RegisterData {
- Email: String,
- Kdf: Option<i32>,
- KdfIterations: Option<i32>,
- KdfMemory: Option<i32>,
- KdfParallelism: Option<i32>,
- Key: String,
- Keys: Option<KeysData>,
- MasterPasswordHash: String,
- MasterPasswordHint: Option<String>,
- Name: Option<String>,
- Token: Option<String>,
+ email: String,
+ kdf: Option<i32>,
+ kdf_iterations: Option<i32>,
+ kdf_memory: Option<i32>,
+ kdf_parallelism: Option<i32>,
+ key: String,
+ keys: Option<KeysData>,
+ master_password_hash: String,
+ master_password_hint: Option<String>,
+ name: Option<String>,
+ token: Option<String>,
#[allow(dead_code)]
- OrganizationUserId: Option<String>,
+ organization_user_id: Option<String>,
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct KeysData {
- EncryptedPrivateKey: String,
- PublicKey: String,
+ encrypted_private_key: String,
+ public_key: String,
}
/// Trims whitespace from password hints, and converts blank password hints to `None`.
@@ -119,17 +119,17 @@ async fn is_email_2fa_required(org_user_uuid: Option<String>, conn: &mut DbConn)
}
#[post("/accounts/register", data = "<data>")]
-async fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
+async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
_register(data, conn).await
}
-pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> JsonResult {
- let data: RegisterData = data.into_inner().data;
- let email = data.Email.to_lowercase();
+pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult {
+ let data: RegisterData = data.into_inner();
+ let email = data.email.to_lowercase();
// Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden)
// This also prevents issues with very long usernames causing to large JWT's. See #2419
- if let Some(ref name) = data.Name {
+ if let Some(ref name) = data.name {
if name.len() > 50 {
err!("The field Name must be a string with a maximum length of 50.");
}
@@ -137,7 +137,7 @@ pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> Json
// Check against the password hint setting here so if it fails, the user
// can retry without losing their invitation below.
- let password_hint = clean_password_hint(&data.MasterPasswordHint);
+ let password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&password_hint)?;
let mut verified_by_invite = false;
@@ -148,7 +148,7 @@ pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> Json
err!("Registration not allowed or user already exists")
}
- if let Some(token) = data.Token {
+ if let Some(token) = data.token {
let claims = decode_invite(&token)?;
if claims.email == email {
// Verify the email address when signing up via a valid invite token
@@ -188,28 +188,28 @@ pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> Json
// Make sure we don't leave a lingering invitation.
Invitation::take(&email, &mut conn).await;
- if let Some(client_kdf_type) = data.Kdf {
+ if let Some(client_kdf_type) = data.kdf {
user.client_kdf_type = client_kdf_type;
}
- if let Some(client_kdf_iter) = data.KdfIterations {
+ if let Some(client_kdf_iter) = data.kdf_iterations {
user.client_kdf_iter = client_kdf_iter;
}
- user.client_kdf_memory = data.KdfMemory;
- user.client_kdf_parallelism = data.KdfParallelism;
+ user.client_kdf_memory = data.kdf_memory;
+ user.client_kdf_parallelism = data.kdf_parallelism;
- user.set_password(&data.MasterPasswordHash, Some(data.Key), true, None);
+ user.set_password(&data.master_password_hash, Some(data.key), true, None);
user.password_hint = password_hint;
// Add extra fields if present
- if let Some(name) = data.Name {
+ if let Some(name) = data.name {
user.name = name;
}
- if let Some(keys) = data.Keys {
- user.private_key = Some(keys.EncryptedPrivateKey);
- user.public_key = Some(keys.PublicKey);
+ if let Some(keys) = data.keys {
+ user.private_key = Some(keys.encrypted_private_key);
+ user.public_key = Some(keys.public_key);
}
if CONFIG.mail_enabled() {
@@ -222,7 +222,7 @@ pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> Json
error!("Error sending welcome email: {:#?}", e);
}
- if verified_by_invite && is_email_2fa_required(data.OrganizationUserId, &mut conn).await {
+ if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await {
let _ = email::activate_email_2fa(&user, &mut conn).await;
}
}
@@ -237,8 +237,8 @@ pub async fn _register(data: JsonUpcase<RegisterData>, mut conn: DbConn) -> Json
}
Ok(Json(json!({
- "Object": "register",
- "CaptchaBypassToken": "",
+ "object": "register",
+ "captchaBypassToken": "",
})))
}
@@ -247,57 +247,57 @@ async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
Json(headers.user.to_json(&mut conn).await)
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct ProfileData {
- // Culture: String, // Ignored, always use en-US
- // MasterPasswordHint: Option<String>, // Ignored, has been moved to ChangePassData
- Name: String,
+ // culture: String, // Ignored, always use en-US
+ // masterPasswordHint: Option<String>, // Ignored, has been moved to ChangePassData
+ name: String,
}
#[put("/accounts/profile", data = "<data>")]
-async fn put_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn put_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
post_profile(data, headers, conn).await
}
#[post("/accounts/profile", data = "<data>")]
-async fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: ProfileData = data.into_inner().data;
+async fn post_profile(data: Json<ProfileData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: ProfileData = data.into_inner();
// Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden)
// This also prevents issues with very long usernames causing to large JWT's. See #2419
- if data.Name.len() > 50 {
+ if data.name.len() > 50 {
err!("The field Name must be a string with a maximum length of 50.");
}
let mut user = headers.user;
- user.name = data.Name;
+ user.name = data.name;
user.save(&mut conn).await?;
Ok(Json(user.to_json(&mut conn).await))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AvatarData {
- AvatarColor: Option<String>,
+ avatar_color: Option<String>,
}
#[put("/accounts/avatar", data = "<data>")]
-async fn put_avatar(data: JsonUpcase<AvatarData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: AvatarData = data.into_inner().data;
+async fn put_avatar(data: Json<AvatarData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: AvatarData = data.into_inner();
// It looks like it only supports the 6 hex color format.
// If you try to add the short value it will not show that color.
// Check and force 7 chars, including the #.
- if let Some(color) = &data.AvatarColor {
+ if let Some(color) = &data.avatar_color {
if color.len() != 7 {
err!("The field AvatarColor must be a HTML/Hex color code with a length of 7 characters")
}
}
let mut user = headers.user;
- user.avatar_color = data.AvatarColor;
+ user.avatar_color = data.avatar_color;
user.save(&mut conn).await?;
Ok(Json(user.to_json(&mut conn).await))
@@ -312,62 +312,57 @@ async fn get_public_keys(uuid: &str, _headers: Headers, mut conn: DbConn) -> Jso
};
Ok(Json(json!({
- "UserId": user.uuid,
- "PublicKey": user.public_key,
- "Object":"userKey"
+ "userId": user.uuid,
+ "publicKey": user.public_key,
+ "object":"userKey"
})))
}
#[post("/accounts/keys", data = "<data>")]
-async fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: KeysData = data.into_inner().data;
+async fn post_keys(data: Json<KeysData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: KeysData = data.into_inner();
let mut user = headers.user;
- user.private_key = Some(data.EncryptedPrivateKey);
- user.public_key = Some(data.PublicKey);
+ user.private_key = Some(data.encrypted_private_key);
+ user.public_key = Some(data.public_key);
user.save(&mut conn).await?;
Ok(Json(json!({
- "PrivateKey": user.private_key,
- "PublicKey": user.public_key,
- "Object":"keys"
+ "privateKey": user.private_key,
+ "publicKey": user.public_key,
+ "object":"keys"
})))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ChangePassData {
- MasterPasswordHash: String,
- NewMasterPasswordHash: String,
- MasterPasswordHint: Option<String>,
- Key: String,
+ master_password_hash: String,
+ new_master_password_hash: String,
+ master_password_hint: Option<String>,
+ key: String,
}
#[post("/accounts/password", data = "<data>")]
-async fn post_password(
- data: JsonUpcase<ChangePassData>,
- headers: Headers,
- mut conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
- let data: ChangePassData = data.into_inner().data;
+async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+ let data: ChangePassData = data.into_inner();
let mut user = headers.user;
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
- user.password_hint = clean_password_hint(&data.MasterPasswordHint);
+ user.password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&user.password_hint)?;
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn)
.await;
user.set_password(
- &data.NewMasterPasswordHash,
- Some(data.Key),
+ &data.new_master_password_hash,
+ Some(data.key),
true,
Some(vec![String::from("post_rotatekey"), String::from("get_contacts"), String::from("get_public_keys")]),
);
@@ -383,48 +378,48 @@ async fn post_password(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
- Kdf: i32,
- KdfIterations: i32,
- KdfMemory: Option<i32>,
- KdfParallelism: Option<i32>,
+ kdf: i32,
+ kdf_iterations: i32,
+ kdf_memory: Option<i32>,
+ kdf_parallelism: Option<i32>,
- MasterPasswordHash: String,
- NewMasterPasswordHash: String,
- Key: String,
+ master_password_hash: String,
+ new_master_password_hash: String,
+ key: String,
}
#[post("/accounts/kdf", data = "<data>")]
-async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
- let data: ChangeKdfData = data.into_inner().data;
+async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+ let data: ChangeKdfData = data.into_inner();
let mut user = headers.user;
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
- if data.Kdf == UserKdfType::Pbkdf2 as i32 && data.KdfIterations < 100_000 {
+ if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
err!("PBKDF2 KDF iterations must be at least 100000.")
}
- if data.Kdf == UserKdfType::Argon2id as i32 {
- if data.KdfIterations < 1 {
+ if data.kdf == UserKdfType::Argon2id as i32 {
+ if data.kdf_iterations < 1 {
err!("Argon2 KDF iterations must be at least 1.")
}
- if let Some(m) = data.KdfMemory {
+ if let Some(m) = data.kdf_memory {
if !(15..=1024).contains(&m) {
err!("Argon2 memory must be between 15 MB and 1024 MB.")
}
- user.client_kdf_memory = data.KdfMemory;
+ user.client_kdf_memory = data.kdf_memory;
} else {
err!("Argon2 memory parameter is required.")
}
- if let Some(p) = data.KdfParallelism {
+ if let Some(p) = data.kdf_parallelism {
if !(1..=16).contains(&p) {
err!("Argon2 parallelism must be between 1 and 16.")
}
- user.client_kdf_parallelism = data.KdfParallelism;
+ user.client_kdf_parallelism = data.kdf_parallelism;
} else {
err!("Argon2 parallelism parameter is required.")
}
@@ -432,9 +427,9 @@ async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, mut conn: D
user.client_kdf_memory = None;
user.client_kdf_parallelism = None;
}
- user.client_kdf_iter = data.KdfIterations;
- user.client_kdf_type = data.Kdf;
- user.set_password(&data.NewMasterPasswordHash, Some(data.Key), true, None);
+ user.client_kdf_iter = data.kdf_iterations;
+ user.client_kdf_type = data.kdf;
+ user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
let save_result = user.save(&mut conn).await;
nt.send_logout(&user, Some(headers.device.uuid)).await;
@@ -443,51 +438,51 @@ async fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, mut conn: D
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct UpdateFolderData {
// There is a bug in 2024.3.x which adds a `null` item.
// To bypass this we allow a Option here, but skip it during the updates
// See: https://github.com/bitwarden/clients/issues/8453
- Id: Option<String>,
- Name: String,
+ id: Option<String>,
+ name: String,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct UpdateEmergencyAccessData {
- Id: String,
- KeyEncrypted: String,
+ id: String,
+ key_encrypted: String,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct UpdateResetPasswordData {
- OrganizationId: String,
- ResetPasswordKey: String,
+ organization_id: String,
+ reset_password_key: String,
}
use super::ciphers::CipherData;
use super::sends::{update_send_from_data, SendData};
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct KeyData {
- Ciphers: Vec<CipherData>,
- Folders: Vec<UpdateFolderData>,
- Sends: Vec<SendData>,
- EmergencyAccessKeys: Vec<UpdateEmergencyAccessData>,
- ResetPasswordKeys: Vec<UpdateResetPasswordData>,
- Key: String,
- MasterPasswordHash: String,
- PrivateKey: String,
+ ciphers: Vec<CipherData>,
+ folders: Vec<UpdateFolderData>,
+ sends: Vec<SendData>,
+ emergency_access_keys: Vec<UpdateEmergencyAccessData>,
+ reset_password_keys: Vec<UpdateResetPasswordData>,
+ key: String,
+ master_password_hash: String,
+ private_key: String,
}
#[post("/accounts/key", data = "<data>")]
-async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
// TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
- let data: KeyData = data.into_inner().data;
+ let data: KeyData = data.into_inner();
- if !headers.user.check_valid_password(&data.MasterPasswordHash) {
+ if !headers.user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
@@ -495,15 +490,15 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
// Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
- Cipher::validate_notes(&data.Ciphers)?;
+ Cipher::validate_notes(&data.ciphers)?;
let user_uuid = &headers.user.uuid;
// Update folder data
- for folder_data in data.Folders {
+ for folder_data in data.folders {
// Skip `null` folder id entries.
// See: https://github.com/bitwarden/clients/issues/8453
- if let Some(folder_id) = folder_data.Id {
+ if let Some(folder_id) = folder_data.id {
let mut saved_folder = match Folder::find_by_uuid(&folder_id, &mut conn).await {
Some(folder) => folder,
None => err!("Folder doesn't exist"),
@@ -513,14 +508,14 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
err!("The folder is not owned by the user")
}
- saved_folder.name = folder_data.Name;
+ saved_folder.name = folder_data.name;
saved_folder.save(&mut conn).await?
}
}
// Update emergency access data
- for emergency_access_data in data.EmergencyAccessKeys {
- let mut saved_emergency_access = match EmergencyAccess::find_by_uuid(&emergency_access_data.Id, &mut conn).await
+ for emergency_access_data in data.emergency_access_keys {
+ let mut saved_emergency_access = match EmergencyAccess::find_by_uuid(&emergency_access_data.id, &mut conn).await
{
Some(emergency_access) => emergency_access,
None => err!("Emergency access doesn't exist"),
@@ -530,27 +525,27 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
err!("The emergency access is not owned by the user")
}
- saved_emergency_access.key_encrypted = Some(emergency_access_data.KeyEncrypted);
+ saved_emergency_access.key_encrypted = Some(emergency_access_data.key_encrypted);
saved_emergency_access.save(&mut conn).await?
}
// Update reset password data
- for reset_password_data in data.ResetPasswordKeys {
+ for reset_password_data in data.reset_password_keys {
let mut user_org =
- match UserOrganization::find_by_user_and_org(user_uuid, &reset_password_data.OrganizationId, &mut conn)
+ match UserOrganization::find_by_user_and_org(user_uuid, &reset_password_data.organization_id, &mut conn)
.await
{
Some(reset_password) => reset_password,
None => err!("Reset password doesn't exist"),
};
- user_org.reset_password_key = Some(reset_password_data.ResetPasswordKey);
+ user_org.reset_password_key = Some(reset_password_data.reset_password_key);
user_org.save(&mut conn).await?
}
// Update send data
- for send_data in data.Sends {
- let mut send = match Send::find_by_uuid(send_data.Id.as_ref().unwrap(), &mut conn).await {
+ for send_data in data.sends {
+ let mut send = match Send::find_by_uuid(send_data.id.as_ref().unwrap(), &mut conn).await {
Some(send) => send,
None => err!("Send doesn't exist"),
};
@@ -561,9 +556,9 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
// Update cipher data
use super::ciphers::update_cipher_from_data;
- for cipher_data in data.Ciphers {
- if cipher_data.OrganizationId.is_none() {
- let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &mut conn).await {
+ for cipher_data in data.ciphers {
+ if cipher_data.organization_id.is_none() {
+ let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.id.as_ref().unwrap(), &mut conn).await {
Some(cipher) => cipher,
None => err!("Cipher doesn't exist"),
};
@@ -583,8 +578,8 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
// Update user data
let mut user = headers.user;
- user.akey = data.Key;
- user.private_key = Some(data.PrivateKey);
+ user.akey = data.key;
+ user.private_key = Some(data.private_key);
user.reset_security_stamp();
let save_result = user.save(&mut conn).await;
@@ -598,13 +593,8 @@ async fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, mut conn: D
}
#[post("/accounts/security-stamp", data = "<data>")]
-async fn post_sstamp(
- data: JsonUpcase<PasswordOrOtpData>,
- headers: Headers,
- mut conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+ let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user;
data.validate(&user, true, &mut conn).await?;
@@ -619,84 +609,79 @@ async fn post_sstamp(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EmailTokenData {
- MasterPasswordHash: String,
- NewEmail: String,
+ master_password_hash: String,
+ new_email: String,
}
#[post("/accounts/email-token", data = "<data>")]
-async fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.email_change_allowed() {
err!("Email change is not allowed.");
}
- let data: EmailTokenData = data.into_inner().data;
+ let data: EmailTokenData = data.into_inner();
let mut user = headers.user;
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
- if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() {
+ if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
err!("Email already in use");
}
- if !CONFIG.is_email_domain_allowed(&data.NewEmail) {
+ if !CONFIG.is_email_domain_allowed(&data.new_email) {
err!("Email domain not allowed");
}
let token = crypto::generate_email_token(6);
if CONFIG.mail_enabled() {
- if let Err(e) = mail::send_change_email(&data.NewEmail, &token).await {
+ if let Err(e) = mail::send_change_email(&data.new_email, &token).await {
error!("Error sending change-email email: {:#?}", e);
}
} else {
- debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.NewEmail, token);
+ debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token);
}
- user.email_new = Some(data.NewEmail);
+ user.email_new = Some(data.new_email);
user.email_new_token = Some(token);
user.save(&mut conn).await
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ChangeEmailData {
- MasterPasswordHash: String,
- NewEmail: String,
+ master_password_hash: String,
+ new_email: String,
- Key: String,
- NewMasterPasswordHash: String,
- Token: NumberOrString,
+ key: String,
+ new_master_password_hash: String,
+ token: NumberOrString,
}
#[post("/accounts/email", data = "<data>")]
-async fn post_email(
- data: JsonUpcase<ChangeEmailData>,
- headers: Headers,
- mut conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
+async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
if !CONFIG.email_change_allowed() {
err!("Email change is not allowed.");
}
- let data: ChangeEmailData = data.into_inner().data;
+ let data: ChangeEmailData = data.into_inner();
let mut user = headers.user;
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
- if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() {
+ if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
err!("Email already in use");
}
match user.email_new {
Some(ref val) => {
- if val != &data.NewEmail {
+ if val != &data.new_email {
err!("Email change mismatch");
}
}
@@ -707,7 +692,7 @@ async fn post_email(
// Only check the token if we sent out an email...
match user.email_new_token {
Some(ref val) => {
- if *val != data.Token.into_string() {
+ if *val != data.token.into_string() {
err!("Token mismatch");
}
}
@@ -718,11 +703,11 @@ async fn post_email(
user.verified_at = None;
}
- user.email = data.NewEmail;
+ user.email = data.new_email;
user.email_new = None;
user.email_new_token = None;
- user.set_password(&data.NewMasterPasswordHash, Some(data.Key), true, None);
+ user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
let save_result = user.save(&mut conn).await;
@@ -747,22 +732,22 @@ async fn post_verify_email(headers: Headers) -> EmptyResult {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct VerifyEmailTokenData {
- UserId: String,
- Token: String,
+ user_id: String,
+ token: String,
}
#[post("/accounts/verify-email-token", data = "<data>")]
-async fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
- let data: VerifyEmailTokenData = data.into_inner().data;
+async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
+ let data: VerifyEmailTokenData = data.into_inner();
- let mut user = match User::find_by_uuid(&data.UserId, &mut conn).await {
+ let mut user = match User::find_by_uuid(&data.user_id, &mut conn).await {
Some(user) => user,
None => err!("User doesn't exist"),
};
- let claims = match decode_verify_email(&data.Token) {
+ let claims = match decode_verify_email(&data.token) {
Ok(claims) => claims,
Err(_) => err!("Invalid claim"),
};
@@ -780,17 +765,17 @@ async fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, mut con
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct DeleteRecoverData {
- Email: String,
+ email: String,
}
#[post("/accounts/delete-recover", data = "<data>")]
-async fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, mut conn: DbConn) -> EmptyResult {
- let data: DeleteRecoverData = data.into_inner().data;
+async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) -> EmptyResult {
+ let data: DeleteRecoverData = data.into_inner();
if CONFIG.mail_enabled() {
- if let Some(user) = User::find_by_mail(&data.Email, &mut conn).await {
+ if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
error!("Error sending delete account email: {:#?}", e);
}
@@ -806,22 +791,22 @@ async fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, mut conn: DbCo
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct DeleteRecoverTokenData {
- UserId: String,
- Token: String,
+ user_id: String,
+ token: String,
}
#[post("/accounts/delete-recover-token", data = "<data>")]
-async fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
- let data: DeleteRecoverTokenData = data.into_inner().data;
+async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
+ let data: DeleteRecoverTokenData = data.into_inner();
- let user = match User::find_by_uuid(&data.UserId, &mut conn).await {
+ let user = match User::find_by_uuid(&data.user_id, &mut conn).await {
Some(user) => user,
None => err!("User doesn't exist"),
};
- let claims = match decode_delete(&data.Token) {
+ let claims = match decode_delete(&data.token) {
Ok(claims) => claims,
Err(_) => err!("Invalid claim"),
};
@@ -832,13 +817,13 @@ async fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, mut
}
#[post("/accounts/delete", data = "<data>")]
-async fn post_delete_account(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn post_delete_account(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> EmptyResult {
delete_account(data, headers, conn).await
}
#[delete("/accounts", data = "<data>")]
-async fn delete_account(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, true, &mut conn).await?;
@@ -853,21 +838,21 @@ fn revision_date(headers: Headers) -> JsonResult {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct PasswordHintData {
- Email: String,
+ email: String,
}
#[post("/accounts/password-hint", data = "<data>")]
-async fn password_hint(data: JsonUpcase<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
+async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() {
err!("This server is not configured to provide password hints.");
}
const NO_HINT: &str = "Sorry, you have no password hint...";
- let data: PasswordHintData = data.into_inner().data;
- let email = &data.Email;
+ let data: PasswordHintData = data.into_inner();
+ let email = &data.email;
match User::find_by_mail(email, &mut conn).await {
None => {
@@ -901,29 +886,29 @@ async fn password_hint(data: JsonUpcase<PasswordHintData>, mut conn: DbConn) ->
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct PreloginData {
- Email: String,
+ email: String,
}
#[post("/accounts/prelogin", data = "<data>")]
-async fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> {
+async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
_prelogin(data, conn).await
}
-pub async fn _prelogin(data: JsonUpcase<PreloginData>, mut conn: DbConn) -> Json<Value> {
- let data: PreloginData = data.into_inner().data;
+pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value> {
+ let data: PreloginData = data.into_inner();
- let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.Email, &mut conn).await {
+ let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => (user.client_kdf_type, user.client_kdf_iter, user.client_kdf_memory, user.client_kdf_parallelism),
None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None),
};
let result = json!({
- "Kdf": kdf_type,
- "KdfIterations": kdf_iter,
- "KdfMemory": kdf_mem,
- "KdfParallelism": kdf_para,
+ "kdf": kdf_type,
+ "kdfIterations": kdf_iter,
+ "kdfMemory": kdf_mem,
+ "kdfParallelism": kdf_para,
});
Json(result)
@@ -931,27 +916,27 @@ pub async fn _prelogin(data: JsonUpcase<PreloginData>, mut conn: DbConn) -> Json
// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct SecretVerificationRequest {
- MasterPasswordHash: String,
+ master_password_hash: String,
}
#[post("/accounts/verify-password", data = "<data>")]
-fn verify_password(data: JsonUpcase<SecretVerificationRequest>, headers: Headers) -> EmptyResult {
- let data: SecretVerificationRequest = data.into_inner().data;
+fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers) -> EmptyResult {
+ let data: SecretVerificationRequest = data.into_inner();
let user = headers.user;
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
Ok(())
}
-async fn _api_key(data: JsonUpcase<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult {
use crate::util::format_date;
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user;
data.validate(&user, true, &mut conn).await?;
@@ -962,19 +947,19 @@ async fn _api_key(data: JsonUpcase<PasswordOrOtpData>, rotate: bool, headers: He
}
Ok(Json(json!({
- "ApiKey": user.api_key,
- "RevisionDate": format_date(&user.updated_at),
- "Object": "apiKey",
+ "apiKey": user.api_key,
+ "revisionDate": format_date(&user.updated_at),
+ "object": "apiKey",
})))
}
#[post("/accounts/api-key", data = "<data>")]
-async fn api_key(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> JsonResult {
_api_key(data, false, headers, conn).await
}
#[post("/accounts/rotate-api-key", data = "<data>")]
-async fn rotate_api_key(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> JsonResult {
_api_key(data, true, headers, conn).await
}
@@ -1028,20 +1013,20 @@ impl<'r> FromRequest<'r> for KnownDevice {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct PushToken {
- PushToken: String,
+ push_token: String,
}
#[post("/devices/identifier/<uuid>/token", data = "<data>")]
-async fn post_device_token(uuid: &str, data: JsonUpcase<PushToken>, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn post_device_token(uuid: &str, data: Json<PushToken>, headers: Headers, conn: DbConn) -> EmptyResult {
put_device_token(uuid, data, headers, conn).await
}
#[put("/devices/identifier/<uuid>/token", data = "<data>")]
-async fn put_device_token(uuid: &str, data: JsonUpcase<PushToken>, headers: Headers, mut conn: DbConn) -> EmptyResult {
- let data = data.into_inner().data;
- let token = data.PushToken;
+async fn put_device_token(uuid: &str, data: Json<PushToken>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+ let data = data.into_inner();
+ let token = data.push_token;
let mut device = match Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await {
Some(device) => device,
@@ -1096,12 +1081,12 @@ async fn post_clear_device_token(uuid: &str, conn: DbConn) -> EmptyResult {
}
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AuthRequestRequest {
- accessCode: String,
- deviceIdentifier: String,
+ access_code: String,
+ device_identifier: String,
email: String,
- publicKey: String,
+ public_key: String,
#[serde(alias = "type")]
_type: i32,
}
@@ -1124,15 +1109,15 @@ async fn post_auth_request(
let mut auth_request = AuthRequest::new(
user.uuid.clone(),
- data.deviceIdentifier.clone(),
+ data.device_identifier.clone(),
headers.device_type,
headers.ip.ip.to_string(),
- data.accessCode,
- data.publicKey,
+ data.access_code,
+ data.public_key,
);
auth_request.save(&mut conn).await?;
- nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.deviceIdentifier, &mut conn).await;
+ nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
Ok(Json(json!({
"id": auth_request.uuid,
@@ -1178,12 +1163,12 @@ async fn get_auth_request(uuid: &str, mut conn: DbConn) -> JsonResult {
}
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AuthResponseRequest {
- deviceIdentifier: String,
+ device_identifier: String,
key: String,
- masterPasswordHash: Option<String>,
- requestApproved: bool,
+ master_password_hash: Option<String>,
+ request_approved: bool,
}
#[put("/auth-requests/<uuid>", data = "<data>")]
@@ -1202,15 +1187,15 @@ async fn put_auth_request(
}
};
- auth_request.approved = Some(data.requestApproved);
+ auth_request.approved = Some(data.request_approved);
auth_request.enc_key = Some(data.key);
- auth_request.master_password_hash = data.masterPasswordHash;
- auth_request.response_device_id = Some(data.deviceIdentifier.clone());
+ auth_request.master_password_hash = data.master_password_hash;
+ auth_request.response_device_id = Some(data.device_identifier.clone());
auth_request.save(&mut conn).await?;
if auth_request.approved.unwrap_or(false) {
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
- nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.deviceIdentifier, &mut conn).await;
+ nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.device_identifier, &mut conn).await;
}
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs
index 18d1b998..9a4456a4 100644
--- a/src/api/core/ciphers.rs
+++ b/src/api/core/ciphers.rs
@@ -12,7 +12,7 @@ use serde_json::Value;
use crate::util::NumberOrString;
use crate::{
- api::{self, core::log_event, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordOrOtpData, UpdateType},
+ api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType},
auth::Headers,
crypto,
db::{models::*, DbConn, DbPool},
@@ -141,15 +141,15 @@ async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value>
};
Json(json!({
- "Profile": user_json,
- "Folders": folders_json,
- "Collections": collections_json,
- "Policies": policies_json,
- "Ciphers": ciphers_json,
- "Domains": domains_json,
- "Sends": sends_json,
+ "profile": user_json,
+ "folders": folders_json,
+ "collections": collections_json,
+ "policies": policies_json,
+ "ciphers": ciphers_json,
+ "domains": domains_json,
+ "sends": sends_json,
"unofficialServer": true,
- "Object": "sync"
+ "object": "sync"
}))
}
@@ -167,9 +167,9 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
}
Json(json!({
- "Data": ciphers_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": ciphers_json,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -198,17 +198,17 @@ async fn get_cipher_details(uuid: &str, headers: Headers, conn: DbConn) -> JsonR
get_cipher(uuid, headers, conn).await
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
pub struct CipherData {
// Id is optional as it is included only in bulk share
- pub Id: Option<String>,
+ pub id: Option<String>,
// Folder id is not included in import
- FolderId: Option<String>,
+ folder_id: Option<String>,
// TODO: Some of these might appear all the time, no need for Option
- pub OrganizationId: Option<String>,
+ pub organization_id: Option<String>,
- Key: Option<String>,
+ key: Option<String>,
/*
Login = 1,
@@ -216,27 +216,27 @@ pub struct CipherData {
Card = 3,
Identity = 4
*/
- pub Type: i32,
- pub Name: String,
- pub Notes: Option<String>,
- Fields: Option<Value>,
+ pub r#type: i32,
+ pub name: String,
+ pub notes: Option<String>,
+ fields: Option<Value>,
// Only one of these should exist, depending on type
- Login: Option<Value>,
- SecureNote: Option<Value>,
- Card: Option<Value>,
- Identity: Option<Value>,
+ login: Option<Value>,
+ secure_note: Option<Value>,
+ card: Option<Value>,
+ identity: Option<Value>,
- Favorite: Option<bool>,
- Reprompt: Option<i32>,
+ favorite: Option<bool>,
+ reprompt: Option<i32>,
- PasswordHistory: Option<Value>,
+ password_history: Option<Value>,
// These are used during key rotation
// 'Attachments' is unused, contains map of {id: filename}
- #[serde(rename = "Attachments")]
- _Attachments: Option<Value>,
- Attachments2: Option<HashMap<String, Attachments2Data>>,
+ #[allow(dead_code)]
+ attachments: Option<Value>,
+ attachments2: Option<HashMap<String, Attachments2Data>>,
// The revision datetime (in ISO 8601 format) of the client's local copy
// of the cipher. This is used to prevent a client from updating a cipher
@@ -244,31 +244,26 @@ pub struct CipherData {
// loss. It's not an error when no value is provided; this can happen
// when using older client versions, or if the operation doesn't involve
// updating an existing cipher.
- LastKnownRevisionDate: Option<String>,
+ last_known_revision_date: Option<String>,
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
pub struct PartialCipherData {
- FolderId: Option<String>,
- Favorite: bool,
+ folder_id: Option<String>,
+ favorite: bool,
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
pub struct Attachments2Data {
- FileName: String,
- Key: String,
+ file_name: String,
+ key: String,
}
/// Called when an org admin clones an org cipher.
#[post("/ciphers/admin", data = "<data>")]
-async fn post_ciphers_admin(
- data: JsonUpcase<ShareCipherData>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn post_ciphers_admin(data: Json<ShareCipherData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
post_ciphers_create(data, headers, conn, nt).await
}
@@ -277,25 +272,25 @@ async fn post_ciphers_admin(
/// `organizationId` is null.
#[post("/ciphers/create", data = "<data>")]
async fn post_ciphers_create(
- data: JsonUpcase<ShareCipherData>,
+ data: Json<ShareCipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let mut data: ShareCipherData = data.into_inner().data;
+ let mut data: ShareCipherData = data.into_inner();
// Check if there are one more more collections selected when this cipher is part of an organization.
// err if this is not the case before creating an empty cipher.
- if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() {
+ if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() {
err!("You must select at least one collection.");
}
// This check is usually only needed in update_cipher_from_data(), but we
// need it here as well to avoid creating an empty cipher in the call to
// cipher.save() below.
- enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &mut conn).await?;
+ enforce_personal_ownership_policy(Some(&data.cipher), &headers, &mut conn).await?;
- let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone());
+ let mut cipher = Cipher::new(data.cipher.r#type, data.cipher.name.clone());
cipher.user_uuid = Some(headers.user.uuid.clone());
cipher.save(&mut conn).await?;
@@ -305,23 +300,23 @@ async fn post_ciphers_create(
// the current time, so the stale data check will end up failing down the
// line. Since this function only creates new ciphers (whether by cloning
// or otherwise), we can just ignore this field entirely.
- data.Cipher.LastKnownRevisionDate = None;
+ data.cipher.last_known_revision_date = None;
share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await
}
/// Called when creating a new user-owned cipher.
#[post("/ciphers", data = "<data>")]
-async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
- let mut data: CipherData = data.into_inner().data;
+async fn post_ciphers(data: Json<CipherData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
+ let mut data: CipherData = data.into_inner();
// The web/browser clients set this field to null as expected, but the
// mobile clients seem to set the invalid value `0001-01-01T00:00:00`,
// which results in a warning message being logged. This field isn't
// needed when creating a new cipher, so just ignore it unconditionally.
- data.LastKnownRevisionDate = None;
+ data.last_known_revision_date = None;
- let mut cipher = Cipher::new(data.Type, data.Name.clone());
+ let mut cipher = Cipher::new(data.r#type, data.name.clone());
update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
@@ -339,7 +334,7 @@ async fn enforce_personal_ownership_policy(
headers: &Headers,
conn: &mut DbConn,
) -> EmptyResult {
- if data.is_none() || data.unwrap().OrganizationId.is_none() {
+ if data.is_none() || data.unwrap().organization_id.is_none() {
let user_uuid = &headers.user.uuid;
let policy_type = OrgPolicyType::PersonalOwnership;
if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, None, conn).await {
@@ -363,7 +358,7 @@ pub async fn update_cipher_from_data(
// Check that the client isn't updating an existing cipher with stale data.
// And only perform this check when not importing ciphers, else the date/time check will fail.
if ut != UpdateType::None {
- if let Some(dt) = data.LastKnownRevisionDate {
+ if let Some(dt) = data.last_known_revision_date {
match NaiveDateTime::parse_from_str(&dt, "%+") {
// ISO 8601 format
Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
@@ -375,20 +370,20 @@ pub async fn update_cipher_from_data(
}
}
- if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.OrganizationId {
+ if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.organization_id {
err!("Organization mismatch. Please resync the client before updating the cipher")
}
- if let Some(note) = &data.Notes {
+ if let Some(note) = &data.notes {
if note.len() > 10_000 {
err!("The field Notes exceeds the maximum encrypted value length of 10000 characters.")
}
}
// Check if this cipher is being transferred from a personal to an organization vault
- let transfer_cipher = cipher.organization_uuid.is_none() && data.OrganizationId.is_some();
+ let transfer_cipher = cipher.organization_uuid.is_none() && data.organization_id.is_some();
- if let Some(org_id) = data.OrganizationId {
+ if let Some(org_id) = data.organization_id {
match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn).await {
None => err!("You don't have permission to add item to organization"),
Some(org_user) => {
@@ -412,7 +407,7 @@ pub async fn update_cipher_from_data(
cipher.user_uuid = Some(headers.user.uuid.clone());
}
- if let Some(ref folder_id) = data.FolderId {
+ if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, conn).await {
Some(folder) => {
if folder.user_uuid != headers.user.uuid {
@@ -424,7 +419,7 @@ pub async fn update_cipher_from_data(
}
// Modify attachments name and keys when rotating
- if let Some(attachments) = data.Attachments2 {
+ if let Some(attachments) = data.attachments2 {
for (id, attachment) in attachments {
let mut saved_att = match Attachment::find_by_id(&id, conn).await {
Some(att) => att,
@@ -445,8 +440,8 @@ pub async fn update_cipher_from_data(
break;
}
- saved_att.akey = Some(attachment.Key);
- saved_att.file_name = attachment.FileName;
+ saved_att.akey = Some(attachment.key);
+ saved_att.file_name = attachment.file_name;
saved_att.save(conn).await?;
}
@@ -460,44 +455,44 @@ pub async fn update_cipher_from_data(
fn _clean_cipher_data(mut json_data: Value) -> Value {
if json_data.is_array() {
json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| {
- f.as_object_mut().unwrap().remove("Response");
+ f.as_object_mut().unwrap().remove("response");
});
};
json_data
}
- let type_data_opt = match data.Type {
- 1 => data.Login,
- 2 => data.SecureNote,
- 3 => data.Card,
- 4 => data.Identity,
+ let type_data_opt = match data.r#type {
+ 1 => data.login,
+ 2 => data.secure_note,
+ 3 => data.card,
+ 4 => data.identity,
_ => err!("Invalid type"),
};
let type_data = match type_data_opt {
Some(mut data) => {
// Remove the 'Response' key from the base object.
- data.as_object_mut().unwrap().remove("Response");
+ data.as_object_mut().unwrap().remove("response");
// Remove the 'Response' key from every Uri.
- if data["Uris"].is_array() {
- data["Uris"] = _clean_cipher_data(data["Uris"].clone());
+ if data["uris"].is_array() {
+ data["uris"] = _clean_cipher_data(data["uris"].clone());
}
data
}
None => err!("Data missing"),
};
- cipher.key = data.Key;
- cipher.name = data.Name;
- cipher.notes = data.Notes;
- cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string());
+ cipher.key = data.key;
+ cipher.name = data.name;
+ cipher.notes = data.notes;
+ cipher.fields = data.fields.map(|f| _clean_cipher_data(f).to_string());
cipher.data = type_data.to_string();
- cipher.password_history = data.PasswordHistory.map(|f| f.to_string());
- cipher.reprompt = data.Reprompt;
+ cipher.password_history = data.password_history.map(|f| f.to_string());
+ cipher.reprompt = data.reprompt;
cipher.save(conn).await?;
- cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn).await?;
- cipher.set_favorite(data.Favorite, &headers.user.uuid, conn).await?;
+ cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?;
+ cipher.set_favorite(data.favorite, &headers.user.uuid, conn).await?;
if ut != UpdateType::None {
// Only log events for organizational ciphers
@@ -533,43 +528,43 @@ pub async fn update_cipher_from_data(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ImportData {
- Ciphers: Vec<CipherData>,
- Folders: Vec<FolderData>,
- FolderRelationships: Vec<RelationsData>,
+ ciphers: Vec<CipherData>,
+ folders: Vec<FolderData>,
+ folder_relationships: Vec<RelationsData>,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct RelationsData {
// Cipher id
- Key: usize,
+ key: usize,
// Folder id
- Value: usize,
+ value: usize,
}
#[post("/ciphers/import", data = "<data>")]
async fn post_ciphers_import(
- data: JsonUpcase<ImportData>,
+ data: Json<ImportData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
enforce_personal_ownership_policy(None, &headers, &mut conn).await?;
- let data: ImportData = data.into_inner().data;
+ let data: ImportData = data.into_inner();
// Validate the import before continuing
// Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
- Cipher::validate_notes(&data.Ciphers)?;
+ Cipher::validate_notes(&data.ciphers)?;
// Read and create the folders
let mut folders: Vec<_> = Vec::new();
- for folder in data.Folders.into_iter() {
- let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name);
+ for folder in data.folders.into_iter() {
+ let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.name);
new_folder.save(&mut conn).await?;
folders.push(new_folder);
@@ -578,16 +573,16 @@ async fn post_ciphers_import(
// Read the relations between folders and ciphers
let mut relations_map = HashMap::new();
- for relation in data.FolderRelationships {
- relations_map.insert(relation.Key, relation.Value);
+ for relation in data.folder_relationships {
+ relations_map.insert(relation.key, relation.value);
}
// Read and create the ciphers
- for (index, mut cipher_data) in data.Ciphers.into_iter().enumerate() {
+ for (index, mut cipher_data) in data.ciphers.into_iter().enumerate() {
let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone());
- cipher_data.FolderId = folder_uuid;
+ cipher_data.folder_id = folder_uuid;
- let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
+ let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?;
}
@@ -602,7 +597,7 @@ async fn post_ciphers_import(
#[put("/ciphers/<uuid>/admin", data = "<data>")]
async fn put_cipher_admin(
uuid: &str,
- data: JsonUpcase<CipherData>,
+ data: Json<CipherData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -613,7 +608,7 @@ async fn put_cipher_admin(
#[post("/ciphers/<uuid>/admin", data = "<data>")]
async fn post_cipher_admin(
uuid: &str,
- data: JsonUpcase<CipherData>,
+ data: Json<CipherData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -622,25 +617,19 @@ async fn post_cipher_admin(
}
#[post("/ciphers/<uuid>", data = "<data>")]
-async fn post_cipher(
- uuid: &str,
- data: JsonUpcase<CipherData>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn post_cipher(uuid: &str, data: Json<CipherData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
put_cipher(uuid, data, headers, conn, nt).await
}
#[put("/ciphers/<uuid>", data = "<data>")]
async fn put_cipher(
uuid: &str,
- data: JsonUpcase<CipherData>,
+ data: Json<CipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let data: CipherData = data.into_inner().data;
+ let data: CipherData = data.into_inner();
let mut cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher,
@@ -662,12 +651,7 @@ async fn put_cipher(
}
#[post("/ciphers/<uuid>/partial", data = "<data>")]
-async fn post_cipher_partial(
- uuid: &str,
- data: JsonUpcase<PartialCipherData>,
- headers: Headers,
- conn: DbConn,
-) -> JsonResult {
+async fn post_cipher_partial(uuid: &str, data: Json<PartialCipherData>, headers: Headers, conn: DbConn) -> JsonResult {
put_cipher_partial(uuid, data, headers, conn).await
}
@@ -675,18 +659,18 @@ async fn post_cipher_partial(
#[put("/ciphers/<uuid>/partial", data = "<data>")]
async fn put_cipher_partial(
uuid: &str,
- data: JsonUpcase<PartialCipherData>,
+ data: Json<PartialCipherData>,
headers: Headers,
mut conn: DbConn,
) -> JsonResult {
- let data: PartialCipherData = data.into_inner().data;
+ let data: PartialCipherData = data.into_inner();
let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher,
None => err!("Cipher doesn't exist"),
};
- if let Some(ref folder_id) = data.FolderId {
+ if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, &mut conn).await {
Some(folder) => {
if folder.user_uuid != headers.user.uuid {
@@ -698,23 +682,23 @@ async fn put_cipher_partial(
}
// Move cipher
- cipher.move_to_folder(data.FolderId.clone(), &headers.user.uuid, &mut conn).await?;
+ cipher.move_to_folder(data.folder_id.clone(), &headers.user.uuid, &mut conn).await?;
// Update favorite
- cipher.set_favorite(Some(data.Favorite), &headers.user.uuid, &mut conn).await?;
+ cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct CollectionsAdminData {
- CollectionIds: Vec<String>,
+ collection_ids: Vec<String>,
}
#[put("/ciphers/<uuid>/collections", data = "<data>")]
async fn put_collections_update(
uuid: &str,
- data: JsonUpcase<CollectionsAdminData>,
+ data: Json<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -725,7 +709,7 @@ async fn put_collections_update(
#[post("/ciphers/<uuid>/collections", data = "<data>")]
async fn post_collections_update(
uuid: &str,
- data: JsonUpcase<CollectionsAdminData>,
+ data: Json<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -736,7 +720,7 @@ async fn post_collections_update(
#[put("/ciphers/<uuid>/collections-admin", data = "<data>")]
async fn put_collections_admin(
uuid: &str,
- data: JsonUpcase<CollectionsAdminData>,
+ data: Json<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -747,12 +731,12 @@ async fn put_collections_admin(
#[post("/ciphers/<uuid>/collections-admin", data = "<data>")]
async fn post_collections_admin(
uuid: &str,
- data: JsonUpcase<CollectionsAdminData>,
+ data: Json<CollectionsAdminData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let data: CollectionsAdminData = data.into_inner().data;
+ let data: CollectionsAdminData = data.into_inner();
let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await {
Some(cipher) => cipher,
@@ -763,7 +747,7 @@ async fn post_collections_admin(
err!("Cipher is not write accessible")
}
- let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
+ let posted_collections: HashSet<String> = data.collection_ids.iter().cloned().collect();
let current_collections: HashSet<String> =
cipher.get_collections(headers.user.uuid.clone(), &mut conn).await.iter().cloned().collect();
@@ -811,21 +795,21 @@ async fn post_collections_admin(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ShareCipherData {
- Cipher: CipherData,
- CollectionIds: Vec<String>,
+ cipher: CipherData,
+ collection_ids: Vec<String>,
}
#[post("/ciphers/<uuid>/share", data = "<data>")]
async fn post_cipher_share(
uuid: &str,
- data: JsonUpcase<ShareCipherData>,
+ data: Json<ShareCipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let data: ShareCipherData = data.into_inner().data;
+ let data: ShareCipherData = data.into_inner();
share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await
}
@@ -833,53 +817,53 @@ async fn post_cipher_share(
#[put("/ciphers/<uuid>/share", data = "<data>")]
async fn put_cipher_share(
uuid: &str,
- data: JsonUpcase<ShareCipherData>,
+ data: Json<ShareCipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let data: ShareCipherData = data.into_inner().data;
+ let data: ShareCipherData = data.into_inner();
share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ShareSelectedCipherData {
- Ciphers: Vec<CipherData>,
- CollectionIds: Vec<String>,
+ ciphers: Vec<CipherData>,
+ collection_ids: Vec<String>,
}
#[put("/ciphers/share", data = "<data>")]
async fn put_cipher_share_selected(
- data: JsonUpcase<ShareSelectedCipherData>,
+ data: Json<ShareSelectedCipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let mut data: ShareSelectedCipherData = data.into_inner().data;
+ let mut data: ShareSelectedCipherData = data.into_inner();
- if data.Ciphers.is_empty() {
+ if data.ciphers.is_empty() {
err!("You must select at least one cipher.")
}
- if data.CollectionIds.is_empty() {
+ if data.collection_ids.is_empty() {
err!("You must select at least one collection.")
}
- for cipher in data.Ciphers.iter() {
- if cipher.Id.is_none() {
+ for cipher in data.ciphers.iter() {
+ if cipher.id.is_none() {
err!("Request missing ids field")
}
}
- while let Some(cipher) = data.Ciphers.pop() {
+ while let Some(cipher) = data.ciphers.pop() {
let mut shared_cipher_data = ShareCipherData {
- Cipher: cipher,
- CollectionIds: data.CollectionIds.clone(),
+ cipher,
+ collection_ids: data.collection_ids.clone(),
};
- match shared_cipher_data.Cipher.Id.take() {
+ match shared_cipher_data.cipher.id.take() {
Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?,
None => err!("Request missing ids field"),
};
@@ -908,8 +892,8 @@ async fn share_cipher_by_uuid(
let mut shared_to_collections = vec![];
- if let Some(organization_uuid) = &data.Cipher.OrganizationId {
- for uuid in &data.CollectionIds {
+ if let Some(organization_uuid) = &data.cipher.organization_id {
+ for uuid in &data.collection_ids {
match Collection::find_by_uuid_and_org(uuid, organization_uuid, conn).await {
None => err!("Invalid collection ID provided"),
Some(collection) => {
@@ -925,13 +909,13 @@ async fn share_cipher_by_uuid(
};
// When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate.
- let ut = if data.Cipher.LastKnownRevisionDate.is_some() {
+ let ut = if data.cipher.last_known_revision_date.is_some() {
UpdateType::SyncCipherUpdate
} else {
UpdateType::SyncCipherCreate
};
- update_cipher_from_data(&mut cipher, data.Cipher, headers, Some(shared_to_collections), conn, nt, ut).await?;
+ update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
}
@@ -961,12 +945,12 @@ async fn get_attachment(uuid: &str, attachment_id: &str, headers: Headers, mut c
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AttachmentRequestData {
- Key: String,
- FileName: String,
- FileSize: NumberOrString,
- AdminRequest: Option<bool>, // true when attaching from an org vault view
+ key: String,
+ file_name: String,
+ file_size: NumberOrString,
+ admin_request: Option<bool>, // true when attaching from an org vault view
}
enum FileUploadType {
@@ -981,7 +965,7 @@ enum FileUploadType {
#[post("/ciphers/<uuid>/attachment/v2", data = "<data>")]
async fn post_attachment_v2(
uuid: &str,
- data: JsonUpcase<AttachmentRequestData>,
+ data: Json<AttachmentRequestData>,
headers: Headers,
mut conn: DbConn,
) -> JsonResult {
@@ -994,28 +978,28 @@ async fn post_attachment_v2(
err!("Cipher is not write accessible")
}
- let data: AttachmentRequestData = data.into_inner().data;
- let file_size = data.FileSize.into_i64()?;
+ let data: AttachmentRequestData = data.into_inner();
+ let file_size = data.file_size.into_i64()?;
if file_size < 0 {
err!("Attachment size can't be negative")
}
let attachment_id = crypto::generate_attachment_id();
let attachment =
- Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, file_size, Some(data.Key));
+ Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
attachment.save(&mut conn).await.expect("Error saving attachment");
let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
- let response_key = match data.AdminRequest {
- Some(b) if b => "CipherMiniResponse",
- _ => "CipherResponse",
+ let response_key = match data.admin_request {
+ Some(b) if b => "cipherMiniResponse",
+ _ => "cipherResponse",
};
Ok(Json(json!({ // AttachmentUploadDataResponseModel
- "Object": "attachment-fileUpload",
- "AttachmentId": attachment_id,
- "Url": url,
- "FileUploadType": FileUploadType::Direct as i32,
+ "object": "attachment-fileUpload",
+ "attachmentId": attachment_id,
+ "url": url,
+ "fileUploadType": FileUploadType::Direct as i32,
response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await,
})))
}
@@ -1350,38 +1334,23 @@ async fn delete_cipher_admin(uuid: &str, headers: Headers, mut conn: DbConn, nt:
}
#[delete("/ciphers", data = "<data>")]
-async fn delete_cipher_selected(
- data: JsonUpcase<Value>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
+async fn delete_cipher_selected(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
}
#[post("/ciphers/delete", data = "<data>")]
-async fn delete_cipher_selected_post(
- data: JsonUpcase<Value>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
+async fn delete_cipher_selected_post(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
}
#[put("/ciphers/delete", data = "<data>")]
-async fn delete_cipher_selected_put(
- data: JsonUpcase<Value>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> EmptyResult {
+async fn delete_cipher_selected_put(data: Json<Value>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
_delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete
}
#[delete("/ciphers/admin", data = "<data>")]
async fn delete_cipher_selected_admin(
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -1391,7 +1360,7 @@ async fn delete_cipher_selected_admin(
#[post("/ciphers/delete-admin", data = "<data>")]
async fn delete_cipher_selected_post_admin(
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -1401,7 +1370,7 @@ async fn delete_cipher_selected_post_admin(
#[put("/ciphers/delete-admin", data = "<data>")]
async fn delete_cipher_selected_put_admin(
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -1420,33 +1389,28 @@ async fn restore_cipher_put_admin(uuid: &str, headers: Headers, mut conn: DbConn
}
#[put("/ciphers/restore", data = "<data>")]
-async fn restore_cipher_selected(
- data: JsonUpcase<Value>,
- headers: Headers,
- mut conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn restore_cipher_selected(data: Json<Value>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
_restore_multiple_ciphers(data, &headers, &mut conn, &nt).await
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct MoveCipherData {
- FolderId: Option<String>,
- Ids: Vec<String>,
+ folder_id: Option<String>,
+ ids: Vec<String>,
}
#[post("/ciphers/move", data = "<data>")]
async fn move_cipher_selected(
- data: JsonUpcase<MoveCipherData>,
+ data: Json<MoveCipherData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let data = data.into_inner().data;
+ let data = data.into_inner();
let user_uuid = headers.user.uuid;
- if let Some(ref folder_id) = data.FolderId {
+ if let Some(ref folder_id) = data.folder_id {
match Folder::find_by_uuid(folder_id, &mut conn).await {
Some(folder) => {
if folder.user_uuid != user_uuid {
@@ -1457,7 +1421,7 @@ async fn move_cipher_selected(
}
}
- for uuid in data.Ids {
+ for uuid in data.ids {
let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await {
Some(cipher) => cipher,
None => err!("Cipher doesn't exist"),
@@ -1468,7 +1432,7 @@ async fn move_cipher_selected(
}
// Move cipher
- cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &mut conn).await?;
+ cipher.move_to_folder(data.folder_id.clone(), &user_uuid, &mut conn).await?;
nt.send_cipher_update(
UpdateType::SyncCipherUpdate,
@@ -1486,7 +1450,7 @@ async fn move_cipher_selected(
#[put("/ciphers/move", data = "<data>")]
async fn move_cipher_selected_put(
- data: JsonUpcase<MoveCipherData>,
+ data: Json<MoveCipherData>,
headers: Headers,
conn: DbConn,
nt: Notify<'_>,
@@ -1503,12 +1467,12 @@ struct OrganizationId {
#[post("/ciphers/purge?<organization..>", data = "<data>")]
async fn delete_all(
organization: Option<OrganizationId>,
- data: JsonUpcase<PasswordOrOtpData>,
+ data: Json<PasswordOrOtpData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user;
data.validate(&user, true, &mut conn).await?;
@@ -1616,13 +1580,13 @@ async fn _delete_cipher_by_uuid(
}
async fn _delete_multiple_ciphers(
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: Headers,
mut conn: DbConn,
soft_delete: bool,
nt: Notify<'_>,
) -> EmptyResult {
- let data: Value = data.into_inner().data;
+ let data: Value = data.into_inner();
let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() {
@@ -1681,12 +1645,12 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &mut DbCon
}
async fn _restore_multiple_ciphers(
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: &Headers,
conn: &mut DbConn,
nt: &Notify<'_>,
) -> JsonResult {
- let data: Value = data.into_inner().data;
+ let data: Value = data.into_inner();
let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() {
@@ -1705,9 +1669,9 @@ async fn _restore_multiple_ciphers(
}
Ok(Json(json!({
- "Data": ciphers,
- "Object": "list",
- "ContinuationToken": null
+ "data": ciphers,
+ "object": "list",
+ "continuationToken": null
})))
}
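The ciphers.rs hunks above all follow the same two-part pattern: request structs gain #[serde(rename_all = "camelCase")], and handlers take Json<T> with into_inner() instead of the old JsonUpcase<T> wrapper and its .data field. A minimal standalone sketch of the resulting deserialization behaviour, assuming only serde and serde_json (the struct mirrors MoveCipherData from the diff; the values are made up):

// Illustrative only; not part of the patch.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct MoveCipherDataSketch {
    folder_id: Option<String>,
    ids: Vec<String>,
}

fn main() {
    // Clients now send camelCase keys; the snake_case fields map onto them automatically.
    let body = r#"{"folderId":"folder-uuid","ids":["cipher-uuid-1","cipher-uuid-2"]}"#;
    let data: MoveCipherDataSketch = serde_json::from_str(body).unwrap();
    assert_eq!(data.folder_id.as_deref(), Some("folder-uuid"));
    assert_eq!(data.ids.len(), 2);
}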
diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs
index 5d522c61..8f9e0015 100644
--- a/src/api/core/emergency_access.rs
+++ b/src/api/core/emergency_access.rs
@@ -5,7 +5,7 @@ use serde_json::Value;
use crate::{
api::{
core::{CipherSyncData, CipherSyncType},
- EmptyResult, JsonResult, JsonUpcase,
+ EmptyResult, JsonResult,
},
auth::{decode_emergency_access_invite, Headers},
db::{models::*, DbConn, DbPool},
@@ -43,19 +43,19 @@ pub fn routes() -> Vec<Route> {
async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
if !CONFIG.emergency_access_allowed() {
return Json(json!({
- "Data": [{
- "Id": "",
- "Status": 2,
- "Type": 0,
- "WaitTimeDays": 0,
- "GranteeId": "",
- "Email": "",
- "Name": "NOTE: Emergency Access is disabled!",
- "Object": "emergencyAccessGranteeDetails",
+ "data": [{
+ "id": "",
+ "status": 2,
+ "type": 0,
+ "waitTimeDays": 0,
+ "granteeId": "",
+ "email": "",
+ "name": "NOTE: Emergency Access is disabled!",
+ "object": "emergencyAccessGranteeDetails",
}],
- "Object": "list",
- "ContinuationToken": null
+ "object": "list",
+ "continuationToken": null
}));
}
let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await;
@@ -67,9 +67,9 @@ async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
}
Json(json!({
- "Data": emergency_access_list_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": emergency_access_list_json,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -86,9 +86,9 @@ async fn get_grantees(headers: Headers, mut conn: DbConn) -> Json<Value> {
}
Json(json!({
- "Data": emergency_access_list_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": emergency_access_list_json,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -109,42 +109,38 @@ async fn get_emergency_access(emer_id: &str, mut conn: DbConn) -> JsonResult {
// region put/post
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EmergencyAccessUpdateData {
- Type: NumberOrString,
- WaitTimeDays: i32,
- KeyEncrypted: Option<String>,
+ r#type: NumberOrString,
+ wait_time_days: i32,
+ key_encrypted: Option<String>,
}
#[put("/emergency-access/<emer_id>", data = "<data>")]
-async fn put_emergency_access(emer_id: &str, data: JsonUpcase<EmergencyAccessUpdateData>, conn: DbConn) -> JsonResult {
+async fn put_emergency_access(emer_id: &str, data: Json<EmergencyAccessUpdateData>, conn: DbConn) -> JsonResult {
post_emergency_access(emer_id, data, conn).await
}
#[post("/emergency-access/<emer_id>", data = "<data>")]
-async fn post_emergency_access(
- emer_id: &str,
- data: JsonUpcase<EmergencyAccessUpdateData>,
- mut conn: DbConn,
-) -> JsonResult {
+async fn post_emergency_access(emer_id: &str, data: Json<EmergencyAccessUpdateData>, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
- let data: EmergencyAccessUpdateData = data.into_inner().data;
+ let data: EmergencyAccessUpdateData = data.into_inner();
let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
Some(emergency_access) => emergency_access,
None => err!("Emergency access not valid."),
};
- let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) {
+ let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type as i32,
None => err!("Invalid emergency access type."),
};
emergency_access.atype = new_type;
- emergency_access.wait_time_days = data.WaitTimeDays;
- if data.KeyEncrypted.is_some() {
- emergency_access.key_encrypted = data.KeyEncrypted;
+ emergency_access.wait_time_days = data.wait_time_days;
+ if data.key_encrypted.is_some() {
+ emergency_access.key_encrypted = data.key_encrypted;
}
emergency_access.save(&mut conn).await?;
@@ -184,24 +180,24 @@ async fn post_delete_emergency_access(emer_id: &str, headers: Headers, conn: DbC
// region invite
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EmergencyAccessInviteData {
- Email: String,
- Type: NumberOrString,
- WaitTimeDays: i32,
+ email: String,
+ r#type: NumberOrString,
+ wait_time_days: i32,
}
#[post("/emergency-access/invite", data = "<data>")]
-async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?;
- let data: EmergencyAccessInviteData = data.into_inner().data;
- let email = data.Email.to_lowercase();
- let wait_time_days = data.WaitTimeDays;
+ let data: EmergencyAccessInviteData = data.into_inner();
+ let email = data.email.to_lowercase();
+ let wait_time_days = data.wait_time_days;
let emergency_access_status = EmergencyAccessStatus::Invited as i32;
- let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) {
+ let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type as i32,
None => err!("Invalid emergency access type."),
};
@@ -319,17 +315,17 @@ async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> Emp
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AcceptData {
- Token: String,
+ token: String,
}
#[post("/emergency-access/<emer_id>/accept", data = "<data>")]
-async fn accept_invite(emer_id: &str, data: JsonUpcase<AcceptData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn accept_invite(emer_id: &str, data: Json<AcceptData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?;
- let data: AcceptData = data.into_inner().data;
- let token = &data.Token;
+ let data: AcceptData = data.into_inner();
+ let token = &data.token;
let claims = decode_emergency_access_invite(token)?;
// This can happen if the user who received the invite used a different email to signup.
@@ -374,23 +370,23 @@ async fn accept_invite(emer_id: &str, data: JsonUpcase<AcceptData>, headers: Hea
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ConfirmData {
- Key: String,
+ key: String,
}
#[post("/emergency-access/<emer_id>/confirm", data = "<data>")]
async fn confirm_emergency_access(
emer_id: &str,
- data: JsonUpcase<ConfirmData>,
+ data: Json<ConfirmData>,
headers: Headers,
mut conn: DbConn,
) -> JsonResult {
check_emergency_access_enabled()?;
let confirming_user = headers.user;
- let data: ConfirmData = data.into_inner().data;
- let key = data.Key;
+ let data: ConfirmData = data.into_inner();
+ let key = data.key;
let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
Some(emer) => emer,
@@ -585,9 +581,9 @@ async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn
}
Ok(Json(json!({
- "Ciphers": ciphers_json,
- "KeyEncrypted": &emergency_access.key_encrypted,
- "Object": "emergencyAccessView",
+ "ciphers": ciphers_json,
+ "keyEncrypted": &emergency_access.key_encrypted,
+ "object": "emergencyAccessView",
})))
}
@@ -611,35 +607,35 @@ async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
};
let result = json!({
- "Kdf": grantor_user.client_kdf_type,
- "KdfIterations": grantor_user.client_kdf_iter,
- "KdfMemory": grantor_user.client_kdf_memory,
- "KdfParallelism": grantor_user.client_kdf_parallelism,
- "KeyEncrypted": &emergency_access.key_encrypted,
- "Object": "emergencyAccessTakeover",
+ "kdf": grantor_user.client_kdf_type,
+ "kdfIterations": grantor_user.client_kdf_iter,
+ "kdfMemory": grantor_user.client_kdf_memory,
+ "kdfParallelism": grantor_user.client_kdf_parallelism,
+ "keyEncrypted": &emergency_access.key_encrypted,
+ "object": "emergencyAccessTakeover",
});
Ok(Json(result))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EmergencyAccessPasswordData {
- NewMasterPasswordHash: String,
- Key: String,
+ new_master_password_hash: String,
+ key: String,
}
#[post("/emergency-access/<emer_id>/password", data = "<data>")]
async fn password_emergency_access(
emer_id: &str,
- data: JsonUpcase<EmergencyAccessPasswordData>,
+ data: Json<EmergencyAccessPasswordData>,
headers: Headers,
mut conn: DbConn,
) -> EmptyResult {
check_emergency_access_enabled()?;
- let data: EmergencyAccessPasswordData = data.into_inner().data;
- let new_master_password_hash = &data.NewMasterPasswordHash;
+ let data: EmergencyAccessPasswordData = data.into_inner();
+ let new_master_password_hash = &data.new_master_password_hash;
//let key = &data.Key;
let requesting_user = headers.user;
@@ -658,7 +654,7 @@ async fn password_emergency_access(
};
// change grantor_user password
- grantor_user.set_password(new_master_password_hash, Some(data.Key), true, None);
+ grantor_user.set_password(new_master_password_hash, Some(data.key), true, None);
grantor_user.save(&mut conn).await?;
// Disable TwoFactor providers since they will otherwise block logins
@@ -696,9 +692,9 @@ async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: Db
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
Ok(Json(json!({
- "Data": policies_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": policies_json,
+ "object": "list",
+ "continuationToken": null
})))
}
diff --git a/src/api/core/events.rs b/src/api/core/events.rs
index dd6b92e0..484094f5 100644
--- a/src/api/core/events.rs
+++ b/src/api/core/events.rs
@@ -5,7 +5,7 @@ use rocket::{form::FromForm, serde::json::Json, Route};
use serde_json::Value;
use crate::{
- api::{EmptyResult, JsonResult, JsonUpcaseVec},
+ api::{EmptyResult, JsonResult},
auth::{AdminHeaders, Headers},
db::{
models::{Cipher, Event, UserOrganization},
@@ -22,7 +22,6 @@ pub fn routes() -> Vec<Route> {
}
#[derive(FromForm)]
-#[allow(non_snake_case)]
struct EventRange {
start: String,
end: String,
@@ -53,9 +52,9 @@ async fn get_org_events(org_id: &str, data: EventRange, _headers: AdminHeaders,
};
Ok(Json(json!({
- "Data": events_json,
- "Object": "list",
- "ContinuationToken": get_continuation_token(&events_json),
+ "data": events_json,
+ "object": "list",
+ "continuationToken": get_continuation_token(&events_json),
})))
}
@@ -85,9 +84,9 @@ async fn get_cipher_events(cipher_id: &str, data: EventRange, headers: Headers,
};
Ok(Json(json!({
- "Data": events_json,
- "Object": "list",
- "ContinuationToken": get_continuation_token(&events_json),
+ "data": events_json,
+ "object": "list",
+ "continuationToken": get_continuation_token(&events_json),
})))
}
@@ -119,9 +118,9 @@ async fn get_user_events(
};
Ok(Json(json!({
- "Data": events_json,
- "Object": "list",
- "ContinuationToken": get_continuation_token(&events_json),
+ "data": events_json,
+ "object": "list",
+ "continuationToken": get_continuation_token(&events_json),
})))
}
@@ -145,33 +144,33 @@ pub fn main_routes() -> Vec<Route> {
routes![post_events_collect,]
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct EventCollection {
// Mandatory
- Type: i32,
- Date: String,
+ r#type: i32,
+ date: String,
// Optional
- CipherId: Option<String>,
- OrganizationId: Option<String>,
+ cipher_id: Option<String>,
+ organization_id: Option<String>,
}
// Upstream:
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs
// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
#[post("/collect", format = "application/json", data = "<data>")]
-async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.org_events_enabled() {
return Ok(());
}
- for event in data.iter().map(|d| &d.data) {
- let event_date = parse_date(&event.Date);
- match event.Type {
+ for event in data.iter() {
+ let event_date = parse_date(&event.date);
+ match event.r#type {
1000..=1099 => {
_log_user_event(
- event.Type,
+ event.r#type,
&headers.user.uuid,
headers.device.atype,
Some(event_date),
@@ -181,9 +180,9 @@ async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Head
.await;
}
1600..=1699 => {
- if let Some(org_uuid) = &event.OrganizationId {
+ if let Some(org_uuid) = &event.organization_id {
_log_event(
- event.Type,
+ event.r#type,
org_uuid,
org_uuid,
&headers.user.uuid,
@@ -196,11 +195,11 @@ async fn post_events_collect(data: JsonUpcaseVec<EventCollection>, headers: Head
}
}
_ => {
- if let Some(cipher_uuid) = &event.CipherId {
+ if let Some(cipher_uuid) = &event.cipher_id {
if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await {
if let Some(org_uuid) = cipher.organization_uuid {
_log_event(
- event.Type,
+ event.r#type,
cipher_uuid,
&org_uuid,
&headers.user.uuid,
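With JsonUpcaseVec gone, post_events_collect receives the request body directly as Json<Vec<EventCollection>>, so the extra .data indirection disappears from the loop. A standalone sketch of how such an array deserializes, again assuming only serde and serde_json; the struct mirrors EventCollection above, and the field named "type" is reached through the raw identifier r#type, which serde maps to the plain JSON key:

// Illustrative only; not part of the patch.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct EventCollectionSketch {
    r#type: i32,
    date: String,
    cipher_id: Option<String>,       // a missing key deserializes to None
    organization_id: Option<String>, // a missing key deserializes to None
}

fn main() {
    // The request body is now a bare JSON array of camelCase objects.
    let body = r#"[
        {"type": 1000, "date": "2024-06-16T21:39:30.000Z"},
        {"type": 1600, "date": "2024-06-16T21:40:00.000Z", "organizationId": "org-uuid"}
    ]"#;
    let events: Vec<EventCollectionSketch> = serde_json::from_str(body).unwrap();
    for event in &events {
        println!("event {} at {}", event.r#type, event.date);
    }
}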
diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs
index 3af1285c..fd9ce6a0 100644
--- a/src/api/core/folders.rs
+++ b/src/api/core/folders.rs
@@ -2,7 +2,7 @@ use rocket::serde::json::Json;
use serde_json::Value;
use crate::{
- api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
+ api::{EmptyResult, JsonResult, Notify, UpdateType},
auth::Headers,
db::{models::*, DbConn},
};
@@ -17,9 +17,9 @@ async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
Json(json!({
- "Data": folders_json,
- "Object": "list",
- "ContinuationToken": null,
+ "data": folders_json,
+ "object": "list",
+ "continuationToken": null,
}))
}
@@ -38,16 +38,16 @@ async fn get_folder(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResul
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct FolderData {
- pub Name: String,
+ pub name: String,
}
#[post("/folders", data = "<data>")]
-async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
- let data: FolderData = data.into_inner().data;
+async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
+ let data: FolderData = data.into_inner();
- let mut folder = Folder::new(headers.user.uuid, data.Name);
+ let mut folder = Folder::new(headers.user.uuid, data.name);
folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await;
@@ -56,25 +56,19 @@ async fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, mut conn:
}
#[post("/folders/<uuid>", data = "<data>")]
-async fn post_folder(
- uuid: &str,
- data: JsonUpcase<FolderData>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn post_folder(uuid: &str, data: Json<FolderData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
put_folder(uuid, data, headers, conn, nt).await
}
#[put("/folders/<uuid>", data = "<data>")]
async fn put_folder(
uuid: &str,
- data: JsonUpcase<FolderData>,
+ data: Json<FolderData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let data: FolderData = data.into_inner().data;
+ let data: FolderData = data.into_inner();
let mut folder = match Folder::find_by_uuid(uuid, &mut conn).await {
Some(folder) => folder,
@@ -85,7 +79,7 @@ async fn put_folder(
err!("Folder belongs to another user")
}
- folder.name = data.Name;
+ folder.name = data.name;
folder.save(&mut conn).await?;
nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await;
diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs
index 1d31b27c..9da0e886 100644
--- a/src/api/core/mod.rs
+++ b/src/api/core/mod.rs
@@ -49,19 +49,19 @@ pub fn events_routes() -> Vec<Route> {
use rocket::{serde::json::Json, serde::json::Value, Catcher, Route};
use crate::{
- api::{JsonResult, JsonUpcase, Notify, UpdateType},
+ api::{JsonResult, Notify, UpdateType},
auth::Headers,
db::DbConn,
error::Error,
util::{get_reqwest_client, parse_experimental_client_feature_flags},
};
-#[derive(Serialize, Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct GlobalDomain {
- Type: i32,
- Domains: Vec<String>,
- Excluded: bool,
+ r#type: i32,
+ domains: Vec<String>,
+ excluded: bool,
}
const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");
@@ -81,38 +81,38 @@ fn _get_eq_domains(headers: Headers, no_excluded: bool) -> Json<Value> {
let mut globals: Vec<GlobalDomain> = from_str(GLOBAL_DOMAINS).unwrap();
for global in &mut globals {
- global.Excluded = excluded_globals.contains(&global.Type);
+ global.excluded = excluded_globals.contains(&global.r#type);
}
if no_excluded {
- globals.retain(|g| !g.Excluded);
+ globals.retain(|g| !g.excluded);
}
Json(json!({
- "EquivalentDomains": equivalent_domains,
- "GlobalEquivalentDomains": globals,
- "Object": "domains",
+ "equivalentDomains": equivalent_domains,
+ "globalEquivalentDomains": globals,
+ "object": "domains",
}))
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct EquivDomainData {
- ExcludedGlobalEquivalentDomains: Option<Vec<i32>>,
- EquivalentDomains: Option<Vec<Vec<String>>>,
+ excluded_global_equivalent_domains: Option<Vec<i32>>,
+ equivalent_domains: Option<Vec<Vec<String>>>,
}
#[post("/settings/domains", data = "<data>")]
async fn post_eq_domains(
- data: JsonUpcase<EquivDomainData>,
+ data: Json<EquivDomainData>,
headers: Headers,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
- let data: EquivDomainData = data.into_inner().data;
+ let data: EquivDomainData = data.into_inner();
- let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default();
- let equivalent_domains = data.EquivalentDomains.unwrap_or_default();
+ let excluded_globals = data.excluded_global_equivalent_domains.unwrap_or_default();
+ let equivalent_domains = data.equivalent_domains.unwrap_or_default();
let mut user = headers.user;
use serde_json::to_string;
@@ -128,12 +128,7 @@ async fn post_eq_domains(
}
#[put("/settings/domains", data = "<data>")]
-async fn put_eq_domains(
- data: JsonUpcase<EquivDomainData>,
- headers: Headers,
- conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn put_eq_domains(data: Json<EquivDomainData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
post_eq_domains(data, headers, conn, nt).await
}
@@ -157,15 +152,15 @@ async fn hibp_breach(username: &str) -> JsonResult {
Ok(Json(value))
} else {
Ok(Json(json!([{
- "Name": "HaveIBeenPwned",
- "Title": "Manual HIBP Check",
- "Domain": "haveibeenpwned.com",
- "BreachDate": "2019-08-18T00:00:00Z",
- "AddedDate": "2019-08-18T00:00:00Z",
- "Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{username}\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/account/{username}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>"),
- "LogoPath": "vw_static/hibp.png",
- "PwnCount": 0,
- "DataClasses": [
+ "name": "HaveIBeenPwned",
+ "title": "Manual HIBP Check",
+ "domain": "haveibeenpwned.com",
+ "breachDate": "2019-08-18T00:00:00Z",
+ "addedDate": "2019-08-18T00:00:00Z",
+ "description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{username}\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/account/{username}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>"),
+ "logoPath": "vw_static/hibp.png",
+ "pwnCount": 0,
+ "dataClasses": [
"Error - No API key set!"
]
}])))
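GlobalDomain derives both Serialize and Deserialize, so the single rename_all attribute governs both directions: the bundled JSON read via from_str(GLOBAL_DOMAINS) must use matching camelCase keys, and the same struct serializes back out with those keys in the domains response. A small round-trip sketch under the same assumptions as above (serde plus serde_json; the values are made up):

// Illustrative only; not part of the patch.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GlobalDomainSketch {
    r#type: i32,
    domains: Vec<String>,
    excluded: bool,
}

fn main() {
    // Deserializing mirrors the from_str(GLOBAL_DOMAINS) call above...
    let json = r#"{"type": 2, "domains": ["example.com", "example.net"], "excluded": false}"#;
    let mut entry: GlobalDomainSketch = serde_json::from_str(json).unwrap();

    // ...and serializing produces the same camelCase keys for the response body.
    entry.excluded = true;
    assert_eq!(
        serde_json::to_string(&entry).unwrap(),
        r#"{"type":2,"domains":["example.com","example.net"],"excluded":true}"#
    );
}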
diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs
index c6556f77..0708f91c 100644
--- a/src/api/core/organizations.rs
+++ b/src/api/core/organizations.rs
@@ -6,7 +6,7 @@ use serde_json::Value;
use crate::{
api::{
core::{log_event, two_factor, CipherSyncData, CipherSyncType},
- EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, JsonVec, Notify, PasswordOrOtpData, UpdateType,
+ EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType,
},
auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
db::{models::*, DbConn},
@@ -100,56 +100,56 @@ pub fn routes() -> Vec<Route> {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgData {
- BillingEmail: String,
- CollectionName: String,
- Key: String,
- Name: String,
- Keys: Option<OrgKeyData>,
- #[serde(rename = "PlanType")]
- _PlanType: NumberOrString, // Ignored, always use the same plan
+ billing_email: String,
+ collection_name: String,
+ key: String,
+ name: String,
+ keys: Option<OrgKeyData>,
+ #[allow(dead_code)]
+ plan_type: NumberOrString, // Ignored, always use the same plan
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrganizationUpdateData {
- BillingEmail: String,
- Name: String,
+ billing_email: String,
+ name: String,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct NewCollectionData {
- Name: String,
- Groups: Vec<NewCollectionObjectData>,
- Users: Vec<NewCollectionObjectData>,
- ExternalId: Option<String>,
+ name: String,
+ groups: Vec<NewCollectionObjectData>,
+ users: Vec<NewCollectionObjectData>,
+ external_id: Option<String>,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct NewCollectionObjectData {
- HidePasswords: bool,
- Id: String,
- ReadOnly: bool,
+ hide_passwords: bool,
+ id: String,
+ read_only: bool,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgKeyData {
- EncryptedPrivateKey: String,
- PublicKey: String,
+ encrypted_private_key: String,
+ public_key: String,
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgBulkIds {
- Ids: Vec<String>,
+ ids: Vec<String>,
}
#[post("/organizations", data = "<data>")]
-async fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, mut conn: DbConn) -> JsonResult {
+async fn create_organization(headers: Headers, data: Json<OrgData>, mut conn: DbConn) -> JsonResult {
if !CONFIG.is_org_creation_allowed(&headers.user.email) {
err!("User not allowed to create organizations")
}
@@ -159,19 +159,19 @@ async fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, mut co
)
}
- let data: OrgData = data.into_inner().data;
- let (private_key, public_key) = if data.Keys.is_some() {
- let keys: OrgKeyData = data.Keys.unwrap();
- (Some(keys.EncryptedPrivateKey), Some(keys.PublicKey))
+ let data: OrgData = data.into_inner();
+ let (private_key, public_key) = if data.keys.is_some() {
+ let keys: OrgKeyData = data.keys.unwrap();
+ (Some(keys.encrypted_private_key), Some(keys.public_key))
} else {
(None, None)
};
- let org = Organization::new(data.Name, data.BillingEmail, private_key, public_key);
+ let org = Organization::new(data.name, data.billing_email, private_key, public_key);
let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone());
- let collection = Collection::new(org.uuid.clone(), data.CollectionName, None);
+ let collection = Collection::new(org.uuid.clone(), data.collection_name, None);
- user_org.akey = data.Key;
+ user_org.akey = data.key;
user_org.access_all = true;
user_org.atype = UserOrgType::Owner as i32;
user_org.status = UserOrgStatus::Confirmed as i32;
@@ -186,11 +186,11 @@ async fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, mut co
#[delete("/organizations/<org_id>", data = "<data>")]
async fn delete_organization(
org_id: &str,
- data: JsonUpcase<PasswordOrOtpData>,
+ data: Json<PasswordOrOtpData>,
headers: OwnerHeaders,
mut conn: DbConn,
) -> EmptyResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
data.validate(&headers.user, true, &mut conn).await?;
@@ -203,7 +203,7 @@ async fn delete_organization(
#[post("/organizations/<org_id>/delete", data = "<data>")]
async fn post_delete_organization(
org_id: &str,
- data: JsonUpcase<PasswordOrOtpData>,
+ data: Json<PasswordOrOtpData>,
headers: OwnerHeaders,
conn: DbConn,
) -> EmptyResult {
@@ -249,7 +249,7 @@ async fn get_organization(org_id: &str, _headers: OwnerHeaders, mut conn: DbConn
async fn put_organization(
org_id: &str,
headers: OwnerHeaders,
- data: JsonUpcase<OrganizationUpdateData>,
+ data: Json<OrganizationUpdateData>,
conn: DbConn,
) -> JsonResult {
post_organization(org_id, headers, data, conn).await
@@ -259,18 +259,18 @@ async fn put_organization(
async fn post_organization(
org_id: &str,
headers: OwnerHeaders,
- data: JsonUpcase<OrganizationUpdateData>,
+ data: Json<OrganizationUpdateData>,
mut conn: DbConn,
) -> JsonResult {
- let data: OrganizationUpdateData = data.into_inner().data;
+ let data: OrganizationUpdateData = data.into_inner();
let mut org = match Organization::find_by_uuid(org_id, &mut conn).await {
Some(organization) => organization,
None => err!("Can't find organization details"),
};
- org.name = data.Name;
- org.billing_email = data.BillingEmail;
+ org.name = data.name;
+ org.billing_email = data.billing_email;
org.save(&mut conn).await?;
@@ -292,22 +292,22 @@ async fn post_organization(
#[get("/collections")]
async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json<Value> {
Json(json!({
- "Data":
+ "data":
Collection::find_by_user_uuid(headers.user.uuid, &mut conn).await
.iter()
.map(Collection::to_json)
.collect::<Value>(),
- "Object": "list",
- "ContinuationToken": null,
+ "object": "list",
+ "continuationToken": null,
}))
}
#[get("/organizations/<org_id>/collections")]
async fn get_org_collections(org_id: &str, _headers: ManagerHeadersLoose, mut conn: DbConn) -> Json<Value> {
Json(json!({
- "Data": _get_org_collections(org_id, &mut conn).await,
- "Object": "list",
- "ContinuationToken": null,
+ "data": _get_org_collections(org_id, &mut conn).await,
+ "object": "list",
+ "continuationToken": null,
}))
}
@@ -356,17 +356,17 @@ async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose,
};
let mut json_object = col.to_json();
- json_object["Assigned"] = json!(assigned);
- json_object["Users"] = json!(users);
- json_object["Groups"] = json!(groups);
- json_object["Object"] = json!("collectionAccessDetails");
+ json_object["assigned"] = json!(assigned);
+ json_object["users"] = json!(users);
+ json_object["groups"] = json!(groups);
+ json_object["object"] = json!("collectionAccessDetails");
data.push(json_object)
}
Ok(Json(json!({
- "Data": data,
- "Object": "list",
- "ContinuationToken": null,
+ "data": data,
+ "object": "list",
+ "continuationToken": null,
})))
}
@@ -378,17 +378,17 @@ async fn _get_org_collections(org_id: &str, conn: &mut DbConn) -> Value {
async fn post_organization_collections(
org_id: &str,
headers: ManagerHeadersLoose,
- data: JsonUpcase<NewCollectionData>,
+ data: Json<NewCollectionData>,
mut conn: DbConn,
) -> JsonResult {
- let data: NewCollectionData = data.into_inner().data;
+ let data: NewCollectionData = data.into_inner();
let org = match Organization::find_by_uuid(org_id, &mut conn).await {
Some(organization) => organization,
None => err!("Can't find organization details"),
};
- let collection = Collection::new(org.uuid, data.Name, data.ExternalId);
+ let collection = Collection::new(org.uuid, data.name, data.external_id);
collection.save(&mut conn).await?;
log_event(
@@ -402,14 +402,14 @@ async fn post_organization_collections(
)
.await;
- for group in data.Groups {
- CollectionGroup::new(collection.uuid.clone(), group.Id, group.ReadOnly, group.HidePasswords)
+ for group in data.groups {
+ CollectionGroup::new(collection.uuid.clone(), group.id, group.read_only, group.hide_passwords)
.save(&mut conn)
.await?;
}
- for user in data.Users {
- let org_user = match UserOrganization::find_by_uuid(&user.Id, &mut conn).await {
+ for user in data.users {
+ let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await {
Some(u) => u,
None => err!("User is not part of organization"),
};
@@ -418,7 +418,7 @@ async fn post_organization_collections(
continue;
}
- CollectionUser::save(&org_user.user_uuid, &collection.uuid, user.ReadOnly, user.HidePasswords, &mut conn)
+ CollectionUser::save(&org_user.user_uuid, &collection.uuid, user.read_only, user.hide_passwords, &mut conn)
.await?;
}
@@ -434,7 +434,7 @@ async fn put_organization_collection_update(
org_id: &str,
col_id: &str,
headers: ManagerHeaders,
- data: JsonUpcase<NewCollectionData>,
+ data: Json<NewCollectionData>,
conn: DbConn,
) -> JsonResult {
post_organization_collection_update(org_id, col_id, headers, data, conn).await
@@ -445,10 +445,10 @@ async fn post_organization_collection_update(
org_id: &str,
col_id: &str,
headers: ManagerHeaders,
- data: JsonUpcase<NewCollectionData>,
+ data: Json<NewCollectionData>,
mut conn: DbConn,
) -> JsonResult {
- let data: NewCollectionData = data.into_inner().data;
+ let data: NewCollectionData = data.into_inner();
let org = match Organization::find_by_uuid(org_id, &mut conn).await {
Some(organization) => organization,
@@ -464,8 +464,8 @@ async fn post_organization_collection_update(
err!("Collection is not owned by organization");
}
- collection.name = data.Name;
- collection.external_id = match data.ExternalId {
+ collection.name = data.name;
+ collection.external_id = match data.external_id {
Some(external_id) if !external_id.trim().is_empty() => Some(external_id),
_ => None,
};
@@ -485,16 +485,16 @@ async fn post_organization_collection_update(
CollectionGroup::delete_all_by_collection(col_id, &mut conn).await?;
- for group in data.Groups {
- CollectionGroup::new(String::from(col_id), group.Id, group.ReadOnly, group.HidePasswords)
+ for group in data.groups {
+ CollectionGroup::new(String::from(col_id), group.id, group.read_only, group.hide_passwords)
.save(&mut conn)
.await?;
}
CollectionUser::delete_all_by_collection(col_id, &mut conn).await?;
- for user in data.Users {
- let org_user = match UserOrganization::find_by_uuid(&user.Id, &mut conn).await {
+ for user in data.users {
+ let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await {
Some(u) => u,
None => err!("User is not part of organization"),
};
@@ -503,7 +503,7 @@ async fn post_organization_collection_update(
continue;
}
- CollectionUser::save(&org_user.user_uuid, col_id, user.ReadOnly, user.HidePasswords, &mut conn).await?;
+ CollectionUser::save(&org_user.user_uuid, col_id, user.read_only, user.hide_passwords, &mut conn).await?;
}
Ok(Json(collection.to_json()))
@@ -589,10 +589,12 @@ async fn delete_organization_collection(
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case, dead_code)]
+#[serde(rename_all = "camelCase")]
struct DeleteCollectionData {
- Id: String,
- OrgId: String,
+ #[allow(dead_code)]
+ id: String,
+ #[allow(dead_code)]
+ org_id: String,
}
#[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<_data>")]
@@ -600,28 +602,28 @@ async fn post_organization_collection_delete(
org_id: &str,
col_id: &str,
headers: ManagerHeaders,
- _data: JsonUpcase<DeleteCollectionData>,
+ _data: Json<DeleteCollectionData>,
mut conn: DbConn,
) -> EmptyResult {
_delete_organization_collection(org_id, col_id, &headers, &mut conn).await
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct BulkCollectionIds {
- Ids: Vec<String>,
+ ids: Vec<String>,
}
#[delete("/organizations/<org_id>/collections", data = "<data>")]
async fn bulk_delete_organization_collections(
org_id: &str,
headers: ManagerHeadersLoose,
- data: JsonUpcase<BulkCollectionIds>,
+ data: Json<BulkCollectionIds>,
mut conn: DbConn,
) -> EmptyResult {
- let data: BulkCollectionIds = data.into_inner().data;
+ let data: BulkCollectionIds = data.into_inner();
- let collections = data.Ids;
+ let collections = data.ids;
let headers = ManagerHeaders::from_loose(headers, &collections, &mut conn).await?;
@@ -676,10 +678,10 @@ async fn get_org_collection_detail(
let assigned = Collection::can_access_collection(&user_org, &collection.uuid, &mut conn).await;
let mut json_object = collection.to_json();
- json_object["Assigned"] = json!(assigned);
- json_object["Users"] = json!(users);
- json_object["Groups"] = json!(groups);
- json_object["Object"] = json!("collectionAccessDetails");
+ json_object["assigned"] = json!(assigned);
+ json_object["users"] = json!(users);
+ json_object["groups"] = json!(groups);
+ json_object["object"] = json!("collectionAccessDetails");
Ok(Json(json_object))
}
@@ -711,7 +713,7 @@ async fn get_collection_users(org_id: &str, coll_id: &str, _headers: ManagerHead
async fn put_collection_users(
org_id: &str,
coll_id: &str,
- data: JsonUpcaseVec<CollectionData>,
+ data: Json<Vec<CollectionData>>,
_headers: ManagerHeaders,
mut conn: DbConn,
) -> EmptyResult {
@@ -724,8 +726,8 @@ async fn put_collection_users(
CollectionUser::delete_all_by_collection(coll_id, &mut conn).await?;
// And then add all the received ones (except if the user has access_all)
- for d in data.iter().map(|d| &d.data) {
- let user = match UserOrganization::find_by_uuid(&d.Id, &mut conn).await {
+ for d in data.iter() {
+ let user = match UserOrganization::find_by_uuid(&d.id, &mut conn).await {
Some(u) => u,
None => err!("User is not part of organization"),
};
@@ -734,7 +736,7 @@ async fn put_collection_users(
continue;
}
- CollectionUser::save(&user.user_uuid, coll_id, d.ReadOnly, d.HidePasswords, &mut conn).await?;
+ CollectionUser::save(&user.user_uuid, coll_id, d.read_only, d.hide_passwords, &mut conn).await?;
}
Ok(())
@@ -749,9 +751,9 @@ struct OrgIdData {
#[get("/ciphers/organization-details?<data..>")]
async fn get_org_details(data: OrgIdData, headers: Headers, mut conn: DbConn) -> Json<Value> {
Json(json!({
- "Data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await,
- "Object": "list",
- "ContinuationToken": null,
+ "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await,
+ "object": "list",
+ "continuationToken": null,
}))
}
@@ -795,20 +797,15 @@ async fn get_org_users(
}
Json(json!({
- "Data": users_json,
- "Object": "list",
- "ContinuationToken": null,
+ "data": users_json,
+ "object": "list",
+ "continuationToken": null,
}))
}
#[post("/organizations/<org_id>/keys", data = "<data>")]
-async fn post_org_keys(
- org_id: &str,
- data: JsonUpcase<OrgKeyData>,
- _headers: AdminHeaders,
- mut conn: DbConn,
-) -> JsonResult {
- let data: OrgKeyData = data.into_inner().data;
+async fn post_org_keys(org_id: &str, data: Json<OrgKeyData>, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
+ let data: OrgKeyData = data.into_inner();
let mut org = match Organization::find_by_uuid(org_id, &mut conn).await {
Some(organization) => {
@@ -820,46 +817,41 @@ async fn post_org_keys(
None => err!("Can't find organization details"),
};
- org.private_key = Some(data.EncryptedPrivateKey);
- org.public_key = Some(data.PublicKey);
+ org.private_key = Some(data.encrypted_private_key);
+ org.public_key = Some(data.public_key);
org.save(&mut conn).await?;
Ok(Json(json!({
- "Object": "organizationKeys",
- "PublicKey": org.public_key,
- "PrivateKey": org.private_key,
+ "object": "organizationKeys",
+ "publicKey": org.public_key,
+ "privateKey": org.private_key,
})))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct CollectionData {
- Id: String,
- ReadOnly: bool,
- HidePasswords: bool,
+ id: String,
+ read_only: bool,
+ hide_passwords: bool,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct InviteData {
- Emails: Vec<String>,
- Groups: Vec<String>,
- Type: NumberOrString,
- Collections: Option<Vec<CollectionData>>,
- AccessAll: Option<bool>,
+ emails: Vec<String>,
+ groups: Vec<String>,
+ r#type: NumberOrString,
+ collections: Option<Vec<CollectionData>>,
+ access_all: Option<bool>,
}
#[post("/organizations/<org_id>/users/invite", data = "<data>")]
-async fn send_invite(
- org_id: &str,
- data: JsonUpcase<InviteData>,
- headers: AdminHeaders,
- mut conn: DbConn,
-) -> EmptyResult {
- let data: InviteData = data.into_inner().data;
+async fn send_invite(org_id: &str, data: Json<InviteData>, headers: AdminHeaders, mut conn: DbConn) -> EmptyResult {
+ let data: InviteData = data.into_inner();
- let new_type = match UserOrgType::from_str(&data.Type.into_string()) {
+ let new_type = match UserOrgType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type as i32,
None => err!("Invalid type"),
};
@@ -868,7 +860,7 @@ async fn send_invite(
err!("Only Owners can invite Managers, Admins or Owners")
}
- for email in data.Emails.iter() {
+ for email in data.emails.iter() {
let email = email.to_lowercase();
let mut user_org_status = UserOrgStatus::Invited as i32;
let user = match User::find_by_mail(&email, &mut conn).await {
@@ -904,19 +896,25 @@ async fn send_invite(
};
let mut new_user = UserOrganization::new(user.uuid.clone(), String::from(org_id));
- let access_all = data.AccessAll.unwrap_or(false);
+ let access_all = data.access_all.unwrap_or(false);
new_user.access_all = access_all;
new_user.atype = new_type;
new_user.status = user_org_status;
// If no accessAll, add the collections received
if !access_all {
- for col in data.Collections.iter().flatten() {
- match Collection::find_by_uuid_and_org(&col.Id, org_id, &mut conn).await {
+ for col in data.collections.iter().flatten() {
+ match Collection::find_by_uuid_and_org(&col.id, org_id, &mut conn).await {
None => err!("Collection not found in Organization"),
Some(collection) => {
- CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &mut conn)
- .await?;
+ CollectionUser::save(
+ &user.uuid,
+ &collection.uuid,
+ col.read_only,
+ col.hide_passwords,
+ &mut conn,
+ )
+ .await?;
}
}
}
@@ -924,7 +922,7 @@ async fn send_invite(
new_user.save(&mut conn).await?;
- for group in data.Groups.iter() {
+ for group in data.groups.iter() {
let mut group_entry = GroupUser::new(String::from(group), user.uuid.clone());
group_entry.save(&mut conn).await?;
}
@@ -964,14 +962,14 @@ async fn send_invite(
#[post("/organizations/<org_id>/users/reinvite", data = "<data>")]
async fn bulk_reinvite_user(
org_id: &str,
- data: JsonUpcase<OrgBulkIds>,
+ data: Json<OrgBulkIds>,
headers: AdminHeaders,
mut conn: DbConn,
) -> Json<Value> {
- let data: OrgBulkIds = data.into_inner().data;
+ let data: OrgBulkIds = data.into_inner();
let mut bulk_response = Vec::new();
- for org_user_id in data.Ids {
+ for org_user_id in data.ids {
let err_msg = match _reinvite_user(org_id, &org_user_id, &headers.user.email, &mut conn).await {
Ok(_) => String::new(),
Err(e) => format!("{e:?}"),
@@ -979,17 +977,17 @@ async fn bulk_reinvite_user(
bulk_response.push(json!(
{
- "Object": "OrganizationBulkConfirmResponseModel",
- "Id": org_user_id,
- "Error": err_msg
+ "object": "OrganizationBulkConfirmResponseModel",
+ "id": org_user_id,
+ "error": err_msg
}
))
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -1045,22 +1043,17 @@ async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, co
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct AcceptData {
- Token: String,
- ResetPasswordKey: Option<String>,
+ token: String,
+ reset_password_key: Option<String>,
}
#[post("/organizations/<org_id>/users/<_org_user_id>/accept", data = "<data>")]
-async fn accept_invite(
- org_id: &str,
- _org_user_id: &str,
- data: JsonUpcase<AcceptData>,
- mut conn: DbConn,
-) -> EmptyResult {
+async fn accept_invite(org_id: &str, _org_user_id: &str, data: Json<AcceptData>, mut conn: DbConn) -> EmptyResult {
// The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead
- let data: AcceptData = data.into_inner().data;
- let claims = decode_invite(&data.Token)?;
+ let data: AcceptData = data.into_inner();
+ let claims = decode_invite(&data.token)?;
match User::find_by_mail(&claims.email, &mut conn).await {
Some(user) => {
@@ -1077,7 +1070,7 @@ async fn accept_invite(
}
let master_password_required = OrgPolicy::org_is_reset_password_auto_enroll(org, &mut conn).await;
- if data.ResetPasswordKey.is_none() && master_password_required {
+ if data.reset_password_key.is_none() && master_password_required {
err!("Reset password key is required, but not provided.");
}
@@ -1102,7 +1095,7 @@ async fn accept_invite(
user_org.status = UserOrgStatus::Accepted as i32;
if master_password_required {
- user_org.reset_password_key = data.ResetPasswordKey;
+ user_org.reset_password_key = data.reset_password_key;
}
user_org.save(&mut conn).await?;
@@ -1131,32 +1124,45 @@ async fn accept_invite(
Ok(())
}
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ConfirmData {
+ id: Option<String>,
+ key: Option<String>,
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct BulkConfirmData {
+ keys: Option<Vec<ConfirmData>>,
+}
+
#[post("/organizations/<org_id>/users/confirm", data = "<data>")]
async fn bulk_confirm_invite(
org_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<BulkConfirmData>,
headers: AdminHeaders,
mut conn: DbConn,
nt: Notify<'_>,
) -> Json<Value> {
- let data = data.into_inner().data;
+ let data = data.into_inner();
let mut bulk_response = Vec::new();
- match data["Keys"].as_array() {
+ match data.keys {
Some(keys) => {
for invite in keys {
- let org_user_id = invite["Id"].as_str().unwrap_or_default();
- let user_key = invite["Key"].as_str().unwrap_or_default();
- let err_msg = match _confirm_invite(org_id, org_user_id, user_key, &headers, &mut conn, &nt).await {
+ let org_user_id = invite.id.unwrap_or_default();
+ let user_key = invite.key.unwrap_or_default();
+ let err_msg = match _confirm_invite(org_id, &org_user_id, &user_key, &headers, &mut conn, &nt).await {
Ok(_) => String::new(),
Err(e) => format!("{e:?}"),
};
bulk_response.push(json!(
{
- "Object": "OrganizationBulkConfirmResponseModel",
- "Id": org_user_id,
- "Error": err_msg
+ "object": "OrganizationBulkConfirmResponseModel",
+ "id": org_user_id,
+ "error": err_msg
}
));
}
@@ -1165,9 +1171,9 @@ async fn bulk_confirm_invite(
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -1175,14 +1181,14 @@ async fn bulk_confirm_invite(
async fn confirm_invite(
org_id: &str,
org_user_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<ConfirmData>,
headers: AdminHeaders,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let data = data.into_inner().data;
- let user_key = data["Key"].as_str().unwrap_or_default();
- _confirm_invite(org_id, org_user_id, user_key, &headers, &mut conn, &nt).await
+ let data = data.into_inner();
+ let user_key = data.key.unwrap_or_default();
+ _confirm_invite(org_id, org_user_id, &user_key, &headers, &mut conn, &nt).await
}
async fn _confirm_invite(
@@ -1285,19 +1291,19 @@ async fn get_user(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EditUserData {
- Type: NumberOrString,
- Collections: Option<Vec<CollectionData>>,
- Groups: Option<Vec<String>>,
- AccessAll: bool,
+ r#type: NumberOrString,
+ collections: Option<Vec<CollectionData>>,
+ groups: Option<Vec<String>>,
+ access_all: bool,
}
#[put("/organizations/<org_id>/users/<org_user_id>", data = "<data>", rank = 1)]
async fn put_organization_user(
org_id: &str,
org_user_id: &str,
- data: JsonUpcase<EditUserData>,
+ data: Json<EditUserData>,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
@@ -1308,13 +1314,13 @@ async fn put_organization_user(
async fn edit_user(
org_id: &str,
org_user_id: &str,
- data: JsonUpcase<EditUserData>,
+ data: Json<EditUserData>,
headers: AdminHeaders,
mut conn: DbConn,
) -> EmptyResult {
- let data: EditUserData = data.into_inner().data;
+ let data: EditUserData = data.into_inner();
- let new_type = match UserOrgType::from_str(&data.Type.into_string()) {
+ let new_type = match UserOrgType::from_str(&data.r#type.into_string()) {
Some(new_type) => new_type,
None => err!("Invalid type"),
};
@@ -1363,7 +1369,7 @@ async fn edit_user(
}
}
- user_to_edit.access_all = data.AccessAll;
+ user_to_edit.access_all = data.access_all;
user_to_edit.atype = new_type as i32;
// Delete all the odd collections
@@ -1372,16 +1378,16 @@ async fn edit_user(
}
// If no accessAll, add the collections received
- if !data.AccessAll {
- for col in data.Collections.iter().flatten() {
- match Collection::find_by_uuid_and_org(&col.Id, org_id, &mut conn).await {
+ if !data.access_all {
+ for col in data.collections.iter().flatten() {
+ match Collection::find_by_uuid_and_org(&col.id, org_id, &mut conn).await {
None => err!("Collection not found in Organization"),
Some(collection) => {
CollectionUser::save(
&user_to_edit.user_uuid,
&collection.uuid,
- col.ReadOnly,
- col.HidePasswords,
+ col.read_only,
+ col.hide_passwords,
&mut conn,
)
.await?;
@@ -1392,7 +1398,7 @@ async fn edit_user(
GroupUser::delete_all_by_user(&user_to_edit.uuid, &mut conn).await?;
- for group in data.Groups.iter().flatten() {
+ for group in data.groups.iter().flatten() {
let mut group_entry = GroupUser::new(String::from(group), user_to_edit.uuid.clone());
group_entry.save(&mut conn).await?;
}
@@ -1414,15 +1420,15 @@ async fn edit_user(
#[delete("/organizations/<org_id>/users", data = "<data>")]
async fn bulk_delete_user(
org_id: &str,
- data: JsonUpcase<OrgBulkIds>,
+ data: Json<OrgBulkIds>,
headers: AdminHeaders,
mut conn: DbConn,
nt: Notify<'_>,
) -> Json<Value> {
- let data: OrgBulkIds = data.into_inner().data;
+ let data: OrgBulkIds = data.into_inner();
let mut bulk_response = Vec::new();
- for org_user_id in data.Ids {
+ for org_user_id in data.ids {
let err_msg = match _delete_user(org_id, &org_user_id, &headers, &mut conn, &nt).await {
Ok(_) => String::new(),
Err(e) => format!("{e:?}"),
@@ -1430,17 +1436,17 @@ async fn bulk_delete_user(
bulk_response.push(json!(
{
- "Object": "OrganizationBulkConfirmResponseModel",
- "Id": org_user_id,
- "Error": err_msg
+ "object": "OrganizationBulkConfirmResponseModel",
+ "id": org_user_id,
+ "error": err_msg
}
))
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -1510,25 +1516,25 @@ async fn _delete_user(
#[post("/organizations/<org_id>/users/public-keys", data = "<data>")]
async fn bulk_public_keys(
org_id: &str,
- data: JsonUpcase<OrgBulkIds>,
+ data: Json<OrgBulkIds>,
_headers: AdminHeaders,
mut conn: DbConn,
) -> Json<Value> {
- let data: OrgBulkIds = data.into_inner().data;
+ let data: OrgBulkIds = data.into_inner();
let mut bulk_response = Vec::new();
// Check all received UserOrg UUIDs and find the matching User to retrieve the public key.
// If the user does not exist, just ignore it, and do not return any information regarding that UserOrg UUID.
// The web-vault will then ignore that user for the following steps.
- for user_org_id in data.Ids {
+ for user_org_id in data.ids {
match UserOrganization::find_by_uuid_and_org(&user_org_id, org_id, &mut conn).await {
Some(user_org) => match User::find_by_uuid(&user_org.user_uuid, &mut conn).await {
Some(user) => bulk_response.push(json!(
{
- "Object": "organizationUserPublicKeyResponseModel",
- "Id": user_org_id,
- "UserId": user.uuid,
- "Key": user.public_key
+ "object": "organizationUserPublicKeyResponseModel",
+ "id": user_org_id,
+ "userId": user.uuid,
+ "key": user.public_key
}
)),
None => debug!("User doesn't exist"),
@@ -1538,9 +1544,9 @@ async fn bulk_public_keys(
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -1548,42 +1554,42 @@ use super::ciphers::update_cipher_from_data;
use super::ciphers::CipherData;
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ImportData {
- Ciphers: Vec<CipherData>,
- Collections: Vec<NewCollectionData>,
- CollectionRelationships: Vec<RelationsData>,
+ ciphers: Vec<CipherData>,
+ collections: Vec<NewCollectionData>,
+ collection_relationships: Vec<RelationsData>,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct RelationsData {
// Cipher index
- Key: usize,
+ key: usize,
// Collection index
- Value: usize,
+ value: usize,
}
#[post("/ciphers/import-organization?<query..>", data = "<data>")]
async fn post_org_import(
query: OrgIdData,
- data: JsonUpcase<ImportData>,
+ data: Json<ImportData>,
headers: AdminHeaders,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
- let data: ImportData = data.into_inner().data;
+ let data: ImportData = data.into_inner();
let org_id = query.organization_id;
// Validate the import before continuing
// Bitwarden does not process the import if there is one item invalid.
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
- Cipher::validate_notes(&data.Ciphers)?;
+ Cipher::validate_notes(&data.ciphers)?;
let mut collections = Vec::new();
- for coll in data.Collections {
- let collection = Collection::new(org_id.clone(), coll.Name, coll.ExternalId);
+ for coll in data.collections {
+ let collection = Collection::new(org_id.clone(), coll.name, coll.external_id);
if collection.save(&mut conn).await.is_err() {
collections.push(Err(Error::new("Failed to create Collection", "Failed to create Collection")));
} else {
@@ -1593,15 +1599,15 @@ async fn post_org_import(
// Read the relations between collections and ciphers
let mut relations = Vec::new();
- for relation in data.CollectionRelationships {
- relations.push((relation.Key, relation.Value));
+ for relation in data.collection_relationships {
+ relations.push((relation.key, relation.value));
}
let headers: Headers = headers.into();
let mut ciphers = Vec::new();
- for cipher_data in data.Ciphers {
- let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
+ for cipher_data in data.ciphers {
+ let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok();
ciphers.push(cipher);
}
@@ -1628,9 +1634,9 @@ async fn list_policies(org_id: &str, _headers: AdminHeaders, mut conn: DbConn) -
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
Json(json!({
- "Data": policies_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": policies_json,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -1652,9 +1658,9 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
Ok(Json(json!({
- "Data": policies_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": policies_json,
+ "object": "list",
+ "continuationToken": null
})))
}
@@ -1779,27 +1785,27 @@ fn get_organization_tax(org_id: &str, _headers: Headers) -> Json<Value> {
fn get_plans() -> Json<Value> {
// Respond with a minimal JSON, just enough to allow the creation of a new organization.
Json(json!({
- "Object": "list",
- "Data": [{
- "Object": "plan",
- "Type": 0,
- "Product": 0,
- "Name": "Free",
- "NameLocalizationKey": "planNameFree",
- "BitwardenProduct": 0,
- "MaxUsers": 0,
- "DescriptionLocalizationKey": "planDescFree"
+ "object": "list",
+ "data": [{
+ "object": "plan",
+ "type": 0,
+ "product": 0,
+ "name": "Free",
+ "nameLocalizationKey": "planNameFree",
+ "bitwardenProduct": 0,
+ "maxUsers": 0,
+ "descriptionLocalizationKey": "planDescFree"
},{
- "Object": "plan",
- "Type": 0,
- "Product": 1,
- "Name": "Free",
- "NameLocalizationKey": "planNameFree",
- "BitwardenProduct": 1,
- "MaxUsers": 0,
- "DescriptionLocalizationKey": "planDescFree"
+ "object": "plan",
+ "type": 0,
+ "product": 1,
+ "name": "Free",
+ "nameLocalizationKey": "planNameFree",
+ "bitwardenProduct": 1,
+ "maxUsers": 0,
+ "descriptionLocalizationKey": "planDescFree"
}],
- "ContinuationToken": null
+ "continuationToken": null
}))
}
@@ -1816,41 +1822,44 @@ fn get_plans_tax_rates(_headers: Headers) -> Json<Value> {
fn _empty_data_json() -> Value {
json!({
- "Object": "list",
- "Data": [],
- "ContinuationToken": null
+ "object": "list",
+ "data": [],
+ "continuationToken": null
})
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case, dead_code)]
+#[serde(rename_all = "camelCase")]
struct OrgImportGroupData {
- Name: String, // "GroupName"
- ExternalId: String, // "cn=GroupName,ou=Groups,dc=example,dc=com"
- Users: Vec<String>, // ["uid=user,ou=People,dc=example,dc=com"]
+ #[allow(dead_code)]
+ name: String, // "GroupName"
+ #[allow(dead_code)]
+ external_id: String, // "cn=GroupName,ou=Groups,dc=example,dc=com"
+ #[allow(dead_code)]
+ users: Vec<String>, // ["uid=user,ou=People,dc=example,dc=com"]
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgImportUserData {
- Email: String, // "[email protected]"
+ email: String, // "[email protected]"
#[allow(dead_code)]
- ExternalId: String, // "uid=user,ou=People,dc=example,dc=com"
- Deleted: bool,
+ external_id: String, // "uid=user,ou=People,dc=example,dc=com"
+ deleted: bool,
}
#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgImportData {
#[allow(dead_code)]
- Groups: Vec<OrgImportGroupData>,
- OverwriteExisting: bool,
- Users: Vec<OrgImportUserData>,
+ groups: Vec<OrgImportGroupData>,
+ overwrite_existing: bool,
+ users: Vec<OrgImportUserData>,
}
#[post("/organizations/<org_id>/import", data = "<data>")]
-async fn import(org_id: &str, data: JsonUpcase<OrgImportData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
- let data = data.into_inner().data;
+async fn import(org_id: &str, data: Json<OrgImportData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+ let data = data.into_inner();
// TODO: Currently we aren't storing the externalId's anywhere, so we also don't have a way
// to differentiate between auto-imported users and manually added ones.
@@ -1864,10 +1873,10 @@ async fn import(org_id: &str, data: JsonUpcase<OrgImportData>, headers: Headers,
None => err!("User not part of organization"),
};
- for user_data in &data.Users {
- if user_data.Deleted {
+ for user_data in &data.users {
+ if user_data.deleted {
// If user is marked for deletion and it exists, delete it
- if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.Email, org_id, &mut conn).await {
+ if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.email, org_id, &mut conn).await {
log_event(
EventType::OrganizationUserRemoved as i32,
&user_org.uuid,
@@ -1883,8 +1892,8 @@ async fn import(org_id: &str, data: JsonUpcase<OrgImportData>, headers: Headers,
}
// If user is not part of the organization, but it exists
- } else if UserOrganization::find_by_email_and_org(&user_data.Email, org_id, &mut conn).await.is_none() {
- if let Some(user) = User::find_by_mail(&user_data.Email, &mut conn).await {
+ } else if UserOrganization::find_by_email_and_org(&user_data.email, org_id, &mut conn).await.is_none() {
+ if let Some(user) = User::find_by_mail(&user_data.email, &mut conn).await {
let user_org_status = if CONFIG.mail_enabled() {
UserOrgStatus::Invited as i32
} else {
@@ -1916,7 +1925,7 @@ async fn import(org_id: &str, data: JsonUpcase<OrgImportData>, headers: Headers,
};
mail::send_invite(
- &user_data.Email,
+ &user_data.email,
&user.uuid,
Some(String::from(org_id)),
Some(new_org_user.uuid),
@@ -1930,10 +1939,10 @@ async fn import(org_id: &str, data: JsonUpcase<OrgImportData>, headers: Headers,
}
// If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true)
- if data.OverwriteExisting {
+ if data.overwrite_existing {
for user_org in UserOrganization::find_by_org_and_type(org_id, UserOrgType::User, &mut conn).await {
if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &mut conn).await.map(|u| u.email) {
- if !data.Users.iter().any(|u| u.Email == user_email) {
+ if !data.users.iter().any(|u| u.email == user_email) {
log_event(
EventType::OrganizationUserRemoved as i32,
&user_org.uuid,
@@ -1969,7 +1978,7 @@ async fn deactivate_organization_user(
#[put("/organizations/<org_id>/users/deactivate", data = "<data>")]
async fn bulk_deactivate_organization_user(
org_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: AdminHeaders,
conn: DbConn,
) -> Json<Value> {
@@ -1989,11 +1998,11 @@ async fn revoke_organization_user(
#[put("/organizations/<org_id>/users/revoke", data = "<data>")]
async fn bulk_revoke_organization_user(
org_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: AdminHeaders,
mut conn: DbConn,
) -> Json<Value> {
- let data = data.into_inner().data;
+ let data = data.into_inner();
let mut bulk_response = Vec::new();
match data["Ids"].as_array() {
@@ -2007,9 +2016,9 @@ async fn bulk_revoke_organization_user(
bulk_response.push(json!(
{
- "Object": "OrganizationUserBulkResponseModel",
- "Id": org_user_id,
- "Error": err_msg
+ "object": "OrganizationUserBulkResponseModel",
+ "id": org_user_id,
+ "error": err_msg
}
));
}
@@ -2018,9 +2027,9 @@ async fn bulk_revoke_organization_user(
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -2079,7 +2088,7 @@ async fn activate_organization_user(
#[put("/organizations/<org_id>/users/activate", data = "<data>")]
async fn bulk_activate_organization_user(
org_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: AdminHeaders,
conn: DbConn,
) -> Json<Value> {
@@ -2099,11 +2108,11 @@ async fn restore_organization_user(
#[put("/organizations/<org_id>/users/restore", data = "<data>")]
async fn bulk_restore_organization_user(
org_id: &str,
- data: JsonUpcase<Value>,
+ data: Json<Value>,
headers: AdminHeaders,
mut conn: DbConn,
) -> Json<Value> {
- let data = data.into_inner().data;
+ let data = data.into_inner();
let mut bulk_response = Vec::new();
match data["Ids"].as_array() {
@@ -2117,9 +2126,9 @@ async fn bulk_restore_organization_user(
bulk_response.push(json!(
{
- "Object": "OrganizationUserBulkResponseModel",
- "Id": org_user_id,
- "Error": err_msg
+ "object": "OrganizationUserBulkResponseModel",
+ "id": org_user_id,
+ "error": err_msg
}
));
}
@@ -2128,9 +2137,9 @@ async fn bulk_restore_organization_user(
}
Json(json!({
- "Data": bulk_response,
- "Object": "list",
- "ContinuationToken": null
+ "data": bulk_response,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -2204,35 +2213,35 @@ async fn get_groups(org_id: &str, _headers: ManagerHeadersLoose, mut conn: DbCon
};
Ok(Json(json!({
- "Data": groups,
- "Object": "list",
- "ContinuationToken": null,
+ "data": groups,
+ "object": "list",
+ "continuationToken": null,
})))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct GroupRequest {
- Name: String,
- AccessAll: Option<bool>,
- ExternalId: Option<String>,
- Collections: Vec<SelectionReadOnly>,
- Users: Vec<String>,
+ name: String,
+ access_all: Option<bool>,
+ external_id: Option<String>,
+ collections: Vec<SelectionReadOnly>,
+ users: Vec<String>,
}
impl GroupRequest {
pub fn to_group(&self, organizations_uuid: &str) -> Group {
Group::new(
String::from(organizations_uuid),
- self.Name.clone(),
- self.AccessAll.unwrap_or(false),
- self.ExternalId.clone(),
+ self.name.clone(),
+ self.access_all.unwrap_or(false),
+ self.external_id.clone(),
)
}
pub fn update_group(&self, mut group: Group) -> Group {
- group.name.clone_from(&self.Name);
- group.access_all = self.AccessAll.unwrap_or(false);
+ group.name.clone_from(&self.name);
+ group.access_all = self.access_all.unwrap_or(false);
// Group Updates do not support changing the external_id
// These input fields are in a disabled state, and can only be updated/added via ldap_import
@@ -2241,31 +2250,31 @@ impl GroupRequest {
}
#[derive(Deserialize, Serialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct SelectionReadOnly {
- Id: String,
- ReadOnly: bool,
- HidePasswords: bool,
+ id: String,
+ read_only: bool,
+ hide_passwords: bool,
}
impl SelectionReadOnly {
pub fn to_collection_group(&self, groups_uuid: String) -> CollectionGroup {
- CollectionGroup::new(self.Id.clone(), groups_uuid, self.ReadOnly, self.HidePasswords)
+ CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords)
}
pub fn to_collection_group_details_read_only(collection_group: &CollectionGroup) -> SelectionReadOnly {
SelectionReadOnly {
- Id: collection_group.groups_uuid.clone(),
- ReadOnly: collection_group.read_only,
- HidePasswords: collection_group.hide_passwords,
+ id: collection_group.groups_uuid.clone(),
+ read_only: collection_group.read_only,
+ hide_passwords: collection_group.hide_passwords,
}
}
pub fn to_collection_user_details_read_only(collection_user: &CollectionUser) -> SelectionReadOnly {
SelectionReadOnly {
- Id: collection_user.user_uuid.clone(),
- ReadOnly: collection_user.read_only,
- HidePasswords: collection_user.hide_passwords,
+ id: collection_user.user_uuid.clone(),
+ read_only: collection_user.read_only,
+ hide_passwords: collection_user.hide_passwords,
}
}
@@ -2278,7 +2287,7 @@ impl SelectionReadOnly {
async fn post_group(
org_id: &str,
group_id: &str,
- data: JsonUpcase<GroupRequest>,
+ data: Json<GroupRequest>,
headers: AdminHeaders,
conn: DbConn,
) -> JsonResult {
@@ -2286,17 +2295,12 @@ async fn post_group(
}
#[post("/organizations/<org_id>/groups", data = "<data>")]
-async fn post_groups(
- org_id: &str,
- headers: AdminHeaders,
- data: JsonUpcase<GroupRequest>,
- mut conn: DbConn,
-) -> JsonResult {
+async fn post_groups(org_id: &str, headers: AdminHeaders, data: Json<GroupRequest>, mut conn: DbConn) -> JsonResult {
if !CONFIG.org_groups_enabled() {
err!("Group support is disabled");
}
- let group_request = data.into_inner().data;
+ let group_request = data.into_inner();
let group = group_request.to_group(org_id);
log_event(
@@ -2310,14 +2314,14 @@ async fn post_groups(
)
.await;
- add_update_group(group, group_request.Collections, group_request.Users, org_id, &headers, &mut conn).await
+ add_update_group(group, group_request.collections, group_request.users, org_id, &headers, &mut conn).await
}
#[put("/organizations/<org_id>/groups/<group_id>", data = "<data>")]
async fn put_group(
org_id: &str,
group_id: &str,
- data: JsonUpcase<GroupRequest>,
+ data: Json<GroupRequest>,
headers: AdminHeaders,
mut conn: DbConn,
) -> JsonResult {
@@ -2330,7 +2334,7 @@ async fn put_group(
None => err!("Group not found"),
};
- let group_request = data.into_inner().data;
+ let group_request = data.into_inner();
let updated_group = group_request.update_group(group);
CollectionGroup::delete_all_by_group(group_id, &mut conn).await?;
@@ -2347,7 +2351,7 @@ async fn put_group(
)
.await;
- add_update_group(updated_group, group_request.Collections, group_request.Users, org_id, &headers, &mut conn).await
+ add_update_group(updated_group, group_request.collections, group_request.users, org_id, &headers, &mut conn).await
}
async fn add_update_group(
@@ -2382,11 +2386,11 @@ async fn add_update_group(
}
Ok(Json(json!({
- "Id": group.uuid,
- "OrganizationId": group.organizations_uuid,
- "Name": group.name,
- "AccessAll": group.access_all,
- "ExternalId": group.external_id
+ "id": group.uuid,
+ "organizationId": group.organizations_uuid,
+ "name": group.name,
+ "accessAll": group.access_all,
+ "externalId": group.external_id
})))
}
@@ -2441,7 +2445,7 @@ async fn _delete_group(org_id: &str, group_id: &str, headers: &AdminHeaders, con
#[delete("/organizations/<org_id>/groups", data = "<data>")]
async fn bulk_delete_groups(
org_id: &str,
- data: JsonUpcase<OrgBulkIds>,
+ data: Json<OrgBulkIds>,
headers: AdminHeaders,
mut conn: DbConn,
) -> EmptyResult {
@@ -2449,9 +2453,9 @@ async fn bulk_delete_groups(
err!("Group support is disabled");
}
- let data: OrgBulkIds = data.into_inner().data;
+ let data: OrgBulkIds = data.into_inner();
- for group_id in data.Ids {
+ for group_id in data.ids {
_delete_group(org_id, &group_id, &headers, &mut conn).await?
}
Ok(())
@@ -2496,7 +2500,7 @@ async fn put_group_users(
org_id: &str,
group_id: &str,
headers: AdminHeaders,
- data: JsonVec<String>,
+ data: Json<Vec<String>>,
mut conn: DbConn,
) -> EmptyResult {
if !CONFIG.org_groups_enabled() {
@@ -2548,16 +2552,16 @@ async fn get_user_groups(_org_id: &str, user_id: &str, _headers: AdminHeaders, m
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrganizationUserUpdateGroupsRequest {
- GroupIds: Vec<String>,
+ group_ids: Vec<String>,
}
#[post("/organizations/<org_id>/users/<org_user_id>/groups", data = "<data>")]
async fn post_user_groups(
org_id: &str,
org_user_id: &str,
- data: JsonUpcase<OrganizationUserUpdateGroupsRequest>,
+ data: Json<OrganizationUserUpdateGroupsRequest>,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
@@ -2568,7 +2572,7 @@ async fn post_user_groups(
async fn put_user_groups(
org_id: &str,
org_user_id: &str,
- data: JsonUpcase<OrganizationUserUpdateGroupsRequest>,
+ data: Json<OrganizationUserUpdateGroupsRequest>,
headers: AdminHeaders,
mut conn: DbConn,
) -> EmptyResult {
@@ -2587,8 +2591,8 @@ async fn put_user_groups(
GroupUser::delete_all_by_user(org_user_id, &mut conn).await?;
- let assigned_group_ids = data.into_inner().data;
- for assigned_group_id in assigned_group_ids.GroupIds {
+ let assigned_group_ids = data.into_inner();
+ for assigned_group_id in assigned_group_ids.group_ids {
let mut group_user = GroupUser::new(assigned_group_id.clone(), String::from(org_user_id));
group_user.save(&mut conn).await?;
}
@@ -2663,18 +2667,18 @@ async fn delete_group_user(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrganizationUserResetPasswordEnrollmentRequest {
- ResetPasswordKey: Option<String>,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ reset_password_key: Option<String>,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrganizationUserResetPasswordRequest {
- NewMasterPasswordHash: String,
- Key: String,
+ new_master_password_hash: String,
+ key: String,
}
#[get("/organizations/<org_id>/keys")]
@@ -2685,9 +2689,9 @@ async fn get_organization_keys(org_id: &str, mut conn: DbConn) -> JsonResult {
};
Ok(Json(json!({
- "Object": "organizationKeys",
- "PublicKey": org.public_key,
- "PrivateKey": org.private_key,
+ "object": "organizationKeys",
+ "publicKey": org.public_key,
+ "privateKey": org.private_key,
})))
}
@@ -2696,7 +2700,7 @@ async fn put_reset_password(
org_id: &str,
org_user_id: &str,
headers: AdminHeaders,
- data: JsonUpcase<OrganizationUserResetPasswordRequest>,
+ data: Json<OrganizationUserResetPasswordRequest>,
mut conn: DbConn,
nt: Notify<'_>,
) -> EmptyResult {
@@ -2730,10 +2734,10 @@ async fn put_reset_password(
err!(format!("Error sending user reset password email: {e:#?}"));
}
- let reset_request = data.into_inner().data;
+ let reset_request = data.into_inner();
let mut user = user;
- user.set_password(reset_request.NewMasterPasswordHash.as_str(), Some(reset_request.Key), true, None);
+ user.set_password(reset_request.new_master_password_hash.as_str(), Some(reset_request.key), true, None);
user.save(&mut conn).await?;
nt.send_logout(&user, None).await;
@@ -2778,13 +2782,13 @@ async fn get_reset_password_details(
// https://github.com/bitwarden/server/blob/3b50ccb9f804efaacdc46bed5b60e5b28eddefcf/src/Api/Models/Response/Organizations/OrganizationUserResponseModel.cs#L111
Ok(Json(json!({
- "Object": "organizationUserResetPasswordDetails",
- "Kdf":user.client_kdf_type,
- "KdfIterations":user.client_kdf_iter,
- "KdfMemory":user.client_kdf_memory,
- "KdfParallelism":user.client_kdf_parallelism,
- "ResetPasswordKey":org_user.reset_password_key,
- "EncryptedPrivateKey":org.private_key,
+ "object": "organizationUserResetPasswordDetails",
+ "kdf":user.client_kdf_type,
+ "kdfIterations":user.client_kdf_iter,
+ "kdfMemory":user.client_kdf_memory,
+ "kdfParallelism":user.client_kdf_parallelism,
+ "resetPasswordKey":org_user.reset_password_key,
+ "encryptedPrivateKey":org.private_key,
})))
}
@@ -2832,7 +2836,7 @@ async fn put_reset_password_enrollment(
org_id: &str,
org_user_id: &str,
headers: Headers,
- data: JsonUpcase<OrganizationUserResetPasswordEnrollmentRequest>,
+ data: Json<OrganizationUserResetPasswordEnrollmentRequest>,
mut conn: DbConn,
) -> EmptyResult {
let mut org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await {
@@ -2842,23 +2846,24 @@ async fn put_reset_password_enrollment(
check_reset_password_applicable(org_id, &mut conn).await?;
- let reset_request = data.into_inner().data;
+ let reset_request = data.into_inner();
- if reset_request.ResetPasswordKey.is_none() && OrgPolicy::org_is_reset_password_auto_enroll(org_id, &mut conn).await
+ if reset_request.reset_password_key.is_none()
+ && OrgPolicy::org_is_reset_password_auto_enroll(org_id, &mut conn).await
{
err!("Reset password can't be withdrawed due to an enterprise policy");
}
- if reset_request.ResetPasswordKey.is_some() {
+ if reset_request.reset_password_key.is_some() {
PasswordOrOtpData {
- MasterPasswordHash: reset_request.MasterPasswordHash,
- Otp: reset_request.Otp,
+ master_password_hash: reset_request.master_password_hash,
+ otp: reset_request.otp,
}
.validate(&headers.user, true, &mut conn)
.await?;
}
- org_user.reset_password_key = reset_request.ResetPasswordKey;
+ org_user.reset_password_key = reset_request.reset_password_key;
org_user.save(&mut conn).await?;
let log_id = if org_user.reset_password_key.is_some() {
@@ -2922,12 +2927,12 @@ async fn get_org_export(org_id: &str, headers: AdminHeaders, mut conn: DbConn) -
async fn _api_key(
org_id: &str,
- data: JsonUpcase<PasswordOrOtpData>,
+ data: Json<PasswordOrOtpData>,
rotate: bool,
headers: AdminHeaders,
mut conn: DbConn,
) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
// Validate the admin users password/otp
@@ -2951,21 +2956,21 @@ async fn _api_key(
};
Ok(Json(json!({
- "ApiKey": org_api_key.api_key,
- "RevisionDate": crate::util::format_date(&org_api_key.revision_date),
- "Object": "apiKey",
+ "apiKey": org_api_key.api_key,
+ "revisionDate": crate::util::format_date(&org_api_key.revision_date),
+ "object": "apiKey",
})))
}
#[post("/organizations/<org_id>/api-key", data = "<data>")]
-async fn api_key(org_id: &str, data: JsonUpcase<PasswordOrOtpData>, headers: AdminHeaders, conn: DbConn) -> JsonResult {
+async fn api_key(org_id: &str, data: Json<PasswordOrOtpData>, headers: AdminHeaders, conn: DbConn) -> JsonResult {
_api_key(org_id, data, false, headers, conn).await
}
#[post("/organizations/<org_id>/rotate-api-key", data = "<data>")]
async fn rotate_api_key(
org_id: &str,
- data: JsonUpcase<PasswordOrOtpData>,
+ data: Json<PasswordOrOtpData>,
headers: AdminHeaders,
conn: DbConn,
) -> JsonResult {
diff --git a/src/api/core/public.rs b/src/api/core/public.rs
index 19cd0de8..0cdcbb63 100644
--- a/src/api/core/public.rs
+++ b/src/api/core/public.rs
@@ -1,13 +1,14 @@
use chrono::Utc;
use rocket::{
request::{self, FromRequest, Outcome},
+ serde::json::Json,
Request, Route,
};
use std::collections::HashSet;
use crate::{
- api::{EmptyResult, JsonUpcase},
+ api::EmptyResult,
auth,
db::{models::*, DbConn},
mail, CONFIG,
@@ -18,43 +19,43 @@ pub fn routes() -> Vec<Route> {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgImportGroupData {
- Name: String,
- ExternalId: String,
- MemberExternalIds: Vec<String>,
+ name: String,
+ external_id: String,
+ member_external_ids: Vec<String>,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgImportUserData {
- Email: String,
- ExternalId: String,
- Deleted: bool,
+ email: String,
+ external_id: String,
+ deleted: bool,
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct OrgImportData {
- Groups: Vec<OrgImportGroupData>,
- Members: Vec<OrgImportUserData>,
- OverwriteExisting: bool,
- // LargeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more then 2000 users or groups without the flag set.
+ groups: Vec<OrgImportGroupData>,
+ members: Vec<OrgImportUserData>,
+ overwrite_existing: bool,
+ // largeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more than 2000 users or groups without the flag set.
}
#[post("/public/organization/import", data = "<data>")]
-async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
+async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
// Most of the logic for this function can be found here
// https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797
let org_id = token.0;
- let data = data.into_inner().data;
+ let data = data.into_inner();
- for user_data in &data.Members {
- if user_data.Deleted {
+ for user_data in &data.members {
+ if user_data.deleted {
// If user is marked for deletion and it exists, revoke it
if let Some(mut user_org) =
- UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await
+ UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await
{
// Only revoke a user if it is not the last confirmed owner
let revoked = if user_org.atype == UserOrgType::Owner
@@ -72,27 +73,27 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
user_org.revoke()
};
- let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone()));
+ let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone()));
if revoked || ext_modified {
user_org.save(&mut conn).await?;
}
}
// If user is part of the organization, restore it
} else if let Some(mut user_org) =
- UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await
+ UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await
{
let restored = user_org.restore();
- let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone()));
+ let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone()));
if restored || ext_modified {
user_org.save(&mut conn).await?;
}
} else {
// If user is not part of the organization
- let user = match User::find_by_mail(&user_data.Email, &mut conn).await {
+ let user = match User::find_by_mail(&user_data.email, &mut conn).await {
Some(user) => user, // exists in vaultwarden
None => {
// User does not exist yet
- let mut new_user = User::new(user_data.Email.clone());
+ let mut new_user = User::new(user_data.email.clone());
new_user.save(&mut conn).await?;
if !CONFIG.mail_enabled() {
@@ -109,7 +110,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
};
let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
- new_org_user.set_external_id(Some(user_data.ExternalId.clone()));
+ new_org_user.set_external_id(Some(user_data.external_id.clone()));
new_org_user.access_all = false;
new_org_user.atype = UserOrgType::User as i32;
new_org_user.status = user_org_status;
@@ -123,7 +124,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
};
mail::send_invite(
- &user_data.Email,
+ &user_data.email,
&user.uuid,
Some(org_id.clone()),
Some(new_org_user.uuid),
@@ -136,13 +137,17 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
}
if CONFIG.org_groups_enabled() {
- for group_data in &data.Groups {
- let group_uuid = match Group::find_by_external_id_and_org(&group_data.ExternalId, &org_id, &mut conn).await
+ for group_data in &data.groups {
+ let group_uuid = match Group::find_by_external_id_and_org(&group_data.external_id, &org_id, &mut conn).await
{
Some(group) => group.uuid,
None => {
- let mut group =
- Group::new(org_id.clone(), group_data.Name.clone(), false, Some(group_data.ExternalId.clone()));
+ let mut group = Group::new(
+ org_id.clone(),
+ group_data.name.clone(),
+ false,
+ Some(group_data.external_id.clone()),
+ );
group.save(&mut conn).await?;
group.uuid
}
@@ -150,7 +155,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
GroupUser::delete_all_by_group(&group_uuid, &mut conn).await?;
- for ext_id in &group_data.MemberExternalIds {
+ for ext_id in &group_data.member_external_ids {
if let Some(user_org) = UserOrganization::find_by_external_id_and_org(ext_id, &org_id, &mut conn).await
{
let mut group_user = GroupUser::new(group_uuid.clone(), user_org.uuid.clone());
@@ -163,9 +168,9 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
}
// If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true)
- if data.OverwriteExisting {
+ if data.overwrite_existing {
// Generate a HashSet to quickly verify if a member is listed or not.
- let sync_members: HashSet<String> = data.Members.into_iter().map(|m| m.ExternalId).collect();
+ let sync_members: HashSet<String> = data.members.into_iter().map(|m| m.external_id).collect();
for user_org in UserOrganization::find_by_org(&org_id, &mut conn).await {
if let Some(ref user_external_id) = user_org.external_id {
if !sync_members.contains(user_external_id) {
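The overwrite_existing branch above prunes members that are missing from the synced payload. A simplified sketch of that selection step, with plain tuples standing in for UserOrganization rows and a hypothetical helper name (not part of the patch): members whose external id is absent from the sync are selected, while manually added members without an external id are left alone.

use std::collections::HashSet;

fn members_to_remove(existing: &[(String, Option<String>)], synced: &[String]) -> Vec<String> {
    // Build the lookup set once, like the HashSet of sync_members above.
    let sync_members: HashSet<&str> = synced.iter().map(String::as_str).collect();
    existing
        .iter()
        .filter_map(|(uuid, ext_id)| match ext_id {
            Some(id) if !sync_members.contains(id.as_str()) => Some(uuid.clone()),
            _ => None, // no external id, or still present in the sync: keep
        })
        .collect()
}

fn main() {
    let existing = vec![
        ("org-user-1".to_string(), Some("uid=alice,ou=People,dc=example,dc=com".to_string())),
        ("org-user-2".to_string(), Some("uid=bob,ou=People,dc=example,dc=com".to_string())),
        ("org-user-3".to_string(), None), // manually added, never pruned
    ];
    let synced = vec!["uid=alice,ou=People,dc=example,dc=com".to_string()];
    assert_eq!(members_to_remove(&existing, &synced), vec!["org-user-2".to_string()]);
}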
diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs
index 338510c6..27aea95a 100644
--- a/src/api/core/sends.rs
+++ b/src/api/core/sends.rs
@@ -9,7 +9,7 @@ use rocket::serde::json::Json;
use serde_json::Value;
use crate::{
- api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
+ api::{ApiResult, EmptyResult, JsonResult, Notify, UpdateType},
auth::{ClientIp, Headers, Host},
db::{models::*, DbConn, DbPool},
util::{NumberOrString, SafeString},
@@ -48,26 +48,26 @@ pub async fn purge_sends(pool: DbPool) {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct SendData {
- Type: i32,
- Key: String,
- Password: Option<String>,
- MaxAccessCount: Option<NumberOrString>,
- ExpirationDate: Option<DateTime<Utc>>,
- DeletionDate: DateTime<Utc>,
- Disabled: bool,
- HideEmail: Option<bool>,
+ r#type: i32,
+ key: String,
+ password: Option<String>,
+ max_access_count: Option<NumberOrString>,
+ expiration_date: Option<DateTime<Utc>>,
+ deletion_date: DateTime<Utc>,
+ disabled: bool,
+ hide_email: Option<bool>,
// Data field
- Name: String,
- Notes: Option<String>,
- Text: Option<Value>,
- File: Option<Value>,
- FileLength: Option<NumberOrString>,
+ name: String,
+ notes: Option<String>,
+ text: Option<Value>,
+ file: Option<Value>,
+ file_length: Option<NumberOrString>,
// Used for key rotations
- pub Id: Option<String>,
+ pub id: Option<String>,
}
/// Enforces the `Disable Send` policy. A non-owner/admin user belonging to
@@ -96,7 +96,7 @@ async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> Em
/// Ref: https://bitwarden.com/help/article/policies/#send-options
async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult {
let user_uuid = &headers.user.uuid;
- let hide_email = data.HideEmail.unwrap_or(false);
+ let hide_email = data.hide_email.unwrap_or(false);
if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await {
err!(
"Due to an Enterprise Policy, you are not allowed to hide your email address \
@@ -107,40 +107,40 @@ async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, c
}
fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
- let data_val = if data.Type == SendType::Text as i32 {
- data.Text
- } else if data.Type == SendType::File as i32 {
- data.File
+ let data_val = if data.r#type == SendType::Text as i32 {
+ data.text
+ } else if data.r#type == SendType::File as i32 {
+ data.file
} else {
err!("Invalid Send type")
};
let data_str = if let Some(mut d) = data_val {
- d.as_object_mut().and_then(|o| o.remove("Response"));
+ d.as_object_mut().and_then(|o| o.remove("response"));
serde_json::to_string(&d)?
} else {
err!("Send data not provided");
};
- if data.DeletionDate > Utc::now() + TimeDelta::try_days(31).unwrap() {
+ if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() {
err!(
"You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again."
);
}
- let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc());
+ let mut send = Send::new(data.r#type, data.name, data_str, data.key, data.deletion_date.naive_utc());
send.user_uuid = Some(user_uuid);
- send.notes = data.Notes;
- send.max_access_count = match data.MaxAccessCount {
+ send.notes = data.notes;
+ send.max_access_count = match data.max_access_count {
Some(m) => Some(m.into_i32()?),
_ => None,
};
- send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
- send.disabled = data.Disabled;
- send.hide_email = data.HideEmail;
- send.atype = data.Type;
+ send.expiration_date = data.expiration_date.map(|d| d.naive_utc());
+ send.disabled = data.disabled;
+ send.hide_email = data.hide_email;
+ send.atype = data.r#type;
- send.set_password(data.Password.as_deref());
+ send.set_password(data.password.as_deref());
Ok(send)
}
@@ -151,9 +151,9 @@ async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();
Json(json!({
- "Data": sends_json,
- "Object": "list",
- "ContinuationToken": null
+ "data": sends_json,
+ "object": "list",
+ "continuationToken": null
}))
}
@@ -172,13 +172,13 @@ async fn get_send(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult
}
#[post("/sends", data = "<data>")]
-async fn post_send(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
+async fn post_send(data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?;
- let data: SendData = data.into_inner().data;
+ let data: SendData = data.into_inner();
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
- if data.Type == SendType::File as i32 {
+ if data.r#type == SendType::File as i32 {
err!("File sends should use /api/sends/file")
}
@@ -198,7 +198,7 @@ async fn post_send(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbCon
#[derive(FromForm)]
struct UploadData<'f> {
- model: Json<crate::util::UpCase<SendData>>,
+ model: Json<SendData>,
data: TempFile<'f>,
}
@@ -218,7 +218,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
model,
mut data,
} = data.into_inner();
- let model = model.into_inner().data;
+ let model = model.into_inner();
let Some(size) = data.len().to_i64() else {
err!("Invalid send size");
@@ -266,9 +266,9 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
let mut data_value: Value = serde_json::from_str(&send.data)?;
if let Some(o) = data_value.as_object_mut() {
- o.insert(String::from("Id"), Value::String(file_id));
- o.insert(String::from("Size"), Value::Number(size.into()));
- o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size)));
+ o.insert(String::from("id"), Value::String(file_id));
+ o.insert(String::from("size"), Value::Number(size.into()));
+ o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(size)));
}
send.data = serde_json::to_string(&data_value)?;
@@ -288,18 +288,18 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
// Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190
#[post("/sends/file/v2", data = "<data>")]
-async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?;
- let data = data.into_inner().data;
+ let data = data.into_inner();
- if data.Type != SendType::File as i32 {
+ if data.r#type != SendType::File as i32 {
err!("Send content is not a file");
}
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
- let file_length = match &data.FileLength {
+ let file_length = match &data.file_length {
Some(m) => m.into_i64()?,
_ => err!("Invalid send length"),
};
@@ -334,9 +334,9 @@ async fn post_send_file_v2(data: JsonUpcase<SendData>, headers: Headers, mut con
let mut data_value: Value = serde_json::from_str(&send.data)?;
if let Some(o) = data_value.as_object_mut() {
- o.insert(String::from("Id"), Value::String(file_id.clone()));
- o.insert(String::from("Size"), Value::Number(file_length.into()));
- o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(file_length)));
+ o.insert(String::from("id"), Value::String(file_id.clone()));
+ o.insert(String::from("size"), Value::Number(file_length.into()));
+ o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(file_length)));
}
send.data = serde_json::to_string(&data_value)?;
send.save(&mut conn).await?;
@@ -395,15 +395,15 @@ async fn post_send_file_v2_data(
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct SendAccessData {
- pub Password: Option<String>,
+ pub password: Option<String>,
}
#[post("/sends/access/<access_id>", data = "<data>")]
async fn post_access(
access_id: &str,
- data: JsonUpcase<SendAccessData>,
+ data: Json<SendAccessData>,
mut conn: DbConn,
ip: ClientIp,
nt: Notify<'_>,
@@ -434,7 +434,7 @@ async fn post_access(
}
if send.password_hash.is_some() {
- match data.into_inner().data.Password {
+ match data.into_inner().password {
Some(ref p) if send.check_password(p) => { /* Nothing to do here */ }
Some(_) => err!("Invalid password", format!("IP: {}.", ip.ip)),
None => err_code!("Password not provided", format!("IP: {}.", ip.ip), 401),
@@ -464,7 +464,7 @@ async fn post_access(
async fn post_access_file(
send_id: &str,
file_id: &str,
- data: JsonUpcase<SendAccessData>,
+ data: Json<SendAccessData>,
host: Host,
mut conn: DbConn,
nt: Notify<'_>,
@@ -495,7 +495,7 @@ async fn post_access_file(
}
if send.password_hash.is_some() {
- match data.into_inner().data.Password {
+ match data.into_inner().password {
Some(ref p) if send.check_password(p) => { /* Nothing to do here */ }
Some(_) => err!("Invalid password."),
None => err_code!("Password not provided", 401),
@@ -518,9 +518,9 @@ async fn post_access_file(
let token_claims = crate::auth::generate_send_claims(send_id, file_id);
let token = crate::auth::encode_jwt(&token_claims);
Ok(Json(json!({
- "Object": "send-fileDownload",
- "Id": file_id,
- "Url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
+ "object": "send-fileDownload",
+ "id": file_id,
+ "url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
})))
}
@@ -535,16 +535,10 @@ async fn download_send(send_id: SafeString, file_id: SafeString, t: &str) -> Opt
}
#[put("/sends/<id>", data = "<data>")]
-async fn put_send(
- id: &str,
- data: JsonUpcase<SendData>,
- headers: Headers,
- mut conn: DbConn,
- nt: Notify<'_>,
-) -> JsonResult {
+async fn put_send(id: &str, data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
enforce_disable_send_policy(&headers, &mut conn).await?;
- let data: SendData = data.into_inner().data;
+ let data: SendData = data.into_inner();
enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
let mut send = match Send::find_by_uuid(id, &mut conn).await {
@@ -569,11 +563,11 @@ pub async fn update_send_from_data(
err!("Send is not owned by user")
}
- if send.atype != data.Type {
+ if send.atype != data.r#type {
err!("Sends can't change type")
}
- if data.DeletionDate > Utc::now() + TimeDelta::try_days(31).unwrap() {
+ if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() {
err!(
"You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again."
);
@@ -581,9 +575,9 @@ pub async fn update_send_from_data(
// When updating a file Send, we receive nulls in the File field, as it's immutable,
// so we only need to update the data field in the Text case
- if data.Type == SendType::Text as i32 {
- let data_str = if let Some(mut d) = data.Text {
- d.as_object_mut().and_then(|d| d.remove("Response"));
+ if data.r#type == SendType::Text as i32 {
+ let data_str = if let Some(mut d) = data.text {
+ d.as_object_mut().and_then(|d| d.remove("response"));
serde_json::to_string(&d)?
} else {
err!("Send data not provided");
@@ -591,20 +585,20 @@ pub async fn update_send_from_data(
send.data = data_str;
}
- send.name = data.Name;
- send.akey = data.Key;
- send.deletion_date = data.DeletionDate.naive_utc();
- send.notes = data.Notes;
- send.max_access_count = match data.MaxAccessCount {
+ send.name = data.name;
+ send.akey = data.key;
+ send.deletion_date = data.deletion_date.naive_utc();
+ send.notes = data.notes;
+ send.max_access_count = match data.max_access_count {
Some(m) => Some(m.into_i32()?),
_ => None,
};
- send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
- send.hide_email = data.HideEmail;
- send.disabled = data.Disabled;
+ send.expiration_date = data.expiration_date.map(|d| d.naive_utc());
+ send.hide_email = data.hide_email;
+ send.disabled = data.disabled;
// Only change the value if it's present
- if let Some(password) = data.Password {
+ if let Some(password) = data.password {
send.set_password(Some(&password));
}
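A small sketch of the file-metadata step in post_send_file / post_send_file_v2 above, with a simplified size formatter standing in for crate::util::get_display_size (so the exact sizeName string here is illustrative only): the stored send data blob now carries the camelCase "id", "size" and "sizeName" keys.

use serde_json::{json, Value};

fn attach_file_metadata(data: &mut Value, file_id: &str, size: i64) {
    if let Some(o) = data.as_object_mut() {
        o.insert("id".to_string(), Value::String(file_id.to_string()));
        o.insert("size".to_string(), Value::Number(size.into()));
        // Simplified formatter; the real code uses crate::util::get_display_size.
        o.insert("sizeName".to_string(), Value::String(format!("{:.2} KB", size as f64 / 1024.0)));
    }
}

fn main() {
    let mut data = json!({ "fileName": "encrypted-file-name==" });
    attach_file_metadata(&mut data, "file-uuid", 2048);
    assert_eq!(data["id"], "file-uuid");
    assert_eq!(data["sizeName"], "2.00 KB");
}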
diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs
index c959e0d4..9d4bd480 100644
--- a/src/api/core/two_factor/authenticator.rs
+++ b/src/api/core/two_factor/authenticator.rs
@@ -3,10 +3,7 @@ use rocket::serde::json::Json;
use rocket::Route;
use crate::{
- api::{
- core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase,
- PasswordOrOtpData,
- },
+ api::{core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, PasswordOrOtpData},
auth::{ClientIp, Headers},
crypto,
db::{
@@ -23,8 +20,8 @@ pub fn routes() -> Vec<Route> {
}
#[post("/two-factor/get-authenticator", data = "<data>")]
-async fn generate_authenticator(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -38,36 +35,32 @@ async fn generate_authenticator(data: JsonUpcase<PasswordOrOtpData>, headers: He
};
Ok(Json(json!({
- "Enabled": enabled,
- "Key": key,
- "Object": "twoFactorAuthenticator"
+ "enabled": enabled,
+ "key": key,
+ "object": "twoFactorAuthenticator"
})))
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct EnableAuthenticatorData {
- Key: String,
- Token: NumberOrString,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ key: String,
+ token: NumberOrString,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
#[post("/two-factor/authenticator", data = "<data>")]
-async fn activate_authenticator(
- data: JsonUpcase<EnableAuthenticatorData>,
- headers: Headers,
- mut conn: DbConn,
-) -> JsonResult {
- let data: EnableAuthenticatorData = data.into_inner().data;
- let key = data.Key;
- let token = data.Token.into_string();
+async fn activate_authenticator(data: Json<EnableAuthenticatorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: EnableAuthenticatorData = data.into_inner();
+ let key = data.key;
+ let token = data.token.into_string();
let mut user = headers.user;
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash,
- Otp: data.Otp,
+ master_password_hash: data.master_password_hash,
+ otp: data.otp,
}
.validate(&user, true, &mut conn)
.await?;
@@ -90,18 +83,14 @@ async fn activate_authenticator(
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({
- "Enabled": true,
- "Key": key,
- "Object": "twoFactorAuthenticator"
+ "enabled": true,
+ "key": key,
+ "object": "twoFactorAuthenticator"
})))
}
#[put("/two-factor/authenticator", data = "<data>")]
-async fn activate_authenticator_put(
- data: JsonUpcase<EnableAuthenticatorData>,
- headers: Headers,
- conn: DbConn,
-) -> JsonResult {
+async fn activate_authenticator_put(data: Json<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_authenticator(data, headers, conn).await
}
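On the response side the rename is purely in the string literals passed to json!, as a short sketch shows (serde_json only; key values are illustrative): serde_json emits the keys verbatim, so clients now see camelCase and the old PascalCase names are simply gone.

use serde_json::json;

fn main() {
    let resp = json!({
        "enabled": true,
        "key": "JBSWY3DPEHPK3PXP",
        "object": "twoFactorAuthenticator"
    });
    assert!(resp.get("enabled").is_some());
    assert!(resp.get("Enabled").is_none()); // the old PascalCase key no longer exists
    assert_eq!(resp["object"], "twoFactorAuthenticator");
    println!("{resp}");
}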
diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs
index ea5589fb..c5bfa9e5 100644
--- a/src/api/core/two_factor/duo.rs
+++ b/src/api/core/two_factor/duo.rs
@@ -5,7 +5,7 @@ use rocket::Route;
use crate::{
api::{
- core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase,
+ core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult,
PasswordOrOtpData,
},
auth::Headers,
@@ -92,8 +92,8 @@ impl DuoStatus {
const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>";
#[post("/two-factor/get-duo", data = "<data>")]
-async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -109,16 +109,16 @@ async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn
let json = if let Some(data) = data {
json!({
- "Enabled": enabled,
- "Host": data.host,
- "SecretKey": data.sk,
- "IntegrationKey": data.ik,
- "Object": "twoFactorDuo"
+ "enabled": enabled,
+ "host": data.host,
+ "secretKey": data.sk,
+ "integrationKey": data.ik,
+ "object": "twoFactorDuo"
})
} else {
json!({
- "Enabled": enabled,
- "Object": "twoFactorDuo"
+ "enabled": enabled,
+ "object": "twoFactorDuo"
})
};
@@ -126,21 +126,21 @@ async fn get_duo(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn
}
#[derive(Deserialize)]
-#[allow(non_snake_case, dead_code)]
+#[serde(rename_all = "camelCase")]
struct EnableDuoData {
- Host: String,
- SecretKey: String,
- IntegrationKey: String,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ host: String,
+ secret_key: String,
+ integration_key: String,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
impl From<EnableDuoData> for DuoData {
fn from(d: EnableDuoData) -> Self {
Self {
- host: d.Host,
- ik: d.IntegrationKey,
- sk: d.SecretKey,
+ host: d.host,
+ ik: d.integration_key,
+ sk: d.secret_key,
}
}
}
@@ -151,17 +151,17 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
st.is_empty() || s == DISABLED_MESSAGE_DEFAULT
}
- !empty_or_default(&data.Host) && !empty_or_default(&data.SecretKey) && !empty_or_default(&data.IntegrationKey)
+ !empty_or_default(&data.host) && !empty_or_default(&data.secret_key) && !empty_or_default(&data.integration_key)
}
#[post("/two-factor/duo", data = "<data>")]
-async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: EnableDuoData = data.into_inner().data;
+async fn activate_duo(data: Json<EnableDuoData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: EnableDuoData = data.into_inner();
let mut user = headers.user;
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash.clone(),
- Otp: data.Otp.clone(),
+ master_password_hash: data.master_password_hash.clone(),
+ otp: data.otp.clone(),
}
.validate(&user, true, &mut conn)
.await?;
@@ -184,16 +184,16 @@ async fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, mut con
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({
- "Enabled": true,
- "Host": data.host,
- "SecretKey": data.sk,
- "IntegrationKey": data.ik,
- "Object": "twoFactorDuo"
+ "enabled": true,
+ "host": data.host,
+ "secretKey": data.sk,
+ "integrationKey": data.ik,
+ "object": "twoFactorDuo"
})))
}
#[put("/two-factor/duo", data = "<data>")]
-async fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn activate_duo_put(data: Json<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_duo(data, headers, conn).await
}
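A simplified sketch of the check_duo_fields_custom logic above, with the renamed fields flattened into plain &str parameters and a hypothetical helper name: user-supplied Duo keys only count as configured when all three values are non-empty and differ from the DISABLED_MESSAGE_DEFAULT placeholder.

const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>";

fn uses_custom_duo_keys(host: &str, secret_key: &str, integration_key: &str) -> bool {
    // Treat an empty or placeholder value as "use the globally configured keys".
    let empty_or_default = |s: &str| {
        let st = s.trim();
        st.is_empty() || st == DISABLED_MESSAGE_DEFAULT
    };
    !empty_or_default(host) && !empty_or_default(secret_key) && !empty_or_default(integration_key)
}

fn main() {
    assert!(!uses_custom_duo_keys(DISABLED_MESSAGE_DEFAULT, DISABLED_MESSAGE_DEFAULT, DISABLED_MESSAGE_DEFAULT));
    assert!(!uses_custom_duo_keys("api-xxxxxxxx.duosecurity.com", "", "ik"));
    assert!(uses_custom_duo_keys("api-xxxxxxxx.duosecurity.com", "sk", "ik"));
}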
diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs
index 62344cf8..a4a69240 100644
--- a/src/api/core/two_factor/email.rs
+++ b/src/api/core/two_factor/email.rs
@@ -5,7 +5,7 @@ use rocket::Route;
use crate::{
api::{
core::{log_user_event, two_factor::_generate_recover_code},
- EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData,
+ EmptyResult, JsonResult, PasswordOrOtpData,
},
auth::Headers,
crypto,
@@ -22,28 +22,28 @@ pub fn routes() -> Vec<Route> {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct SendEmailLoginData {
- Email: String,
- MasterPasswordHash: String,
+ email: String,
+ master_password_hash: String,
}
/// User is trying to login and wants to use email 2FA.
/// Does not require Bearer token
#[post("/two-factor/send-email-login", data = "<data>")] // JsonResult
-async fn send_email_login(data: JsonUpcase<SendEmailLoginData>, mut conn: DbConn) -> EmptyResult {
- let data: SendEmailLoginData = data.into_inner().data;
+async fn send_email_login(data: Json<SendEmailLoginData>, mut conn: DbConn) -> EmptyResult {
+ let data: SendEmailLoginData = data.into_inner();
use crate::db::models::User;
// Get the user
- let user = match User::find_by_mail(&data.Email, &mut conn).await {
+ let user = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => user,
None => err!("Username or password is incorrect. Try again."),
};
// Check password
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Username or password is incorrect. Try again.")
}
@@ -76,8 +76,8 @@ pub async fn send_token(user_uuid: &str, conn: &mut DbConn) -> EmptyResult {
/// When user clicks on Manage email 2FA show the user the related information
#[post("/two-factor/get-email", data = "<data>")]
-async fn get_email(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn get_email(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -92,30 +92,30 @@ async fn get_email(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut co
};
Ok(Json(json!({
- "Email": mfa_email,
- "Enabled": enabled,
- "Object": "twoFactorEmail"
+ "email": mfa_email,
+ "enabled": enabled,
+ "object": "twoFactorEmail"
})))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct SendEmailData {
/// Email where 2FA codes will be sent to, can be different than user email account.
- Email: String,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ email: String,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
/// Send a verification email to the specified email address to check whether it exists/belongs to user.
#[post("/two-factor/send-email", data = "<data>")]
-async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
- let data: SendEmailData = data.into_inner().data;
+async fn send_email(data: Json<SendEmailData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+ let data: SendEmailData = data.into_inner();
let user = headers.user;
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash,
- Otp: data.Otp,
+ master_password_hash: data.master_password_hash,
+ otp: data.otp,
}
.validate(&user, false, &mut conn)
.await?;
@@ -131,7 +131,7 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn:
}
let generated_token = crypto::generate_email_token(CONFIG.email_token_size());
- let twofactor_data = EmailTokenData::new(data.Email, generated_token);
+ let twofactor_data = EmailTokenData::new(data.email, generated_token);
// Uses EmailVerificationChallenge as type to show that it's not verified yet.
let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json());
@@ -143,24 +143,24 @@ async fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, mut conn:
}
#[derive(Deserialize, Serialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EmailData {
- Email: String,
- Token: String,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ email: String,
+ token: String,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
/// Verify email belongs to user and can be used for 2FA email codes.
#[put("/two-factor/email", data = "<data>")]
-async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: EmailData = data.into_inner().data;
+async fn email(data: Json<EmailData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: EmailData = data.into_inner();
let mut user = headers.user;
// This is the last step in the verification process, delete the otp directly afterwards
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash,
- Otp: data.Otp,
+ master_password_hash: data.master_password_hash,
+ otp: data.otp,
}
.validate(&user, true, &mut conn)
.await?;
@@ -176,7 +176,7 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn)
_ => err!("No token available"),
};
- if !crypto::ct_eq(issued_token, data.Token) {
+ if !crypto::ct_eq(issued_token, data.token) {
err!("Token is invalid")
}
@@ -190,9 +190,9 @@ async fn email(data: JsonUpcase<EmailData>, headers: Headers, mut conn: DbConn)
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
Ok(Json(json!({
- "Email": email_data.email,
- "Enabled": "true",
- "Object": "twoFactorEmail"
+ "email": email_data.email,
+ "enabled": "true",
+ "object": "twoFactorEmail"
})))
}
diff --git a/src/api/core/two_factor/mod.rs b/src/api/core/two_factor/mod.rs
index 8c0d6764..2fbcfb3b 100644
--- a/src/api/core/two_factor/mod.rs
+++ b/src/api/core/two_factor/mod.rs
@@ -7,7 +7,7 @@ use serde_json::Value;
use crate::{
api::{
core::{log_event, log_user_event},
- EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData,
+ EmptyResult, JsonResult, PasswordOrOtpData,
},
auth::{ClientHeaders, Headers},
crypto,
@@ -50,52 +50,52 @@ async fn get_twofactor(headers: Headers, mut conn: DbConn) -> Json<Value> {
let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect();
Json(json!({
- "Data": twofactors_json,
- "Object": "list",
- "ContinuationToken": null,
+ "data": twofactors_json,
+ "object": "list",
+ "continuationToken": null,
}))
}
#[post("/two-factor/get-recover", data = "<data>")]
-async fn get_recover(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn get_recover(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, true, &mut conn).await?;
Ok(Json(json!({
- "Code": user.totp_recover,
- "Object": "twoFactorRecover"
+ "code": user.totp_recover,
+ "object": "twoFactorRecover"
})))
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct RecoverTwoFactor {
- MasterPasswordHash: String,
- Email: String,
- RecoveryCode: String,
+ master_password_hash: String,
+ email: String,
+ recovery_code: String,
}
#[post("/two-factor/recover", data = "<data>")]
-async fn recover(data: JsonUpcase<RecoverTwoFactor>, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult {
- let data: RecoverTwoFactor = data.into_inner().data;
+async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult {
+ let data: RecoverTwoFactor = data.into_inner();
use crate::db::models::User;
// Get the user
- let mut user = match User::find_by_mail(&data.Email, &mut conn).await {
+ let mut user = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => user,
None => err!("Username or password is incorrect. Try again."),
};
// Check password
- if !user.check_valid_password(&data.MasterPasswordHash) {
+ if !user.check_valid_password(&data.master_password_hash) {
err!("Username or password is incorrect. Try again.")
}
// Check if recovery code is correct
- if !user.check_valid_recovery_code(&data.RecoveryCode) {
+ if !user.check_valid_recovery_code(&data.recovery_code) {
err!("Recovery code is incorrect. Try again.")
}
@@ -127,27 +127,27 @@ async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) {
}
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct DisableTwoFactorData {
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
- Type: NumberOrString,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
+ r#type: NumberOrString,
}
#[post("/two-factor/disable", data = "<data>")]
-async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: DisableTwoFactorData = data.into_inner().data;
+async fn disable_twofactor(data: Json<DisableTwoFactorData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: DisableTwoFactorData = data.into_inner();
let user = headers.user;
// Delete directly after a valid token has been provided
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash,
- Otp: data.Otp,
+ master_password_hash: data.master_password_hash,
+ otp: data.otp,
}
.validate(&user, true, &mut conn)
.await?;
- let type_ = data.Type.into_i32()?;
+ let type_ = data.r#type.into_i32()?;
if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await {
twofactor.delete(&mut conn).await?;
@@ -160,14 +160,14 @@ async fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Head
}
Ok(Json(json!({
- "Enabled": false,
- "Type": type_,
- "Object": "twoFactorProvider"
+ "enabled": false,
+ "type": type_,
+ "object": "twoFactorProvider"
})))
}
#[put("/two-factor/disable", data = "<data>")]
-async fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn disable_twofactor_put(data: Json<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
disable_twofactor(data, headers, conn).await
}
diff --git a/src/api/core/two_factor/protected_actions.rs b/src/api/core/two_factor/protected_actions.rs
index 537ed0c6..8bfc59c1 100644
--- a/src/api/core/two_factor/protected_actions.rs
+++ b/src/api/core/two_factor/protected_actions.rs
@@ -1,8 +1,8 @@
use chrono::{DateTime, TimeDelta, Utc};
-use rocket::Route;
+use rocket::{serde::json::Json, Route};
use crate::{
- api::{EmptyResult, JsonUpcase},
+ api::EmptyResult,
auth::Headers,
crypto,
db::{
@@ -18,7 +18,7 @@ pub fn routes() -> Vec<Route> {
}
/// Data stored in the TwoFactor table in the db
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Debug, Serialize, Deserialize)]
pub struct ProtectedActionData {
/// Token issued to validate the protected action
pub token: String,
@@ -82,23 +82,24 @@ async fn request_otp(headers: Headers, mut conn: DbConn) -> EmptyResult {
}
#[derive(Deserialize, Serialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct ProtectedActionVerify {
- OTP: String,
+ #[serde(rename = "OTP", alias = "otp")]
+ otp: String,
}
#[post("/accounts/verify-otp", data = "<data>")]
-async fn verify_otp(data: JsonUpcase<ProtectedActionVerify>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn verify_otp(data: Json<ProtectedActionVerify>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.mail_enabled() {
err!("Email is disabled for this server. Either enable email or login using your master password instead of login via device.");
}
let user = headers.user;
- let data: ProtectedActionVerify = data.into_inner().data;
+ let data: ProtectedActionVerify = data.into_inner();
// Delete the token after one validation attempt
// This endpoint only gets called for the vault export, and doesn't need a second attempt
- validate_protected_action_otp(&data.OTP, &user.uuid, true, &mut conn).await
+ validate_protected_action_otp(&data.otp, &user.uuid, true, &mut conn).await
}
pub async fn validate_protected_action_otp(
diff --git a/src/api/core/two_factor/webauthn.rs b/src/api/core/two_factor/webauthn.rs
index 14ba8514..52ca70c4 100644
--- a/src/api/core/two_factor/webauthn.rs
+++ b/src/api/core/two_factor/webauthn.rs
@@ -7,7 +7,7 @@ use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState,
use crate::{
api::{
core::{log_user_event, two_factor::_generate_recover_code},
- EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData,
+ EmptyResult, JsonResult, PasswordOrOtpData,
},
auth::Headers,
db::{
@@ -96,20 +96,20 @@ pub struct WebauthnRegistration {
impl WebauthnRegistration {
fn to_json(&self) -> Value {
json!({
- "Id": self.id,
- "Name": self.name,
+ "id": self.id,
+ "name": self.name,
"migrated": self.migrated,
})
}
}
#[post("/two-factor/get-webauthn", data = "<data>")]
-async fn get_webauthn(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn get_webauthn(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
if !CONFIG.domain_set() {
err!("`DOMAIN` environment variable is not set. Webauthn disabled")
}
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -118,19 +118,15 @@ async fn get_webauthn(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut
let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({
- "Enabled": enabled,
- "Keys": registrations_json,
- "Object": "twoFactorWebAuthn"
+ "enabled": enabled,
+ "keys": registrations_json,
+ "object": "twoFactorWebAuthn"
})))
}
#[post("/two-factor/get-webauthn-challenge", data = "<data>")]
-async fn generate_webauthn_challenge(
- data: JsonUpcase<PasswordOrOtpData>,
- headers: Headers,
- mut conn: DbConn,
-) -> JsonResult {
- let data: PasswordOrOtpData = data.into_inner().data;
+async fn generate_webauthn_challenge(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -161,102 +157,94 @@ async fn generate_webauthn_challenge(
}
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct EnableWebauthnData {
- Id: NumberOrString, // 1..5
- Name: String,
- DeviceResponse: RegisterPublicKeyCredentialCopy,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ id: NumberOrString, // 1..5
+ name: String,
+ device_response: RegisterPublicKeyCredentialCopy,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
-// This is copied from RegisterPublicKeyCredential to change the Response objects casing
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct RegisterPublicKeyCredentialCopy {
- pub Id: String,
- pub RawId: Base64UrlSafeData,
- pub Response: AuthenticatorAttestationResponseRawCopy,
- pub Type: String,
+ pub id: String,
+ pub raw_id: Base64UrlSafeData,
+ pub response: AuthenticatorAttestationResponseRawCopy,
+ pub r#type: String,
}
// This is copied from AuthenticatorAttestationResponseRaw to change clientDataJSON to clientDataJson
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct AuthenticatorAttestationResponseRawCopy {
- pub AttestationObject: Base64UrlSafeData,
- pub ClientDataJson: Base64UrlSafeData,
+ #[serde(rename = "AttestationObject", alias = "attestationObject")]
+ pub attestation_object: Base64UrlSafeData,
+ #[serde(rename = "clientDataJson", alias = "clientDataJSON")]
+ pub client_data_json: Base64UrlSafeData,
}
impl From<RegisterPublicKeyCredentialCopy> for RegisterPublicKeyCredential {
fn from(r: RegisterPublicKeyCredentialCopy) -> Self {
Self {
- id: r.Id,
- raw_id: r.RawId,
+ id: r.id,
+ raw_id: r.raw_id,
response: AuthenticatorAttestationResponseRaw {
- attestation_object: r.Response.AttestationObject,
- client_data_json: r.Response.ClientDataJson,
+ attestation_object: r.response.attestation_object,
+ client_data_json: r.response.client_data_json,
},
- type_: r.Type,
+ type_: r.r#type,
}
}
}
-// This is copied from PublicKeyCredential to change the Response objects casing
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct PublicKeyCredentialCopy {
- pub Id: String,
- pub RawId: Base64UrlSafeData,
- pub Response: AuthenticatorAssertionResponseRawCopy,
- pub Extensions: Option<AuthenticationExtensionsClientOutputsCopy>,
- pub Type: String,
+ pub id: String,
+ pub raw_id: Base64UrlSafeData,
+ pub response: AuthenticatorAssertionResponseRawCopy,
+ pub extensions: Option<AuthenticationExtensionsClientOutputs>,
+ pub r#type: String,
}
// This is copied from AuthenticatorAssertionResponseRaw to change clientDataJSON to clientDataJson
#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct AuthenticatorAssertionResponseRawCopy {
- pub AuthenticatorData: Base64UrlSafeData,
- pub ClientDataJson: Base64UrlSafeData,
- pub Signature: Base64UrlSafeData,
- pub UserHandle: Option<Base64UrlSafeData>,
-}
-
-#[derive(Debug, Deserialize)]
-#[allow(non_snake_case)]
-pub struct AuthenticationExtensionsClientOutputsCopy {
- #[serde(default)]
- pub Appid: bool,
+ pub authenticator_data: Base64UrlSafeData,
+ #[serde(rename = "clientDataJson", alias = "clientDataJSON")]
+ pub client_data_json: Base64UrlSafeData,
+ pub signature: Base64UrlSafeData,
+ pub user_handle: Option<Base64UrlSafeData>,
}
impl From<PublicKeyCredentialCopy> for PublicKeyCredential {
fn from(r: PublicKeyCredentialCopy) -> Self {
Self {
- id: r.Id,
- raw_id: r.RawId,
+ id: r.id,
+ raw_id: r.raw_id,
response: AuthenticatorAssertionResponseRaw {
- authenticator_data: r.Response.AuthenticatorData,
- client_data_json: r.Response.ClientDataJson,
- signature: r.Response.Signature,
- user_handle: r.Response.UserHandle,
+ authenticator_data: r.response.authenticator_data,
+ client_data_json: r.response.client_data_json,
+ signature: r.response.signature,
+ user_handle: r.response.user_handle,
},
- extensions: r.Extensions.map(|e| AuthenticationExtensionsClientOutputs {
- appid: e.Appid,
- }),
- type_: r.Type,
+ extensions: r.extensions,
+ type_: r.r#type,
}
}
}
#[post("/two-factor/webauthn", data = "<data>")]
-async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: EnableWebauthnData = data.into_inner().data;
+async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: EnableWebauthnData = data.into_inner();
let mut user = headers.user;
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash,
- Otp: data.Otp,
+ master_password_hash: data.master_password_hash,
+ otp: data.otp,
}
.validate(&user, true, &mut conn)
.await?;
@@ -274,13 +262,13 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
// Verify the credentials with the saved state
let (credential, _data) =
- WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?;
+ WebauthnConfig::load().register_credential(&data.device_response.into(), &state, |_| Ok(false))?;
let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1;
// TODO: Check for repeated ID's
registrations.push(WebauthnRegistration {
- id: data.Id.into_i32()?,
- name: data.Name,
+ id: data.id.into_i32()?,
+ name: data.name,
migrated: false,
credential,
@@ -296,28 +284,28 @@ async fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Header
let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({
- "Enabled": true,
- "Keys": keys_json,
- "Object": "twoFactorU2f"
+ "enabled": true,
+ "keys": keys_json,
+ "object": "twoFactorU2f"
})))
}
#[put("/two-factor/webauthn", data = "<data>")]
-async fn activate_webauthn_put(data: JsonUpcase<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn activate_webauthn_put(data: Json<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_webauthn(data, headers, conn).await
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct DeleteU2FData {
- Id: NumberOrString,
- MasterPasswordHash: String,
+ id: NumberOrString,
+ master_password_hash: String,
}
#[delete("/two-factor/webauthn", data = "<data>")]
-async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let id = data.data.Id.into_i32()?;
- if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
+async fn delete_webauthn(data: Json<DeleteU2FData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let id = data.id.into_i32()?;
+ if !headers.user.check_valid_password(&data.master_password_hash) {
err!("Invalid password");
}
@@ -358,9 +346,9 @@ async fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, mut
let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect();
Ok(Json(json!({
- "Enabled": true,
- "Keys": keys_json,
- "Object": "twoFactorU2f"
+ "enabled": true,
+ "keys": keys_json,
+ "object": "twoFactorU2f"
})))
}
@@ -413,8 +401,8 @@ pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &mut
),
};
- let rsp: crate::util::UpCase<PublicKeyCredentialCopy> = serde_json::from_str(response)?;
- let rsp: PublicKeyCredential = rsp.data.into();
+ let rsp: PublicKeyCredentialCopy = serde_json::from_str(response)?;
+ let rsp: PublicKeyCredential = rsp.into();
let mut registrations = get_webauthn_registrations(user_uuid, conn).await?.1;
diff --git a/src/api/core/two_factor/yubikey.rs b/src/api/core/two_factor/yubikey.rs
index 2b199dfd..2eff3b6f 100644
--- a/src/api/core/two_factor/yubikey.rs
+++ b/src/api/core/two_factor/yubikey.rs
@@ -6,7 +6,7 @@ use yubico::{config::Config, verify_async};
use crate::{
api::{
core::{log_user_event, two_factor::_generate_recover_code},
- EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData,
+ EmptyResult, JsonResult, PasswordOrOtpData,
},
auth::Headers,
db::{
@@ -21,28 +21,30 @@ pub fn routes() -> Vec<Route> {
routes![generate_yubikey, activate_yubikey, activate_yubikey_put,]
}
-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
struct EnableYubikeyData {
- Key1: Option<String>,
- Key2: Option<String>,
- Key3: Option<String>,
- Key4: Option<String>,
- Key5: Option<String>,
- Nfc: bool,
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ key1: Option<String>,
+ key2: Option<String>,
+ key3: Option<String>,
+ key4: Option<String>,
+ key5: Option<String>,
+ nfc: bool,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
#[derive(Deserialize, Serialize, Debug)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct YubikeyMetadata {
- Keys: Vec<String>,
- pub Nfc: bool,
+ #[serde(rename = "keys", alias = "Keys")]
+ keys: Vec<String>,
+ #[serde(rename = "nfc", alias = "Nfc")]
+ pub nfc: bool,
}
fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
- let data_keys = [&data.Key1, &data.Key2, &data.Key3, &data.Key4, &data.Key5];
+ let data_keys = [&data.key1, &data.key2, &data.key3, &data.key4, &data.key5];
data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect()
}
@@ -81,11 +83,11 @@ async fn verify_yubikey_otp(otp: String) -> EmptyResult {
}
#[post("/two-factor/get-yubikey", data = "<data>")]
-async fn generate_yubikey(data: JsonUpcase<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn generate_yubikey(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult {
// Make sure the credentials are set
get_yubico_credentials()?;
- let data: PasswordOrOtpData = data.into_inner().data;
+ let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, false, &mut conn).await?;
@@ -98,29 +100,29 @@ async fn generate_yubikey(data: JsonUpcase<PasswordOrOtpData>, headers: Headers,
if let Some(r) = r {
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?;
- let mut result = jsonify_yubikeys(yubikey_metadata.Keys);
+ let mut result = jsonify_yubikeys(yubikey_metadata.keys);
- result["Enabled"] = Value::Bool(true);
- result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
- result["Object"] = Value::String("twoFactorU2f".to_owned());
+ result["enabled"] = Value::Bool(true);
+ result["nfc"] = Value::Bool(yubikey_metadata.nfc);
+ result["object"] = Value::String("twoFactorU2f".to_owned());
Ok(Json(result))
} else {
Ok(Json(json!({
- "Enabled": false,
- "Object": "twoFactorU2f",
+ "enabled": false,
+ "object": "twoFactorU2f",
})))
}
}
#[post("/two-factor/yubikey", data = "<data>")]
-async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, mut conn: DbConn) -> JsonResult {
- let data: EnableYubikeyData = data.into_inner().data;
+async fn activate_yubikey(data: Json<EnableYubikeyData>, headers: Headers, mut conn: DbConn) -> JsonResult {
+ let data: EnableYubikeyData = data.into_inner();
let mut user = headers.user;
PasswordOrOtpData {
- MasterPasswordHash: data.MasterPasswordHash.clone(),
- Otp: data.Otp.clone(),
+ master_password_hash: data.master_password_hash.clone(),
+ otp: data.otp.clone(),
}
.validate(&user, true, &mut conn)
.await?;
@@ -136,8 +138,8 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
if yubikeys.is_empty() {
return Ok(Json(json!({
- "Enabled": false,
- "Object": "twoFactorU2f",
+ "enabled": false,
+ "object": "twoFactorU2f",
})));
}
@@ -154,8 +156,8 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect();
let yubikey_metadata = YubikeyMetadata {
- Keys: yubikey_ids,
- Nfc: data.Nfc,
+ keys: yubikey_ids,
+ nfc: data.nfc,
};
yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap();
@@ -165,17 +167,17 @@ async fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers,
log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await;
- let mut result = jsonify_yubikeys(yubikey_metadata.Keys);
+ let mut result = jsonify_yubikeys(yubikey_metadata.keys);
- result["Enabled"] = Value::Bool(true);
- result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
- result["Object"] = Value::String("twoFactorU2f".to_owned());
+ result["enabled"] = Value::Bool(true);
+ result["nfc"] = Value::Bool(yubikey_metadata.nfc);
+ result["object"] = Value::String("twoFactorU2f".to_owned());
Ok(Json(result))
}
#[put("/two-factor/yubikey", data = "<data>")]
-async fn activate_yubikey_put(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
+async fn activate_yubikey_put(data: Json<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
activate_yubikey(data, headers, conn).await
}
@@ -187,7 +189,7 @@ pub async fn validate_yubikey_login(response: &str, twofactor_data: &str) -> Emp
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata");
let response_id = &response[..12];
- if !yubikey_metadata.Keys.contains(&response_id.to_owned()) {
+ if !yubikey_metadata.keys.contains(&response_id.to_owned()) {
err!("Given Yubikey is not registered");
}
diff --git a/src/api/identity.rs b/src/api/identity.rs
index ad51d664..89c82859 100644
--- a/src/api/identity.rs
+++ b/src/api/identity.rs
@@ -15,7 +15,7 @@ use crate::{
two_factor::{authenticator, duo, email, enforce_2fa_policy, webauthn, yubikey},
},
push::register_push_device,
- ApiResult, EmptyResult, JsonResult, JsonUpcase,
+ ApiResult, EmptyResult, JsonResult,
},
auth::{generate_organization_api_key_login_claims, ClientHeaders, ClientIp},
db::{models::*, DbConn},
@@ -602,7 +602,7 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo
let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?;
result["TwoFactorProviders2"][provider.to_string()] = json!({
- "Nfc": yubikey_metadata.Nfc,
+ "Nfc": yubikey_metadata.nfc,
})
}
@@ -631,19 +631,18 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo
}
#[post("/accounts/prelogin", data = "<data>")]
-async fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> {
+async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
_prelogin(data, conn).await
}
#[post("/accounts/register", data = "<data>")]
-async fn identity_register(data: JsonUpcase<RegisterData>, conn: DbConn) -> JsonResult {
+async fn identity_register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
_register(data, conn).await
}
// https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts
// https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs
#[derive(Debug, Clone, Default, FromForm)]
-#[allow(non_snake_case)]
struct ConnectData {
#[field(name = uncased("grant_type"))]
#[field(name = uncased("granttype"))]
diff --git a/src/api/mod.rs b/src/api/mod.rs
index de81630d..d5281bda 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -33,23 +33,18 @@ pub use crate::api::{
web::static_files,
};
use crate::db::{models::User, DbConn};
-use crate::util;
// Type aliases for API methods results
type ApiResult<T> = Result<T, crate::error::Error>;
pub type JsonResult = ApiResult<Json<Value>>;
pub type EmptyResult = ApiResult<()>;
-type JsonUpcase<T> = Json<util::UpCase<T>>;
-type JsonUpcaseVec<T> = Json<Vec<util::UpCase<T>>>;
-type JsonVec<T> = Json<Vec<T>>;
-
// Common structs representing JSON data received
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
struct PasswordOrOtpData {
- MasterPasswordHash: Option<String>,
- Otp: Option<String>,
+ master_password_hash: Option<String>,
+ otp: Option<String>,
}
impl PasswordOrOtpData {
@@ -59,7 +54,7 @@ impl PasswordOrOtpData {
pub async fn validate(&self, user: &User, delete_if_valid: bool, conn: &mut DbConn) -> EmptyResult {
use crate::api::core::two_factor::protected_actions::validate_protected_action_otp;
- match (self.MasterPasswordHash.as_deref(), self.Otp.as_deref()) {
+ match (self.master_password_hash.as_deref(), self.otp.as_deref()) {
(Some(pw_hash), None) => {
if !user.check_valid_password(pw_hash) {
err!("Invalid password");
diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs
index f8eca72f..83f4889c 100644
--- a/src/db/models/attachment.rs
+++ b/src/db/models/attachment.rs
@@ -42,13 +42,13 @@ impl Attachment {
pub fn to_json(&self, host: &str) -> Value {
json!({
- "Id": self.id,
- "Url": self.get_url(host),
- "FileName": self.file_name,
- "Size": self.file_size.to_string(),
- "SizeName": crate::util::get_display_size(self.file_size),
- "Key": self.akey,
- "Object": "attachment"
+ "id": self.id,
+ "url": self.get_url(host),
+ "fileName": self.file_name,
+ "size": self.file_size.to_string(),
+ "sizeName": crate::util::get_display_size(self.file_size),
+ "key": self.akey,
+ "object": "attachment"
})
}
}
diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs
index 3ed3401a..11285550 100644
--- a/src/db/models/cipher.rs
+++ b/src/db/models/cipher.rs
@@ -1,3 +1,4 @@
+use crate::util::LowerCase;
use crate::CONFIG;
use chrono::{NaiveDateTime, TimeDelta, Utc};
use serde_json::Value;
@@ -81,7 +82,7 @@ impl Cipher {
pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
let mut validation_errors = serde_json::Map::new();
for (index, cipher) in cipher_data.iter().enumerate() {
- if let Some(note) = &cipher.Notes {
+ if let Some(note) = &cipher.notes {
if note.len() > 10_000 {
validation_errors.insert(
format!("Ciphers[{index}].Notes"),
@@ -135,10 +136,6 @@ impl Cipher {
}
}
- let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
- let password_history_json =
- self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
-
// We don't need these values at all for Organizational syncs
// Skip any other database calls if this is the case and just return false.
let (read_only, hide_passwords) = if sync_type == CipherSyncType::User {
@@ -153,20 +150,34 @@ impl Cipher {
(false, false)
};
+ let fields_json = self
+ .fields
+ .as_ref()
+ .and_then(|s| serde_json::from_str::<LowerCase<Value>>(s).ok())
+ .unwrap_or_default()
+ .data;
+ let password_history_json = self
+ .password_history
+ .as_ref()
+ .and_then(|s| serde_json::from_str::<LowerCase<Value>>(s).ok())
+ .unwrap_or_default()
+ .data;
+
// Get the type_data or a default to an empty json object '{}'.
// If not passing an empty object, mobile clients will crash.
- let mut type_data_json: Value =
- serde_json::from_str(&self.data).unwrap_or_else(|_| Value::Object(serde_json::Map::new()));
+ let mut type_data_json = serde_json::from_str::<LowerCase<Value>>(&self.data)
+ .map(|d| d.data)
+ .unwrap_or_else(|_| Value::Object(serde_json::Map::new()));
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// Set the first element of the Uris array as Uri, this is needed by several (mobile) clients.
if self.atype == 1 {
- if type_data_json["Uris"].is_array() {
- let uri = type_data_json["Uris"][0]["Uri"].clone();
- type_data_json["Uri"] = uri;
+ if type_data_json["uris"].is_array() {
+ let uri = type_data_json["uris"][0]["uri"].clone();
+ type_data_json["uri"] = uri;
} else {
// Upstream always has an Uri key/value
- type_data_json["Uri"] = Value::Null;
+ type_data_json["uri"] = Value::Null;
}
}
@@ -175,10 +186,10 @@ impl Cipher {
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// data_json should always contain the following keys with every atype
- data_json["Fields"] = fields_json.clone();
- data_json["Name"] = json!(self.name);
- data_json["Notes"] = json!(self.notes);
- data_json["PasswordHistory"] = password_history_json.clone();
+ data_json["fields"] = fields_json.clone();
+ data_json["name"] = json!(self.name);
+ data_json["notes"] = json!(self.notes);
+ data_json["passwordHistory"] = password_history_json.clone();
let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data {
if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
@@ -198,48 +209,48 @@ impl Cipher {
//
// Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs
let mut json_object = json!({
- "Object": "cipherDetails",
- "Id": self.uuid,
- "Type": self.atype,
- "CreationDate": format_date(&self.created_at),
- "RevisionDate": format_date(&self.updated_at),
- "DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
- "Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
- "OrganizationId": self.organization_uuid,
- "Key": self.key,
- "Attachments": attachments_json,
+ "object": "cipherDetails",
+ "id": self.uuid,
+ "type": self.atype,
+ "creationDate": format_date(&self.created_at),
+ "revisionDate": format_date(&self.updated_at),
+ "deletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
+ "reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
+ "organizationId": self.organization_uuid,
+ "key": self.key,
+ "attachments": attachments_json,
// We have UseTotp set to true by default within the Organization model.
// This variable together with UsersGetPremium is used to show or hide the TOTP counter.
- "OrganizationUseTotp": true,
+ "organizationUseTotp": true,
// This field is specific to the cipherDetails type.
- "CollectionIds": collection_ids,
+ "collectionIds": collection_ids,
- "Name": self.name,
- "Notes": self.notes,
- "Fields": fields_json,
+ "name": self.name,
+ "notes": self.notes,
+ "fields": fields_json,
- "Data": data_json,
+ "data": data_json,
- "PasswordHistory": password_history_json,
+ "passwordHistory": password_history_json,
// All Cipher types are included by default as null, but only the matching one will be populated
- "Login": null,
- "SecureNote": null,
- "Card": null,
- "Identity": null,
+ "login": null,
+ "secureNote": null,
+ "card": null,
+ "identity": null,
});
// These values are only needed for user/default syncs
// Not during an organizational sync like `get_org_details`
// Skip adding these fields in that case
if sync_type == CipherSyncType::User {
- json_object["FolderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
+ json_object["folderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string())
} else {
self.get_folder_uuid(user_uuid, conn).await
});
- json_object["Favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
+ json_object["favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data {
cipher_sync_data.cipher_favorites.contains(&self.uuid)
} else {
self.is_favorite(user_uuid, conn).await
@@ -247,15 +258,15 @@ impl Cipher {
// These values are true by default, but can be false if the
// cipher belongs to a collection or group where the org owner has enabled
// the "Read Only" or "Hide Passwords" restrictions for the user.
- json_object["Edit"] = json!(!read_only);
- json_object["ViewPassword"] = json!(!hide_passwords);
+ json_object["edit"] = json!(!read_only);
+ json_object["viewPassword"] = json!(!hide_passwords);
}
let key = match self.atype {
- 1 => "Login",
- 2 => "SecureNote",
- 3 => "Card",
- 4 => "Identity",
+ 1 => "login",
+ 2 => "secureNote",
+ 3 => "card",
+ 4 => "identity",
_ => panic!("Wrong type"),
};
diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs
index ae70c76c..b10dd1d6 100644
--- a/src/db/models/collection.rs
+++ b/src/db/models/collection.rs
@@ -49,11 +49,11 @@ impl Collection {
pub fn to_json(&self) -> Value {
json!({
- "ExternalId": self.external_id,
- "Id": self.uuid,
- "OrganizationId": self.org_uuid,
- "Name": self.name,
- "Object": "collection",
+ "externalId": self.external_id,
+ "id": self.uuid,
+ "organizationId": self.org_uuid,
+ "name": self.name,
+ "object": "collection",
})
}
@@ -97,9 +97,9 @@ impl Collection {
};
let mut json_object = self.to_json();
- json_object["Object"] = json!("collectionDetails");
- json_object["ReadOnly"] = json!(read_only);
- json_object["HidePasswords"] = json!(hide_passwords);
+ json_object["object"] = json!("collectionDetails");
+ json_object["readOnly"] = json!(read_only);
+ json_object["hidePasswords"] = json!(hide_passwords);
json_object
}
diff --git a/src/db/models/emergency_access.rs b/src/db/models/emergency_access.rs
index a0f07e7f..b5e4eb86 100644
--- a/src/db/models/emergency_access.rs
+++ b/src/db/models/emergency_access.rs
@@ -58,11 +58,11 @@ impl EmergencyAccess {
pub fn to_json(&self) -> Value {
json!({
- "Id": self.uuid,
- "Status": self.status,
- "Type": self.atype,
- "WaitTimeDays": self.wait_time_days,
- "Object": "emergencyAccess",
+ "id": self.uuid,
+ "status": self.status,
+ "type": self.atype,
+ "waitTimeDays": self.wait_time_days,
+ "object": "emergencyAccess",
})
}
@@ -70,14 +70,14 @@ impl EmergencyAccess {
let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found.");
json!({
- "Id": self.uuid,
- "Status": self.status,
- "Type": self.atype,
- "WaitTimeDays": self.wait_time_days,
- "GrantorId": grantor_user.uuid,
- "Email": grantor_user.email,
- "Name": grantor_user.name,
- "Object": "emergencyAccessGrantorDetails",
+ "id": self.uuid,
+ "status": self.status,
+ "type": self.atype,
+ "waitTimeDays": self.wait_time_days,
+ "grantorId": grantor_user.uuid,
+ "email": grantor_user.email,
+ "name": grantor_user.name,
+ "object": "emergencyAccessGrantorDetails",
})
}
@@ -98,14 +98,14 @@ impl EmergencyAccess {
};
Some(json!({
- "Id": self.uuid,
- "Status": self.status,
- "Type": self.atype,
- "WaitTimeDays": self.wait_time_days,
- "GranteeId": grantee_user.uuid,
- "Email": grantee_user.email,
- "Name": grantee_user.name,
- "Object": "emergencyAccessGranteeDetails",
+ "id": self.uuid,
+ "status": self.status,
+ "type": self.atype,
+ "waitTimeDays": self.wait_time_days,
+ "granteeId": grantee_user.uuid,
+ "email": grantee_user.email,
+ "name": grantee_user.name,
+ "object": "emergencyAccessGranteeDetails",
}))
}
}
diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs
index 9385e78d..5370c9dd 100644
--- a/src/db/models/folder.rs
+++ b/src/db/models/folder.rs
@@ -43,10 +43,10 @@ impl Folder {
use crate::util::format_date;
json!({
- "Id": self.uuid,
- "RevisionDate": format_date(&self.updated_at),
- "Name": self.name,
- "Object": "folder",
+ "id": self.uuid,
+ "revisionDate": format_date(&self.updated_at),
+ "name": self.name,
+ "object": "folder",
})
}
}
diff --git a/src/db/models/group.rs b/src/db/models/group.rs
index 7faf7566..f6ccc710 100644
--- a/src/db/models/group.rs
+++ b/src/db/models/group.rs
@@ -58,14 +58,14 @@ impl Group {
use crate::util::format_date;
json!({
- "Id": self.uuid,
- "OrganizationId": self.organizations_uuid,
- "Name": self.name,
- "AccessAll": self.access_all,
- "ExternalId": self.external_id,
- "CreationDate": format_date(&self.creation_date),
- "RevisionDate": format_date(&self.revision_date),
- "Object": "group"
+ "id": self.uuid,
+ "organizationId": self.organizations_uuid,
+ "name": self.name,
+ "accessAll": self.access_all,
+ "externalId": self.external_id,
+ "creationDate": format_date(&self.creation_date),
+ "revisionDate": format_date(&self.revision_date),
+ "object": "group"
})
}
@@ -75,21 +75,21 @@ impl Group {
.iter()
.map(|entry| {
json!({
- "Id": entry.collections_uuid,
- "ReadOnly": entry.read_only,
- "HidePasswords": entry.hide_passwords
+ "id": entry.collections_uuid,
+ "readOnly": entry.read_only,
+ "hidePasswords": entry.hide_passwords
})
})
.collect();
json!({
- "Id": self.uuid,
- "OrganizationId": self.organizations_uuid,
- "Name": self.name,
- "AccessAll": self.access_all,
- "ExternalId": self.external_id,
- "Collections": collections_groups,
- "Object": "groupDetails"
+ "id": self.uuid,
+ "organizationId": self.organizations_uuid,
+ "name": self.name,
+ "accessAll": self.access_all,
+ "externalId": self.external_id,
+ "collections": collections_groups,
+ "object": "groupDetails"
})
}
diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs
index 18bbdcd3..6f6f894e 100644
--- a/src/db/models/org_policy.rs
+++ b/src/db/models/org_policy.rs
@@ -4,7 +4,6 @@ use serde_json::Value;
use crate::api::EmptyResult;
use crate::db::DbConn;
use crate::error::MapResult;
-use crate::util::UpCase;
use super::{TwoFactor, UserOrgStatus, UserOrgType, UserOrganization};
@@ -39,16 +38,18 @@ pub enum OrgPolicyType {
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct SendOptionsPolicyData {
- pub DisableHideEmail: bool,
+ #[serde(rename = "disableHideEmail", alias = "DisableHideEmail")]
+ pub disable_hide_email: bool,
}
// https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs
#[derive(Deserialize)]
-#[allow(non_snake_case)]
+#[serde(rename_all = "camelCase")]
pub struct ResetPasswordDataModel {
- pub AutoEnrollEnabled: bool,
+ #[serde(rename = "autoEnrollEnabled", alias = "AutoEnrollEnabled")]
+ pub auto_enroll_enabled: bool,
}
pub type OrgPolicyResult = Result<(), OrgPolicyErr>;
@@ -78,12 +79,12 @@ impl OrgPolicy {
pub fn to_json(&self) -> Value {
let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
json!({
- "Id": self.uuid,
- "OrganizationId": self.org_uuid,
- "Type": self.atype,
- "Data": data_json,
- "Enabled": self.enabled,
- "Object": "policy",
+ "id": self.uuid,
+ "organizationId": self.org_uuid,
+ "type": self.atype,
+ "data": data_json,
+ "enabled": self.enabled,
+ "object": "policy",
})
}
}
@@ -307,9 +308,9 @@ impl OrgPolicy {
pub async fn org_is_reset_password_auto_enroll(org_uuid: &str, conn: &mut DbConn) -> bool {
match OrgPolicy::find_by_org_and_type(org_uuid, OrgPolicyType::ResetPassword, conn).await {
- Some(policy) => match serde_json::from_str::<UpCase<ResetPasswordDataModel>>(&policy.data) {
+ Some(policy) => match serde_json::from_str::<ResetPasswordDataModel>(&policy.data) {
Ok(opts) => {
- return policy.enabled && opts.data.AutoEnrollEnabled;
+ return policy.enabled && opts.auto_enroll_enabled;
}
_ => error!("Failed to deserialize ResetPasswordDataModel: {}", policy.data),
},
@@ -327,9 +328,9 @@ impl OrgPolicy {
{
if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, &policy.org_uuid, conn).await {
if user.atype < UserOrgType::Admin {
- match serde_json::from_str::<UpCase<SendOptionsPolicyData>>(&policy.data) {
+ match serde_json::from_str::<SendOptionsPolicyData>(&policy.data) {
Ok(opts) => {
- if opts.data.DisableHideEmail {
+ if opts.disable_hide_email {
return true;
}
}
diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs
index 73088650..fce9f9c9 100644
--- a/src/db/models/organization.rs
+++ b/src/db/models/organization.rs
@@ -153,39 +153,39 @@ impl Organization {
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/Organizations/OrganizationResponseModel.cs
pub fn to_json(&self) -> Value {
json!({
- "Id": self.uuid,
- "Identifier": null, // not supported by us
- "Name": self.name,
- "Seats": 10, // The value doesn't matter, we don't check server-side
- // "MaxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side
- "MaxCollections": 10, // The value doesn't matter, we don't check server-side
- "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side
- "Use2fa": true,
- "UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
- "UseEvents": CONFIG.org_events_enabled(),
- "UseGroups": CONFIG.org_groups_enabled(),
- "UseTotp": true,
- "UsePolicies": true,
- // "UseScim": false, // Not supported (Not AGPLv3 Licensed)
- "UseSso": false, // Not supported
- // "UseKeyConnector": false, // Not supported
- "SelfHost": true,
- "UseApi": true,
- "HasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(),
- "UseResetPassword": CONFIG.mail_enabled(),
-
- "BusinessName": null,
- "BusinessAddress1": null,
- "BusinessAddress2": null,
- "BusinessAddress3": null,
- "BusinessCountry": null,
- "BusinessTaxNumber": null,
-
- "BillingEmail": self.billing_email,
- "Plan": "TeamsAnnually",
- "PlanType": 5, // TeamsAnnually plan
- "UsersGetPremium": true,
- "Object": "organization",
+ "id": self.uuid,
+ "identifier": null, // not supported by us
+ "name": self.name,
+ "seats": 10, // The value doesn't matter, we don't check server-side
+ // "maxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side
+ "maxCollections": 10, // The value doesn't matter, we don't check server-side
+ "maxStorageGb": 10, // The value doesn't matter, we don't check server-side
+ "use2fa": true,
+ "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
+ "useEvents": CONFIG.org_events_enabled(),
+ "useGroups": CONFIG.org_groups_enabled(),
+ "useTotp": true,
+ "usePolicies": true,
+ // "useScim": false, // Not supported (Not AGPLv3 Licensed)
+ "useSso": false, // Not supported
+ // "useKeyConnector": false, // Not supported
+ "selfHost": true,
+ "useApi": true,
+ "hasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(),
+ "useResetPassword": CONFIG.mail_enabled(),
+
+ "businessName": null,
+ "businessAddress1": null,
+ "businessAddress2": null,
+ "businessAddress3": null,
+ "businessCountry": null,
+ "businessTaxNumber": null,
+
+ "billingEmail": self.billing_email,
+ "plan": "TeamsAnnually",
+ "planType": 5, // TeamsAnnually plan
+ "usersGetPremium": true,
+ "object": "organization",
})
}
}
@@ -366,43 +366,60 @@ impl UserOrganization {
// https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs
json!({
- "Id": self.org_uuid,
- "Identifier": null, // Not supported
- "Name": org.name,
- "Seats": 10, // The value doesn't matter, we don't check server-side
- "MaxCollections": 10, // The value doesn't matter, we don't check server-side
- "UsersGetPremium": true,
- "Use2fa": true,
- "UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
- "UseEvents": CONFIG.org_events_enabled(),
- "UseGroups": CONFIG.org_groups_enabled(),
- "UseTotp": true,
- // "UseScim": false, // Not supported (Not AGPLv3 Licensed)
- "UsePolicies": true,
- "UseApi": true,
- "SelfHost": true,
- "HasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(),
- "ResetPasswordEnrolled": self.reset_password_key.is_some(),
- "UseResetPassword": CONFIG.mail_enabled(),
- "SsoBound": false, // Not supported
- "UseSso": false, // Not supported
- "ProviderId": null,
- "ProviderName": null,
- // "KeyConnectorEnabled": false,
- // "KeyConnectorUrl": null,
+ "id": self.org_uuid,
+ "identifier": null, // Not supported
+ "name": org.name,
+ "seats": 10, // The value doesn't matter, we don't check server-side
+ "maxCollections": 10, // The value doesn't matter, we don't check server-side
+ "usersGetPremium": true,
+ "use2fa": true,
+ "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
+ "useEvents": CONFIG.org_events_enabled(),
+ "useGroups": CONFIG.org_groups_enabled(),
+ "useTotp": true,
+ "useScim": false, // Not supported (Not AGPLv3 Licensed)
+ "usePolicies": true,
+ "useApi": true,
+ "selfHost": true,
+ "hasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(),
+ "resetPasswordEnrolled": self.reset_password_key.is_some(),
+ "useResetPassword": CONFIG.mail_enabled(),
+ "ssoBound": false, // Not supported
+ "useSso": false, // Not supported
+ "useKeyConnector": false,
+ "useSecretsManager": false,
+ "usePasswordManager": true,
+ "useCustomPermissions": false,
+ "useActivateAutofillPolicy": false,
+
+ "providerId": null,
+ "providerName": null,
+ "providerType": null,
+ "familySponsorshipFriendlyName": null,
+ "familySponsorshipAvailable": false,
+ "planProductType": 0,
+ "keyConnectorEnabled": false,
+ "keyConnectorUrl": null,
+ "familySponsorshipLastSyncDate": null,
+ "familySponsorshipValidUntil": null,
+ "familySponsorshipToDelete": null,
+ "accessSecretsManager": false,
+ "limitCollectionCreationDeletion": true,
+ "allowAdminAccessToAllCollectionItems": true,
+ "flexibleCollections": true,
"permissions": permissions,
- "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side
+ "maxStorageGb": 10, // The value doesn't matter, we don't check server-side
// These are per user
- "UserId": self.user_uuid,
- "Key": self.akey,
- "Status": self.status,
- "Type": self.atype,
- "Enabled": true,
+ "userId": self.user_uuid,
+ "key": self.akey,
+ "status": self.status,
+ "type": self.atype,
+ "enabled": true,
- "Object": "profileOrganization",
+ "object": "profileOrganization",
})
}
@@ -438,9 +455,9 @@ impl UserOrganization {
.iter()
.map(|cu| {
json!({
- "Id": cu.collection_uuid,
- "ReadOnly": cu.read_only,
- "HidePasswords": cu.hide_passwords,
+ "id": cu.collection_uuid,
+ "readOnly": cu.read_only,
+ "hidePasswords": cu.hide_passwords,
})
})
.collect()
@@ -449,29 +466,29 @@ impl UserOrganization {
};
json!({
- "Id": self.uuid,
- "UserId": self.user_uuid,
- "Name": user.name,
- "Email": user.email,
- "ExternalId": self.external_id,
- "Groups": groups,
- "Collections": collections,
-
- "Status": status,
- "Type": self.atype,
- "AccessAll": self.access_all,
- "TwoFactorEnabled": twofactor_enabled,
- "ResetPasswordEnrolled": self.reset_password_key.is_some(),
-
- "Object": "organizationUserUserDetails",
+ "id": self.uuid,
+ "userId": self.user_uuid,
+ "name": user.name,
+ "email": user.email,
+ "externalId": self.external_id,
+ "groups": groups,
+ "collections": collections,
+
+ "status": status,
+ "type": self.atype,
+ "accessAll": self.access_all,
+ "twoFactorEnabled": twofactor_enabled,
+ "resetPasswordEnrolled": self.reset_password_key.is_some(),
+
+ "object": "organizationUserUserDetails",
})
}
pub fn to_json_user_access_restrictions(&self, col_user: &CollectionUser) -> Value {
json!({
- "Id": self.uuid,
- "ReadOnly": col_user.read_only,
- "HidePasswords": col_user.hide_passwords,
+ "id": self.uuid,
+ "readOnly": col_user.read_only,
+ "hidePasswords": col_user.hide_passwords,
})
}
@@ -485,9 +502,9 @@ impl UserOrganization {
.iter()
.map(|c| {
json!({
- "Id": c.collection_uuid,
- "ReadOnly": c.read_only,
- "HidePasswords": c.hide_passwords,
+ "id": c.collection_uuid,
+ "readOnly": c.read_only,
+ "hidePasswords": c.hide_passwords,
})
})
.collect()
@@ -502,15 +519,15 @@ impl UserOrganization {
};
json!({
- "Id": self.uuid,
- "UserId": self.user_uuid,
+ "id": self.uuid,
+ "userId": self.user_uuid,
- "Status": status,
- "Type": self.atype,
- "AccessAll": self.access_all,
- "Collections": coll_uuids,
+ "status": status,
+ "type": self.atype,
+ "accessAll": self.access_all,
+ "collections": coll_uuids,
- "Object": "organizationUserDetails",
+ "object": "organizationUserDetails",
})
}
pub async fn save(&self, conn: &mut DbConn) -> EmptyResult {
diff --git a/src/db/models/send.rs b/src/db/models/send.rs
index 7cfeb478..ad842659 100644
--- a/src/db/models/send.rs
+++ b/src/db/models/send.rs
@@ -125,26 +125,26 @@ impl Send {
let data: Value = serde_json::from_str(&self.data).unwrap_or_default();
json!({
- "Id": self.uuid,
- "AccessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()),
- "Type": self.atype,
-
- "Name": self.name,
- "Notes": self.notes,
- "Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
- "File": if self.atype == SendType::File as i32 { Some(&data) } else { None },
-
- "Key": self.akey,
- "MaxAccessCount": self.max_access_count,
- "AccessCount": self.access_count,
- "Password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)),
- "Disabled": self.disabled,
- "HideEmail": self.hide_email,
-
- "RevisionDate": format_date(&self.revision_date),
- "ExpirationDate": self.expiration_date.as_ref().map(format_date),
- "DeletionDate": format_date(&self.deletion_date),
- "Object": "send",
+ "id": self.uuid,
+ "accessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()),
+ "type": self.atype,
+
+ "name": self.name,
+ "notes": self.notes,
+ "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
+ "file": if self.atype == SendType::File as i32 { Some(&data) } else { None },
+
+ "key": self.akey,
+ "maxAccessCount": self.max_access_count,
+ "accessCount": self.access_count,
+ "password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)),
+ "disabled": self.disabled,
+ "hideEmail": self.hide_email,
+
+ "revisionDate": format_date(&self.revision_date),
+ "expirationDate": self.expiration_date.as_ref().map(format_date),
+ "deletionDate": format_date(&self.deletion_date),
+ "object": "send",
})
}
@@ -154,16 +154,16 @@ impl Send {
let data: Value = serde_json::from_str(&self.data).unwrap_or_default();
json!({
- "Id": self.uuid,
- "Type": self.atype,
+ "id": self.uuid,
+ "type": self.atype,
- "Name": self.name,
- "Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
- "File": if self.atype == SendType::File as i32 { Some(&data) } else { None },
+ "name": self.name,
+ "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None },
+ "file": if self.atype == SendType::File as i32 { Some(&data) } else { None },
- "ExpirationDate": self.expiration_date.as_ref().map(format_date),
- "CreatorIdentifier": self.creator_identifier(conn).await,
- "Object": "send-access",
+ "expirationDate": self.expiration_date.as_ref().map(format_date),
+ "creatorIdentifier": self.creator_identifier(conn).await,
+ "object": "send-access",
})
}
}
@@ -290,25 +290,18 @@ impl Send {
pub async fn size_by_user(user_uuid: &str, conn: &mut DbConn) -> Option<i64> {
let sends = Self::find_by_user(user_uuid, conn).await;
- #[allow(non_snake_case)]
- #[derive(serde::Deserialize, Default)]
+ #[derive(serde::Deserialize)]
struct FileData {
- Size: Option<NumberOrString>,
- size: Option<NumberOrString>,
+ #[serde(rename = "size", alias = "Size")]
+ size: NumberOrString,
}
let mut total: i64 = 0;
for send in sends {
if send.atype == SendType::File as i32 {
- let data: FileData = serde_json::from_str(&send.data).unwrap_or_default();
-
- let size = match (data.size, data.Size) {
- (Some(s), _) => s.into_i64(),
- (_, Some(s)) => s.into_i64(),
- (None, None) => continue,
- };
-
- if let Ok(size) = size {
+ if let Ok(size) =
+ serde_json::from_str::<FileData>(&send.data).map_err(Into::into).and_then(|d| d.size.into_i64())
+ {
total = total.checked_add(size)?;
};
}
diff --git a/src/db/models/two_factor.rs b/src/db/models/two_factor.rs
index 530e35b4..2120a0e2 100644
--- a/src/db/models/two_factor.rs
+++ b/src/db/models/two_factor.rs
@@ -54,17 +54,17 @@ impl TwoFactor {
pub fn to_json(&self) -> Value {
json!({
- "Enabled": self.enabled,
- "Key": "", // This key and value vary
- "Object": "twoFactorAuthenticator" // This value varies
+ "enabled": self.enabled,
+ "key": "", // This key and value vary
+ "Oobject": "twoFactorAuthenticator" // This value varies
})
}
pub fn to_json_provider(&self) -> Value {
json!({
- "Enabled": self.enabled,
- "Type": self.atype,
- "Object": "twoFactorProvider"
+ "enabled": self.enabled,
+ "type": self.atype,
+ "object": "twoFactorProvider"
})
}
}
diff --git a/src/db/models/user.rs b/src/db/models/user.rs
index d87defd0..45cbb7f9 100644
--- a/src/db/models/user.rs
+++ b/src/db/models/user.rs
@@ -241,25 +241,25 @@ impl User {
json!({
"_Status": status as i32,
- "Id": self.uuid,
- "Name": self.name,
- "Email": self.email,
- "EmailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(),
- "Premium": true,
- "PremiumFromOrganization": false,
- "MasterPasswordHint": self.password_hint,
- "Culture": "en-US",
- "TwoFactorEnabled": twofactor_enabled,
- "Key": self.akey,
- "PrivateKey": self.private_key,
- "SecurityStamp": self.security_stamp,
- "Organizations": orgs_json,
- "Providers": [],
- "ProviderOrganizations": [],
- "ForcePasswordReset": false,
- "AvatarColor": self.avatar_color,
- "UsesKeyConnector": false,
- "Object": "profile",
+ "id": self.uuid,
+ "name": self.name,
+ "email": self.email,
+ "emailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(),
+ "premium": true,
+ "premiumFromOrganization": false,
+ "masterPasswordHint": self.password_hint,
+ "culture": "en-US",
+ "twoFactorEnabled": twofactor_enabled,
+ "key": self.akey,
+ "privateKey": self.private_key,
+ "securityStamp": self.security_stamp,
+ "organizations": orgs_json,
+ "providers": [],
+ "providerOrganizations": [],
+ "forcePasswordReset": false,
+ "avatarColor": self.avatar_color,
+ "usesKeyConnector": false,
+ "object": "profile",
})
}
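
The profile JSON above is built by hand with json!, so every key is renamed individually. For typed responses the same casing falls out of a single serde attribute; a minimal sketch (not part of this commit; the Profile struct and its fields are illustrative only):

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Profile {
    master_password_hint: Option<String>,
    two_factor_enabled: bool,
    avatar_color: Option<String>,
}

fn main() {
    let p = Profile { master_password_hint: None, two_factor_enabled: true, avatar_color: None };
    // Prints {"masterPasswordHint":null,"twoFactorEnabled":true,"avatarColor":null}
    println!("{}", serde_json::to_string(&p).unwrap());
}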
diff --git a/src/error.rs b/src/error.rs
index 784aad6a..afb1dc83 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -179,18 +179,18 @@ fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String {
fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
let json = json!({
- "Message": msg,
+ "message": msg,
"error": "",
"error_description": "",
- "ValidationErrors": {"": [ msg ]},
- "ErrorModel": {
- "Message": msg,
- "Object": "error"
+ "validationErrors": {"": [ msg ]},
+ "errorModel": {
+ "message": msg,
+ "object": "error"
},
- "ExceptionMessage": null,
- "ExceptionStackTrace": null,
- "InnerExceptionMessage": null,
- "Object": "error"
+ "exceptionMessage": null,
+ "exceptionStackTrace": null,
+ "innerExceptionMessage": null,
+ "object": "error"
});
_serialize(&json, "")
}
diff --git a/src/main.rs b/src/main.rs
index c7726a87..73085901 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,7 +3,7 @@
// The more key/value pairs there are the more recursion occurs.
// We want to keep this as low as possible, but not higher then 128.
// If you go above 128 it will cause rust-analyzer to fail,
-#![recursion_limit = "90"]
+#![recursion_limit = "200"]
// When enabled use MiMalloc as malloc instead of the default malloc
#[cfg(feature = "enable_mimalloc")]
diff --git a/src/static/global_domains.json b/src/static/global_domains.json
index 78458cbb..e3f08813 100644
--- a/src/static/global_domains.json
+++ b/src/static/global_domains.json
@@ -1,80 +1,80 @@
[
{
- "Type": 2,
- "Domains": [
+ "type": 2,
+ "domains": [
"ameritrade.com",
"tdameritrade.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 3,
- "Domains": [
+ "type": 3,
+ "domains": [
"bankofamerica.com",
"bofa.com",
"mbna.com",
"usecfo.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 4,
- "Domains": [
+ "type": 4,
+ "domains": [
"sprint.com",
"sprintpcs.com",
"nextel.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 0,
- "Domains": [
+ "type": 0,
+ "domains": [
"youtube.com",
"google.com",
"gmail.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 1,
- "Domains": [
+ "type": 1,
+ "domains": [
"apple.com",
"icloud.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 5,
- "Domains": [
+ "type": 5,
+ "domains": [
"wellsfargo.com",
"wf.com",
"wellsfargoadvisors.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 6,
- "Domains": [
+ "type": 6,
+ "domains": [
"mymerrill.com",
"ml.com",
"merrilledge.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 7,
- "Domains": [
+ "type": 7,
+ "domains": [
"accountonline.com",
"citi.com",
"citibank.com",
"citicards.com",
"citibankonline.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 8,
- "Domains": [
+ "type": 8,
+ "domains": [
"cnet.com",
"cnettv.com",
"com.com",
@@ -83,21 +83,21 @@
"search.com",
"upload.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 9,
- "Domains": [
+ "type": 9,
+ "domains": [
"bananarepublic.com",
"gap.com",
"oldnavy.com",
"piperlime.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 10,
- "Domains": [
+ "type": 10,
+ "domains": [
"bing.com",
"hotmail.com",
"live.com",
@@ -113,53 +113,53 @@
"azure.com",
"windowsazure.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 11,
- "Domains": [
+ "type": 11,
+ "domains": [
"ua2go.com",
"ual.com",
"united.com",
"unitedwifi.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 12,
- "Domains": [
+ "type": 12,
+ "domains": [
"overture.com",
"yahoo.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 13,
- "Domains": [
+ "type": 13,
+ "domains": [
"zonealarm.com",
"zonelabs.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 14,
- "Domains": [
+ "type": 14,
+ "domains": [
"paypal.com",
"paypal-search.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 15,
- "Domains": [
+ "type": 15,
+ "domains": [
"avon.com",
"youravon.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 16,
- "Domains": [
+ "type": 16,
+ "domains": [
"diapers.com",
"soap.com",
"wag.com",
@@ -172,19 +172,19 @@
"look.com",
"vinemarket.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 17,
- "Domains": [
+ "type": 17,
+ "domains": [
"1800contacts.com",
"800contacts.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 18,
- "Domains": [
+ "type": 18,
+ "domains": [
"amazon.com",
"amazon.com.be",
"amazon.ae",
@@ -205,240 +205,240 @@
"amazon.se",
"amazon.sg"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 19,
- "Domains": [
+ "type": 19,
+ "domains": [
"cox.com",
"cox.net",
"coxbusiness.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 20,
- "Domains": [
+ "type": 20,
+ "domains": [
"mynortonaccount.com",
"norton.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 21,
- "Domains": [
+ "type": 21,
+ "domains": [
"verizon.com",
"verizon.net"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 22,
- "Domains": [
+ "type": 22,
+ "domains": [
"rakuten.com",
"buy.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 23,
- "Domains": [
+ "type": 23,
+ "domains": [
"siriusxm.com",
"sirius.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 24,
- "Domains": [
+ "type": 24,
+ "domains": [
"ea.com",
"origin.com",
"play4free.com",
"tiberiumalliance.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 25,
- "Domains": [
+ "type": 25,
+ "domains": [
"37signals.com",
"basecamp.com",
"basecamphq.com",
"highrisehq.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 26,
- "Domains": [
+ "type": 26,
+ "domains": [
"steampowered.com",
"steamcommunity.com",
"steamgames.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 27,
- "Domains": [
+ "type": 27,
+ "domains": [
"chart.io",
"chartio.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 28,
- "Domains": [
+ "type": 28,
+ "domains": [
"gotomeeting.com",
"citrixonline.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 29,
- "Domains": [
+ "type": 29,
+ "domains": [
"gogoair.com",
"gogoinflight.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 30,
- "Domains": [
+ "type": 30,
+ "domains": [
"mysql.com",
"oracle.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 31,
- "Domains": [
+ "type": 31,
+ "domains": [
"discover.com",
"discovercard.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 32,
- "Domains": [
+ "type": 32,
+ "domains": [
"dcu.org",
"dcu-online.org"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 33,
- "Domains": [
+ "type": 33,
+ "domains": [
"healthcare.gov",
"cuidadodesalud.gov",
"cms.gov"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 34,
- "Domains": [
+ "type": 34,
+ "domains": [
"pepco.com",
"pepcoholdings.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 35,
- "Domains": [
+ "type": 35,
+ "domains": [
"century21.com",
"21online.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 36,
- "Domains": [
+ "type": 36,
+ "domains": [
"comcast.com",
"comcast.net",
"xfinity.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 37,
- "Domains": [
+ "type": 37,
+ "domains": [
"cricketwireless.com",
"aiowireless.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 38,
- "Domains": [
+ "type": 38,
+ "domains": [
"mandtbank.com",
"mtb.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 39,
- "Domains": [
+ "type": 39,
+ "domains": [
"dropbox.com",
"getdropbox.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 40,
- "Domains": [
+ "type": 40,
+ "domains": [
"snapfish.com",
"snapfish.ca"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 41,
- "Domains": [
+ "type": 41,
+ "domains": [
"alibaba.com",
"aliexpress.com",
"aliyun.com",
"net.cn"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 42,
- "Domains": [
+ "type": 42,
+ "domains": [
"playstation.com",
"sonyentertainmentnetwork.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 43,
- "Domains": [
+ "type": 43,
+ "domains": [
"mercadolivre.com",
"mercadolivre.com.br",
"mercadolibre.com",
"mercadolibre.com.ar",
"mercadolibre.com.mx"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 44,
- "Domains": [
+ "type": 44,
+ "domains": [
"zendesk.com",
"zopim.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 45,
- "Domains": [
+ "type": 45,
+ "domains": [
"autodesk.com",
"tinkercad.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 46,
- "Domains": [
+ "type": 46,
+ "domains": [
"railnation.ru",
"railnation.de",
"rail-nation.com",
@@ -447,152 +447,152 @@
"trucknation.de",
"traviangames.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 47,
- "Domains": [
+ "type": 47,
+ "domains": [
"wpcu.coop",
"wpcuonline.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 48,
- "Domains": [
+ "type": 48,
+ "domains": [
"mathletics.com",
"mathletics.com.au",
"mathletics.co.uk"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 49,
- "Domains": [
+ "type": 49,
+ "domains": [
"discountbank.co.il",
"telebank.co.il"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 50,
- "Domains": [
+ "type": 50,
+ "domains": [
"mi.com",
"xiaomi.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 52,
- "Domains": [
+ "type": 52,
+ "domains": [
"postepay.it",
"poste.it"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 51,
- "Domains": [
+ "type": 51,
+ "domains": [
"facebook.com",
"messenger.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 53,
- "Domains": [
+ "type": 53,
+ "domains": [
"skysports.com",
"skybet.com",
"skyvegas.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 54,
- "Domains": [
+ "type": 54,
+ "domains": [
"disneymoviesanywhere.com",
"go.com",
"disney.com",
"dadt.com",
"disneyplus.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 55,
- "Domains": [
+ "type": 55,
+ "domains": [
"pokemon-gl.com",
"pokemon.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 56,
- "Domains": [
+ "type": 56,
+ "domains": [
"myuv.com",
"uvvu.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 58,
- "Domains": [
+ "type": 58,
+ "domains": [
"mdsol.com",
"imedidata.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 57,
- "Domains": [
+ "type": 57,
+ "domains": [
"bank-yahav.co.il",
"bankhapoalim.co.il"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 59,
- "Domains": [
+ "type": 59,
+ "domains": [
"sears.com",
"shld.net"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 60,
- "Domains": [
+ "type": 60,
+ "domains": [
"xiami.com",
"alipay.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 61,
- "Domains": [
+ "type": 61,
+ "domains": [
"belkin.com",
"seedonk.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 62,
- "Domains": [
+ "type": 62,
+ "domains": [
"turbotax.com",
"intuit.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 63,
- "Domains": [
+ "type": 63,
+ "domains": [
"shopify.com",
"myshopify.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 64,
- "Domains": [
+ "type": 64,
+ "domains": [
"ebay.com",
"ebay.at",
"ebay.be",
@@ -617,53 +617,53 @@
"ebay.ph",
"ebay.pl"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 65,
- "Domains": [
+ "type": 65,
+ "domains": [
"techdata.com",
"techdata.ch"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 66,
- "Domains": [
+ "type": 66,
+ "domains": [
"schwab.com",
"schwabplan.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 68,
- "Domains": [
+ "type": 68,
+ "domains": [
"tesla.com",
"teslamotors.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 69,
- "Domains": [
+ "type": 69,
+ "domains": [
"morganstanley.com",
"morganstanleyclientserv.com",
"stockplanconnect.com",
"ms.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 70,
- "Domains": [
+ "type": 70,
+ "domains": [
"taxact.com",
"taxactonline.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 71,
- "Domains": [
+ "type": 71,
+ "domains": [
"mediawiki.org",
"wikibooks.org",
"wikidata.org",
@@ -676,11 +676,11 @@
"wikivoyage.org",
"wiktionary.org"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 72,
- "Domains": [
+ "type": 72,
+ "domains": [
"airbnb.at",
"airbnb.be",
"airbnb.ca",
@@ -735,11 +735,11 @@
"airbnb.ru",
"airbnb.se"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 73,
- "Domains": [
+ "type": 73,
+ "domains": [
"eventbrite.at",
"eventbrite.be",
"eventbrite.ca",
@@ -767,11 +767,11 @@
"eventbrite.se",
"eventbrite.sg"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 74,
- "Domains": [
+ "type": 74,
+ "domains": [
"stackexchange.com",
"superuser.com",
"stackoverflow.com",
@@ -780,19 +780,19 @@
"askubuntu.com",
"stackapps.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 75,
- "Domains": [
+ "type": 75,
+ "domains": [
"docusign.com",
"docusign.net"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 76,
- "Domains": [
+ "type": 76,
+ "domains": [
"envato.com",
"themeforest.net",
"codecanyon.net",
@@ -802,28 +802,28 @@
"photodune.net",
"3docean.net"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 77,
- "Domains": [
+ "type": 77,
+ "domains": [
"x10hosting.com",
"x10premium.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 78,
- "Domains": [
+ "type": 78,
+ "domains": [
"dnsomatic.com",
"opendns.com",
"umbrella.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 79,
- "Domains": [
+ "type": 79,
+ "domains": [
"cagreatamerica.com",
"canadaswonderland.com",
"carowinds.com",
@@ -838,36 +838,36 @@
"visitkingsisland.com",
"worldsoffun.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 80,
- "Domains": [
+ "type": 80,
+ "domains": [
"ubnt.com",
"ui.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 81,
- "Domains": [
+ "type": 81,
+ "domains": [
"discordapp.com",
"discord.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 82,
- "Domains": [
+ "type": 82,
+ "domains": [
"netcup.de",
"netcup.eu",
"customercontrolpanel.de"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 83,
- "Domains": [
+ "type": 83,
+ "domains": [
"yandex.com",
"ya.ru",
"yandex.az",
@@ -891,44 +891,44 @@
"yandex.ua",
"yandex.uz"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 84,
- "Domains": [
+ "type": 84,
+ "domains": [
"sonyentertainmentnetwork.com",
"sony.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 85,
- "Domains": [
+ "type": 85,
+ "domains": [
"proton.me",
"protonmail.com",
"protonvpn.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 86,
- "Domains": [
+ "type": 86,
+ "domains": [
"ubisoft.com",
"ubi.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 87,
- "Domains": [
+ "type": 87,
+ "domains": [
"transferwise.com",
"wise.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 88,
- "Domains": [
+ "type": 88,
+ "domains": [
"takeaway.com",
"just-eat.dk",
"just-eat.no",
@@ -939,11 +939,11 @@
"thuisbezorgd.nl",
"pyszne.pl"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 89,
- "Domains": [
+ "type": 89,
+ "domains": [
"atlassian.com",
"bitbucket.org",
"trello.com",
@@ -951,11 +951,11 @@
"atlassian.net",
"jira.com"
],
- "Excluded": false
+ "excluded": false
},
{
- "Type": 90,
- "Domains": [
+ "type": 90,
+ "domains": [
"pinterest.com",
"pinterest.com.au",
"pinterest.cl",
@@ -970,6 +970,6 @@
"pinterest.pt",
"pinterest.se"
],
- "Excluded": false
+ "excluded": false
}
]
\ No newline at end of file
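
The util.rs changes that follow swap the first-letter-uppercasing helpers for lowercasing ones so incoming PascalCase keys are normalized to camelCase. A minimal sketch of what a first-character lowercasing helper like lcase_first does (the implementation here is illustrative, not the project's):

fn lcase_first(s: &str) -> String {
    let mut chars = s.chars();
    match chars.next() {
        None => String::new(),
        // Lowercase only the first character, keep the rest untouched.
        Some(c) => c.to_lowercase().collect::<String>() + chars.as_str(),
    }
}

fn main() {
    assert_eq!(lcase_first("MasterPasswordHint"), "masterPasswordHint");
    assert_eq!(lcase_first(""), "");
}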
diff --git a/src/util.rs b/src/util.rs
index e96a1741..8157f269 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -526,25 +526,33 @@ use serde_json::Value;
pub type JsonMap = serde_json::Map<String, Value>;
#[derive(Serialize, Deserialize)]
-pub struct UpCase<T: DeserializeOwned> {
- #[serde(deserialize_with = "upcase_deserialize")]
+pub struct LowerCase<T: DeserializeOwned> {
+ #[serde(deserialize_with = "lowercase_deserialize")]
#[serde(flatten)]
pub data: T,
}
+impl Default for LowerCase<Value> {
+ fn default() -> Self {
+ Self {
+ data: Value::Null,
+ }
+ }
+}
+
// https://github.com/serde-rs/serde/issues/586
-pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
+pub fn lowercase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: DeserializeOwned,
D: Deserializer<'de>,
{
- let d = deserializer.deserialize_any(UpCaseVisitor)?;
+ let d = deserializer.deserialize_any(LowerCaseVisitor)?;
T::deserialize(d).map_err(de::Error::custom)
}
-struct UpCaseVisitor;
+struct LowerCaseVisitor;
-impl<'de> Visitor<'de> for UpCaseVisitor {
+impl<'de> Visitor<'de> for LowerCaseVisitor {
type Value = Value;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -558,7 +566,7 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
let mut result_map = JsonMap::new();
while let Some((key, value)) = map.next_entry()? {
- result_map.insert(upcase_first(key), upcase_value(value));
+ result_map.insert(lcase_first(key), lowercase_value(value));
}
Ok(Value::Object(result_map))
@@ -571,20 +579,20 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
let mut result_seq = Vec::<Value>::new();
while let Some(value) = seq.next_element()? {
- result_seq.push(upcase_value(value));
+ result_seq.push(lowercase_value(value));
}
Ok(Value::Array(result_seq))
}
}
-fn upcase_value(value: Value) -> Value {
+fn lowercase_value(value: Value) -> Value {
if let Value::Object(map) = value {
let mut new_value = Value::Object(serde_json::Map::new());
for (key, val) in map.into_iter() {
let processed_key = _process_key(&key);
- new_value[processed_key] = upcase_value(val);
+ new_value[processed_key] = lowercase_value(val);
}
new_value
} else if let Value::Array(array) = value {
@@ -592,7 +600,7 @@ fn upcase_value(value: Value) -> Value {
let mut new_value = Value::Array(vec![Value::Null; array.len()]);
for (index, val) in array.into_iter().enumerate() {
- new_value[index] = upcase_value(val);
+ new_value[index] = lowercase_value(val);
}
new_value
} else {
@@ -604,12 +612,12 @@ fn upcase_value(value: Value) -> Value {
// This key is part of the Identity Cipher (Social Security Number)
fn _process_key(key: &str) -> String {
match key.to_lowercase().as_ref() {
- "ssn" => "SSN".into(),
- _ => self::upcase_first(key),
+ "ssn" => "ssn".into(),
+ _ => self::lcase_first(key),
}
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum NumberOrString {
Number(i64),