path: root/src/api/admin.rs
author     BlackDex <[email protected]>   2022-06-08 19:46:33 +0200
committer  BlackDex <[email protected]>   2022-06-14 14:51:51 +0200
commit     5d05ec58be9e6dcb028d69a4280a1d9e9d99f20e (patch)
tree       a7e1f4403a922b8624fb8e42fed5ba25533115e1 /src/api/admin.rs
parent     f95bd3bb04839ea4fa8f2700cd3867ee12b260b0 (diff)
Updated deps and misc fixes and updates
- Updated some Rust dependencies.
- Fixed an issue with the CSP header; it was not configured correctly.
- Prevent sending CSP and Frame headers for the MFA connector.html files, otherwise some clients fail to handle these protocols.
- Add `unsafe-inline` for `script-src` only to the CSP for the Admin Interface.
- Updated JavaScript and CSS files for the Admin interface.
- Changed the layout for showing overridden settings; they are now better visible.
- Made the version check cacheable to prevent hitting the GitHub API rate limits (a standalone sketch of the caching macro follows this list).
- Hide the `database_url` as if it were a password in the Admin Interface; otherwise it was shown in plain text for MariaDB/MySQL and PostgreSQL.
- Fixed an issue where pressing Enter on the SMTP Test would save the config. Resolves #2542.
- Prevent user names longer than 50 characters. Resolves #2419.
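
Aside (editor's sketch, not part of the commit): the hunk below adds the `cached` crate's proc macro to get_release_info(). As a minimal, standalone illustration of how that macro behaves, assuming a 2022-era `cached` release with its default features, the snippet memoises a function's result per argument set for 300 seconds; the function name expensive_lookup and its body are placeholders, and only the time/sync_writes attributes mirror the real diff.

use cached::proc_macro::cached;
use std::time::Instant;

// Results are memoised per argument set and expire after `time` seconds;
// `sync_writes` makes concurrent callers wait for the first computation
// instead of racing to fill the cache.
#[cached(time = 300, sync_writes = true)]
fn expensive_lookup(key: String) -> String {
    // Stand-in for the three GitHub API calls made in get_release_info().
    format!("release info for {}", key)
}

fn main() {
    let start = Instant::now();
    let first = expensive_lookup("vaultwarden".to_string());
    let second = expensive_lookup("vaultwarden".to_string()); // served from the cache
    assert_eq!(first, second);
    println!("two lookups took {:?}", start.elapsed());
}

A Cargo.toml entry such as cached = "0.34" (default features enabled) would be needed; the exact version vaultwarden pinned at the time may differ.
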
Diffstat (limited to 'src/api/admin.rs')
-rw-r--r--  src/api/admin.rs  77
1 file changed, 43 insertions, 34 deletions
diff --git a/src/api/admin.rs b/src/api/admin.rs
index 37b169ed..63edde66 100644
--- a/src/api/admin.rs
+++ b/src/api/admin.rs
@@ -491,41 +491,14 @@ async fn has_http_access() -> bool {
}
}
-#[get("/diagnostics")]
-async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
- use crate::util::read_file_string;
- use chrono::prelude::*;
- use std::net::ToSocketAddrs;
-
- // Get current running versions
- let web_vault_version: WebVaultVersion =
- match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
- Ok(s) => serde_json::from_str(&s)?,
- _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
- Ok(s) => serde_json::from_str(&s)?,
- _ => WebVaultVersion {
- version: String::from("Version file missing"),
- },
- },
- };
-
- // Execute some environment checks
- let running_within_docker = is_running_in_docker();
- let has_http_access = has_http_access().await;
- let uses_proxy = env::var_os("HTTP_PROXY").is_some()
- || env::var_os("http_proxy").is_some()
- || env::var_os("HTTPS_PROXY").is_some()
- || env::var_os("https_proxy").is_some();
-
- // Check if we are able to resolve DNS entries
- let dns_resolved = match ("github.com", 0).to_socket_addrs().map(|mut i| i.next()) {
- Ok(Some(a)) => a.ip().to_string(),
- _ => "Could not resolve domain name.".to_string(),
- };
-
+use cached::proc_macro::cached;
+/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already.
+/// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit.
+#[cached(time = 300, sync_writes = true)]
+async fn get_release_info(has_http_access: bool, running_within_docker: bool) -> (String, String, String) {
// If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway.
- // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
- let (latest_release, latest_commit, latest_web_build) = if has_http_access {
+ if has_http_access {
+ info!("Running get_release_info!!");
(
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest")
.await
@@ -558,8 +531,44 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
)
} else {
("-".to_string(), "-".to_string(), "-".to_string())
+ }
+}
+
+#[get("/diagnostics")]
+async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
+ use crate::util::read_file_string;
+ use chrono::prelude::*;
+ use std::net::ToSocketAddrs;
+
+ // Get current running versions
+ let web_vault_version: WebVaultVersion =
+ match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
+ Ok(s) => serde_json::from_str(&s)?,
+ _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
+ Ok(s) => serde_json::from_str(&s)?,
+ _ => WebVaultVersion {
+ version: String::from("Version file missing"),
+ },
+ },
+ };
+
+ // Execute some environment checks
+ let running_within_docker = is_running_in_docker();
+ let has_http_access = has_http_access().await;
+ let uses_proxy = env::var_os("HTTP_PROXY").is_some()
+ || env::var_os("http_proxy").is_some()
+ || env::var_os("HTTPS_PROXY").is_some()
+ || env::var_os("https_proxy").is_some();
+
+ // Check if we are able to resolve DNS entries
+ let dns_resolved = match ("github.com", 0).to_socket_addrs().map(|mut i| i.next()) {
+ Ok(Some(a)) => a.ip().to_string(),
+ _ => "Could not resolve domain name.".to_string(),
};
+ let (latest_release, latest_commit, latest_web_build) =
+ get_release_info(has_http_access, running_within_docker).await;
+
let ip_header_name = match &ip_header.0 {
Some(h) => h,
_ => "",