Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-09-09 18:25:58 +03:00)
Compare commits
8 Commits
45e5f06b86
620ad92331
c9860af11c
d7adce97df
71b3d3c818
da3701c0cf
96813b1317
b0b953f348
.github/workflows/build.yml (vendored): 15 lines changed
@@ -47,7 +47,7 @@ jobs:
     steps:
       # Checkout the repo
       - name: "Checkout"
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
       # End Checkout the repo

@@ -75,7 +75,7 @@ jobs:
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
+        uses: dtolnay/rust-toolchain@315e265cd78dad1e1dcf3a5074f6d6c47029d5aa # master @ Nov 18, 2024, 5:36 AM GMT+1
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"

@@ -85,7 +85,7 @@ jobs:
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
+        uses: dtolnay/rust-toolchain@315e265cd78dad1e1dcf3a5074f6d6c47029d5aa # master @ Nov 18, 2024, 5:36 AM GMT+1
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"

@@ -107,7 +107,7 @@ jobs:
       # End Show environment

       # Enable Rust Caching
-      - uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
+      - uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2.7.5
        with:
          # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
          # Like changing the build host from Ubuntu 20.04 to 22.04 for example.

@@ -117,6 +117,12 @@ jobs:
       # Run cargo tests
       # First test all features together, afterwards test them separately.
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
+        id: test_sqlite_mysql_postgresql_mimalloc_logger
+        if: $${{ always() }}
+        run: |
+          cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
+
       - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
         id: test_sqlite_mysql_postgresql_mimalloc
         if: $${{ always() }}

@@ -176,6 +182,7 @@ jobs:
           echo "" >> $GITHUB_STEP_SUMMARY
           echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
           echo "|---|------|" >> $GITHUB_STEP_SUMMARY
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
           echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
           echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
           echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
.github/workflows/hadolint.yml (vendored): 2 lines changed
@@ -13,7 +13,7 @@ jobs:
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
       # End Checkout the repo

       # Start Docker Buildx
.github/workflows/release.yml (vendored): 2 lines changed
@@ -58,7 +58,7 @@ jobs:
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          fetch-depth: 0
.github/workflows/trivy.yml (vendored): 9 lines changed
@@ -28,10 +28,13 @@ jobs:
       actions: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2

       - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0
+        uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0
+        env:
+          TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
+          TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
        with:
          scan-type: repo
          ignore-unfixed: true

@@ -40,6 +43,6 @@ jobs:
          severity: CRITICAL,HIGH

       - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6
+        uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5
        with:
          sarif_file: 'trivy-results.sarif'
Cargo.lock (generated): 583 lines changed
File diff suppressed because it is too large
Cargo.toml: 48 lines changed
@@ -3,7 +3,7 @@ name = "vaultwarden"
|
||||
version = "1.0.0"
|
||||
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.80.0"
|
||||
rust-version = "1.82.0"
|
||||
resolver = "2"
|
||||
|
||||
repository = "https://github.com/dani-garcia/vaultwarden"
|
||||
@@ -36,13 +36,13 @@ unstable = []
|
||||
|
||||
[target."cfg(unix)".dependencies]
|
||||
# Logging
|
||||
syslog = "6.1.1"
|
||||
syslog = "7.0.0"
|
||||
|
||||
[dependencies]
|
||||
# Logging
|
||||
log = "0.4.22"
|
||||
fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] }
|
||||
tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
|
||||
fern = { version = "0.7.0", features = ["syslog-7", "reopen-1"] }
|
||||
tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
|
||||
|
||||
# A `dotenv` implementation for Rust
|
||||
dotenvy = { version = "0.15.7", default-features = false }
|
||||
@@ -53,7 +53,7 @@ once_cell = "1.20.2"
|
||||
# Numerical libraries
|
||||
num-traits = "0.2.19"
|
||||
num-derive = "0.4.2"
|
||||
bigdecimal = "0.4.6"
|
||||
bigdecimal = "0.4.7"
|
||||
|
||||
# Web framework
|
||||
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
|
||||
@@ -67,16 +67,16 @@ dashmap = "6.1.0"
|
||||
|
||||
# Async futures
|
||||
futures = "0.3.31"
|
||||
tokio = { version = "1.41.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||
tokio = { version = "1.42.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||
|
||||
# A generic serialization/deserialization framework
|
||||
serde = { version = "1.0.214", features = ["derive"] }
|
||||
serde_json = "1.0.132"
|
||||
serde = { version = "1.0.215", features = ["derive"] }
|
||||
serde_json = "1.0.133"
|
||||
|
||||
# A safe, extensible ORM and Query builder
|
||||
diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] }
|
||||
diesel = { version = "2.2.6", features = ["chrono", "r2d2", "numeric"] }
|
||||
diesel_migrations = "2.2.0"
|
||||
diesel_logger = { version = "0.3.0", optional = true }
|
||||
diesel_logger = { version = "0.4.0", optional = true }
|
||||
|
||||
# Bundled/Static SQLite
|
||||
libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true }
|
||||
@@ -89,9 +89,9 @@ ring = "0.17.8"
|
||||
uuid = { version = "1.11.0", features = ["v4"] }
|
||||
|
||||
# Date and time libraries
|
||||
chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false }
|
||||
chrono = { version = "0.4.39", features = ["clock", "serde"], default-features = false }
|
||||
chrono-tz = "0.10.0"
|
||||
time = "0.3.36"
|
||||
time = "0.3.37"
|
||||
|
||||
# Job scheduler
|
||||
job_scheduler_ng = "2.0.5"
|
||||
@@ -106,16 +106,16 @@ jsonwebtoken = "9.3.0"
|
||||
totp-lite = "2.0.1"
|
||||
|
||||
# Yubico Library
|
||||
yubico = { version = "0.11.0", features = ["online-tokio"], default-features = false }
|
||||
yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false }
|
||||
|
||||
# WebAuthn libraries
|
||||
webauthn-rs = "0.3.2"
|
||||
|
||||
# Handling of URL's for WebAuthn and favicons
|
||||
url = "2.5.3"
|
||||
url = "2.5.4"
|
||||
|
||||
# Email libraries
|
||||
lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||
lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
|
||||
email_address = "0.2.9"
|
||||
|
||||
@@ -124,13 +124,13 @@ handlebars = { version = "6.2.0", features = ["dir_source"] }
|
||||
|
||||
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
||||
reqwest = { version = "0.12.9", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
|
||||
hickory-resolver = "0.24.1"
|
||||
hickory-resolver = "0.24.2"
|
||||
|
||||
# Favicon extraction libraries
|
||||
html5gum = "0.6.1"
|
||||
html5gum = "0.7.0"
|
||||
regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
|
||||
data-url = "0.3.1"
|
||||
bytes = "1.8.0"
|
||||
bytes = "1.9.0"
|
||||
|
||||
# Cache function results (Used for version check and favicon fetching)
|
||||
cached = { version = "0.54.0", features = ["async"] }
|
||||
@@ -166,6 +166,12 @@ rpassword = "7.3.1"
|
||||
# Loading a dynamic CSS Stylesheet
|
||||
grass_compiler = { version = "0.13.4", default-features = false }
|
||||
|
||||
[patch.crates-io]
|
||||
# Patch fern to support syslog v7
|
||||
fern = { git = "https://github.com/daboross/fern", rev = "3e775ccfafe7d24baee39826d38011981b2e55b5" }
|
||||
# Patch yubico to remove duplicate crates of older versions
|
||||
yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" }
|
||||
|
||||
# Strip debuginfo from the release builds
|
||||
# The symbols are the provide better panic traces
|
||||
# Also enable fat LTO and use 1 codegen unit for optimizations
|
||||
@@ -216,7 +222,8 @@ noop_method_call = "deny"
|
||||
refining_impl_trait = { level = "deny", priority = -1 }
|
||||
rust_2018_idioms = { level = "deny", priority = -1 }
|
||||
rust_2021_compatibility = { level = "deny", priority = -1 }
|
||||
# rust_2024_compatibility = { level = "deny", priority = -1 } # Enable once we are at MSRV 1.81.0
|
||||
rust_2024_compatibility = { level = "deny", priority = -1 }
|
||||
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
|
||||
single_use_lifetimes = "deny"
|
||||
trivial_casts = "deny"
|
||||
trivial_numeric_casts = "deny"
|
||||
@@ -225,9 +232,6 @@ unused_import_braces = "deny"
|
||||
unused_lifetimes = "deny"
|
||||
unused_qualifications = "deny"
|
||||
variant_size_differences = "deny"
|
||||
# The lints below are part of the rust_2024_compatibility group
|
||||
static-mut-refs = "deny"
|
||||
unsafe-op-in-unsafe-fn = "deny"
|
||||
|
||||
# https://rust-lang.github.io/rust-clippy/stable/index.html
|
||||
[lints.clippy]
|
||||
|
SECURITY.md

@@ -21,7 +21,7 @@ notify us. We welcome working with you to resolve the issue promptly. Thanks in
The following bug classes are out-of scope:

- Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
-- Bugs that are not part of Vaultwarden, like on the the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
+- Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
- Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
- Attacks requiring physical access to a user's device
- Issues related to software or protocols not under Vaultwarden's control
docker/DockerSettings.yaml

@@ -5,9 +5,9 @@ vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf716
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa"
-rust_version: 1.82.0 # Rust version to be used
+rust_version: 1.83.0 # Rust version to be used
debian_version: bookworm # Debian release name to be used
-alpine_version: "3.20" # Alpine version to be used
+alpine_version: "3.21" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
docker/Dockerfile.alpine

@@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.83.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.83.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.83.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.83.0 AS build_armv6

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006

@@ -126,7 +126,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
-FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.20
+FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.21

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
docker/Dockerfile.debian

@@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0d
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.83.0-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
docker/README.md

@@ -46,7 +46,7 @@ There also is an option to use an other docker container to provide support for
```bash
# To install and activate
docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
-# To unistall
+# To uninstall
docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
```
docker/docker-bake.hcl

@@ -17,7 +17,7 @@ variable "SOURCE_REPOSITORY_URL" {
  default = null
}

-// The commit hash of of the current commit this build was triggered on
+// The commit hash of the current commit this build was triggered on
variable "SOURCE_COMMIT" {
  default = null
}
rust-toolchain.toml

@@ -1,4 +1,4 @@
[toolchain]
-channel = "1.82.0"
+channel = "1.83.0"
components = [ "rustfmt", "clippy" ]
profile = "minimal"
src/api/admin.rs

@@ -62,6 +62,7 @@ pub fn routes() -> Vec<Route> {
        diagnostics,
        get_diagnostics_config,
        resend_user_invite,
+       get_diagnostics_http,
    ]
}

@@ -713,6 +714,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
        "ip_header_name": ip_header_name,
        "ip_header_config": &CONFIG.ip_header(),
        "uses_proxy": uses_proxy,
+       "enable_websocket": &CONFIG.enable_websocket(),
        "db_type": *DB_TYPE,
        "db_version": get_sql_server_version(&mut conn).await,
        "admin_url": format!("{}/diagnostics", admin_url()),

@@ -734,6 +736,11 @@ fn get_diagnostics_config(_token: AdminToken) -> Json<Value> {
    Json(support_json)
}

+#[get("/diagnostics/http?<code>")]
+fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult {
+    err_code!(format!("Testing error {code} response"), code);
+}
+
#[post("/config", data = "<data>")]
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
    let data: ConfigBuilder = data.into_inner();
src/api/core/ciphers.rs

@@ -511,7 +511,7 @@ pub async fn update_cipher_from_data(
    cipher.fields = data.fields.map(|f| _clean_cipher_data(f).to_string());
    cipher.data = type_data.to_string();
    cipher.password_history = data.password_history.map(|f| f.to_string());
-   cipher.reprompt = data.reprompt;
+   cipher.reprompt = data.reprompt.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32);

    cipher.save(conn).await?;
    cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?;

@@ -1652,7 +1652,7 @@ struct BulkCollectionsData {
    remove_collections: bool,
}

-// This endpoint is only reachable via the organization view, therefor this endpoint is located here
+// This endpoint is only reachable via the organization view, therefore this endpoint is located here
// Also Bitwarden does not send out Notifications for these changes, it only does this for individual cipher collection updates
#[post("/ciphers/bulk-collections", data = "<data>")]
async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
src/api/core/organizations.rs

@@ -2789,7 +2789,7 @@ struct OrganizationUserResetPasswordRequest {
    key: String,
}

-// Upstrem reports this is the renamed endpoint instead of `/keys`
+// Upstream reports this is the renamed endpoint instead of `/keys`
// But the clients do not seem to use this at all
// Just add it here in case they will
#[get("/organizations/<org_id>/public-key")]
src/api/icons.rs

@@ -19,7 +19,7 @@ use tokio::{
    io::{AsyncReadExt, AsyncWriteExt},
};

-use html5gum::{Emitter, HtmlString, InfallibleTokenizer, Readable, StringReader, Tokenizer};
+use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};

use crate::{
    error::Error,

@@ -261,11 +261,7 @@ impl Icon {
    }
}

-fn get_favicons_node(
-    dom: InfallibleTokenizer<StringReader<'_>, FaviconEmitter>,
-    icons: &mut Vec<Icon>,
-    url: &url::Url,
-) {
+fn get_favicons_node(dom: Tokenizer<StringReader<'_>, FaviconEmitter>, icons: &mut Vec<Icon>, url: &url::Url) {
    const TAG_LINK: &[u8] = b"link";
    const TAG_BASE: &[u8] = b"base";
    const TAG_HEAD: &[u8] = b"head";

@@ -274,7 +270,7 @@
    let mut base_url = url.clone();
    let mut icon_tags: Vec<Tag> = Vec::new();
-   for token in dom {
+   for Ok(token) in dom {
        let tag_name: &[u8] = &token.tag.name;
        match tag_name {
            TAG_LINK => {

@@ -401,7 +397,7 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
    // 384KB should be more than enough for the HTML, though as we only really need the HTML header.
    let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec();

-   let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default()).infallible();
+   let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default());
    get_favicons_node(dom, &mut iconlist, &url);
} else {
    // Add the default favicon.ico to the list with just the given domain

@@ -662,7 +658,7 @@ impl reqwest::cookie::CookieStore for Jar {
/// The FaviconEmitter is using an optimized version of the DefaultEmitter.
/// This prevents emitting tags like comments, doctype and also strings between the tags.
/// But it will also only emit the tags we need and only if they have the correct attributes
-/// Therefor parsing the HTML content is faster.
+/// Therefore parsing the HTML content is faster.
use std::collections::BTreeMap;

#[derive(Default)]
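A side note on the `for Ok(token) in dom` loop that replaces the old `.infallible()` adapter: with html5gum 0.7 the tokenizer yields `Result` items whose error type is uninhabited for in-memory readers, and Rust 1.82 (the new MSRV set in Cargo.toml above) stabilized treating patterns that omit only uninhabited variants as irrefutable, so `Ok(token)` is now legal in a `for` loop. A minimal sketch of the idea with a stand-in iterator (names here are illustrative, not vaultwarden's API):

```rust
use std::convert::Infallible;

// Stand-in for a tokenizer over an in-memory string: the error type is
// `Infallible`, so an `Err(_)` value can never exist.
fn tokens() -> impl Iterator<Item = Result<&'static str, Infallible>> {
    ["<head>", "<link>", "</head>"].into_iter().map(Ok)
}

fn main() {
    // Since Rust 1.82 the `Ok(token)` pattern counts as irrefutable here,
    // so no `match`, `unwrap`, or `.infallible()` adapter is needed.
    for Ok(token) in tokens() {
        println!("{token}");
    }
}
```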
src/api/push.rs

@@ -9,6 +9,7 @@ use crate::{
    api::{ApiResult, EmptyResult, UpdateType},
    db::models::{Cipher, Device, Folder, Send, User},
    http_client::make_http_request,
+   util::format_date,
    CONFIG,
};

@@ -170,10 +171,10 @@ pub async fn push_cipher_update(
        "identifier": acting_device_uuid,
        "type": ut as i32,
        "payload": {
-           "id": cipher.uuid,
-           "userId": cipher.user_uuid,
-           "organizationId": (),
-           "revisionDate": cipher.updated_at
+           "Id": cipher.uuid,
+           "UserId": cipher.user_uuid,
+           "OrganizationId": (),
+           "RevisionDate": format_date(&cipher.updated_at)
        }
    }))
    .await;

@@ -190,8 +191,8 @@ pub fn push_logout(user: &User, acting_device_uuid: Option<String>) {
        "identifier": acting_device_uuid,
        "type": UpdateType::LogOut as i32,
        "payload": {
-           "userId": user.uuid,
-           "date": user.updated_at
+           "UserId": user.uuid,
+           "Date": format_date(&user.updated_at)
        }
    })));
}

@@ -204,8 +205,8 @@ pub fn push_user_update(ut: UpdateType, user: &User) {
        "identifier": (),
        "type": ut as i32,
        "payload": {
-           "userId": user.uuid,
-           "date": user.updated_at
+           "UserId": user.uuid,
+           "Date": format_date(&user.updated_at)
        }
    })));
}

@@ -224,9 +225,9 @@ pub async fn push_folder_update(
        "identifier": acting_device_uuid,
        "type": ut as i32,
        "payload": {
-           "id": folder.uuid,
-           "userId": folder.user_uuid,
-           "revisionDate": folder.updated_at
+           "Id": folder.uuid,
+           "UserId": folder.user_uuid,
+           "RevisionDate": format_date(&folder.updated_at)
        }
    })));
}

@@ -242,9 +243,9 @@ pub async fn push_send_update(ut: UpdateType, send: &Send, acting_device_uuid: &
        "identifier": acting_device_uuid,
        "type": ut as i32,
        "payload": {
-           "id": send.uuid,
-           "userId": send.user_uuid,
-           "revisionDate": send.revision_date
+           "Id": send.uuid,
+           "UserId": send.user_uuid,
+           "RevisionDate": format_date(&send.revision_date)
        }
    })));
}

@@ -295,8 +296,8 @@ pub async fn push_auth_request(user_uuid: String, auth_request_uuid: String, con
        "identifier": null,
        "type": UpdateType::AuthRequest as i32,
        "payload": {
-           "id": auth_request_uuid,
-           "userId": user_uuid,
+           "Id": auth_request_uuid,
+           "UserId": user_uuid,
        }
    })));
}

@@ -316,8 +317,8 @@ pub async fn push_auth_response(
        "identifier": approving_device_uuid,
        "type": UpdateType::AuthRequestResponse as i32,
        "payload": {
-           "id": auth_request_uuid,
-           "userId": user_uuid,
+           "Id": auth_request_uuid,
+           "UserId": user_uuid,
        }
    })));
}
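The pattern across all of these push.rs hunks is the same: the inner payload keys move from camelCase to the PascalCase casing, and raw chrono timestamps are routed through `util::format_date` instead of relying on their default serialization. A minimal sketch of the resulting payload shape; `format_date` is a stand-in here and its exact format string is an assumption, not copied from the source:

```rust
use chrono::NaiveDateTime;
use serde_json::json;

// Stand-in for vaultwarden's util::format_date: serialize the timestamp
// explicitly as an RFC 3339-style string instead of whatever the default
// Serialize impl would produce.
fn format_date(dt: &NaiveDateTime) -> String {
    dt.format("%Y-%m-%dT%H:%M:%S%.6fZ").to_string()
}

fn main() {
    let updated_at = chrono::Utc::now().naive_utc();
    let payload = json!({
        "identifier": "acting-device-uuid",
        "type": 1,
        "payload": {
            // PascalCase keys with an explicitly formatted date, as in the diff.
            "Id": "cipher-uuid",
            "UserId": "owner-user-uuid",
            "RevisionDate": format_date(&updated_at),
        }
    });
    println!("{payload:#}");
}
```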
src/db/mod.rs

@@ -373,24 +373,18 @@ pub async fn backup_database(conn: &mut DbConn) -> Result<String, Error> {
        err!("PostgreSQL and MySQL/MariaDB do not support this backup feature");
    }
    sqlite {
-       backup_sqlite_database(conn)
+       let db_url = CONFIG.database_url();
+       let db_path = std::path::Path::new(&db_url).parent().unwrap();
+       let backup_file = db_path
+           .join(format!("db_{}.sqlite3", chrono::Utc::now().format("%Y%m%d_%H%M%S")))
+           .to_string_lossy()
+           .into_owned();
+       diesel::sql_query(format!("VACUUM INTO '{backup_file}'")).execute(conn)?;
+       Ok(backup_file)
    }
}
}

-#[cfg(sqlite)]
-pub fn backup_sqlite_database(conn: &mut diesel::sqlite::SqliteConnection) -> Result<String, Error> {
-    use diesel::RunQueryDsl;
-    let db_url = CONFIG.database_url();
-    let db_path = std::path::Path::new(&db_url).parent().unwrap();
-    let backup_file = db_path
-        .join(format!("db_{}.sqlite3", chrono::Utc::now().format("%Y%m%d_%H%M%S")))
-        .to_string_lossy()
-        .into_owned();
-    diesel::sql_query(format!("VACUUM INTO '{backup_file}'")).execute(conn)?;
-    Ok(backup_file)
-}

/// Get the SQL Server version
pub async fn get_sql_server_version(conn: &mut DbConn) -> String {
    db_run! {@raw conn:
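For context on the backup mechanism itself: `VACUUM INTO` (available since SQLite 3.27) writes a transactionally consistent, compacted copy of a live database into a brand-new file, which is why no separate dump or file-copy step is needed. A minimal standalone sketch of the same idea via diesel, with illustrative paths:

```rust
use diesel::{sql_query, Connection, RunQueryDsl, SqliteConnection};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Open the live database and snapshot it into a timestamped sibling file.
    let mut conn = SqliteConnection::establish("data/db.sqlite3")?;
    let backup_file = format!("data/db_{}.sqlite3", chrono::Utc::now().format("%Y%m%d_%H%M%S"));
    // VACUUM INTO both compacts and copies; the target file must not exist yet.
    sql_query(format!("VACUUM INTO '{backup_file}'")).execute(&mut conn)?;
    println!("Backup written to {backup_file}");
    Ok(())
}
```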
src/db/models/cipher.rs

@@ -46,10 +46,9 @@ db_object! {
    }
}

-#[allow(dead_code)]
pub enum RepromptType {
    None = 0,
-   Password = 1, // not currently used in server
+   Password = 1,
}

/// Local methods

@@ -296,7 +295,7 @@ impl Cipher {
    "creationDate": format_date(&self.created_at),
    "revisionDate": format_date(&self.updated_at),
    "deletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
-   "reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
+   "reprompt": self.reprompt.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32).unwrap_or(RepromptType::None as i32),
    "organizationId": self.organization_uuid,
    "key": self.key,
    "attachments": attachments_json,
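Both reprompt changes (here and in ciphers.rs above) enforce the same invariant: only the two known RepromptType discriminants (0 = None, 1 = Password) survive, whether the value arrives from a client or is emitted in the cipher JSON; anything else collapses to None. A small self-contained sketch of that filter logic:

```rust
enum RepromptType {
    None = 0,
    Password = 1,
}

// Mirrors the Option::filter(...).unwrap_or(...) chain from the diff:
// unknown discriminants are treated as RepromptType::None.
fn sanitize_reprompt(raw: Option<i32>) -> i32 {
    raw.filter(|r| *r == RepromptType::None as i32 || *r == RepromptType::Password as i32)
        .unwrap_or(RepromptType::None as i32)
}

fn main() {
    assert_eq!(sanitize_reprompt(Some(1)), 1); // Password is kept
    assert_eq!(sanitize_reprompt(Some(7)), 0); // unknown value is dropped
    assert_eq!(sanitize_reprompt(None), 0);    // absent stays None
    println!("reprompt sanitizing behaves as expected");
}
```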
src/db/models/mod.rs

@@ -18,7 +18,7 @@ mod user;

pub use self::attachment::Attachment;
pub use self::auth_request::AuthRequest;
-pub use self::cipher::Cipher;
+pub use self::cipher::{Cipher, RepromptType};
pub use self::collection::{Collection, CollectionCipher, CollectionUser};
pub use self::device::{Device, DeviceType};
pub use self::emergency_access::{EmergencyAccess, EmergencyAccessStatus, EmergencyAccessType};
src/db/models/organization.rs

@@ -462,7 +462,13 @@ impl UserOrganization {
        Vec::with_capacity(0)
    };

-   let collections: Vec<Value> = if include_collections {
+   // Check if a user is in a group which has access to all collections
+   // If that is the case, we should not return individual collections!
+   let full_access_group =
+       CONFIG.org_groups_enabled() && Group::is_in_full_access_group(&self.user_uuid, &self.org_uuid, conn).await;
+
+   // If collections are to be included, only include them if the user does not have full access via a group or defined to the user it self
+   let collections: Vec<Value> = if include_collections && !(full_access_group || self.has_full_access()) {
        // Get all collections for the user here already to prevent more queries
        let cu: HashMap<String, CollectionUser> =
            CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)
src/main.rs: 39 lines changed
@@ -67,7 +67,7 @@ pub use util::is_running_in_container;
#[rocket::main]
async fn main() -> Result<(), Error> {
-   parse_args();
+   parse_args().await;
    launch_info();

    let level = init_logging()?;

@@ -115,7 +115,7 @@ PRESETS: m= t= p=
pub const VERSION: Option<&str> = option_env!("VW_VERSION");

-fn parse_args() {
+async fn parse_args() {
    let mut pargs = pico_args::Arguments::from_env();
    let version = VERSION.unwrap_or("(Version info from Git not present)");

@@ -186,7 +186,7 @@ fn parse_args() {
            exit(1);
        }
    } else if command == "backup" {
-       match backup_sqlite() {
+       match backup_sqlite().await {
            Ok(f) => {
                println!("Backup to '{f}' was successful");
                exit(0);

@@ -201,25 +201,20 @@ fn parse_args() {
    }
}

-fn backup_sqlite() -> Result<String, Error> {
-    #[cfg(sqlite)]
-    {
-        use crate::db::{backup_sqlite_database, DbConnType};
-        if DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false) {
-            use diesel::Connection;
-            let url = CONFIG.database_url();
-
-            // Establish a connection to the sqlite database
-            let mut conn = diesel::sqlite::SqliteConnection::establish(&url)?;
-            let backup_file = backup_sqlite_database(&mut conn)?;
-            Ok(backup_file)
-        } else {
-            err_silent!("The database type is not SQLite. Backups only works for SQLite databases")
-        }
-    }
-    #[cfg(not(sqlite))]
-    {
-        err_silent!("The 'sqlite' feature is not enabled. Backups only works for SQLite databases")
-    }
-}
+async fn backup_sqlite() -> Result<String, Error> {
+    use crate::db::{backup_database, DbConnType};
+    if DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false) {
+        // Establish a connection to the sqlite database
+        let mut conn = db::DbPool::from_config()
+            .expect("SQLite database connection failed")
+            .get()
+            .await
+            .expect("Unable to get SQLite db pool");
+
+        let backup_file = backup_database(&mut conn).await?;
+        Ok(backup_file)
+    } else {
+        err_silent!("The database type is not SQLite. Backups only works for SQLite databases")
+    }
+}

@@ -610,7 +605,7 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error>
    // If we need more signals to act upon, we might want to use select! here.
    // With only one item to listen for this is enough.
    let _ = signal_user1.recv().await;
-   match backup_sqlite() {
+   match backup_sqlite().await {
        Ok(f) => info!("Backup to '{f}' was successful"),
        Err(e) => error!("Backup failed. {e:?}"),
    }
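The net effect of the main.rs refactor: the `vaultwarden backup` CLI command and the SIGUSR1 handler now share the async `backup_database` path through the connection pool instead of opening a second, synchronous SQLite connection. A minimal sketch of the signal-driven part under that assumption, with the backup body stubbed out (sending `kill -USR1 <pid>` to the process triggers one run):

```rust
use tokio::signal::unix::{signal, SignalKind};

// Stub standing in for the real async backup; assume it returns the path of
// the file it wrote.
async fn backup_sqlite() -> Result<String, std::io::Error> {
    Ok("data/db_20241201_120000.sqlite3".to_string())
}

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    // Install a SIGUSR1 listener; each received signal triggers one backup.
    // SignalKind::user_defined1() corresponds to SIGUSR1 on Unix.
    let mut signal_user1 = signal(SignalKind::user_defined1())?;
    loop {
        let _ = signal_user1.recv().await;
        match backup_sqlite().await {
            Ok(f) => println!("Backup to '{f}' was successful"),
            Err(e) => eprintln!("Backup failed. {e:?}"),
        }
    }
}
```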
src/static/scripts/admin.css (vendored): 4 lines changed
@@ -38,8 +38,8 @@ img {
    max-width: 130px;
}
#users-table .vw-actions, #orgs-table .vw-actions {
-   min-width: 130px;
-   max-width: 130px;
+   min-width: 135px;
+   max-width: 140px;
}
#users-table .vw-org-cell {
    max-height: 120px;
src/static/scripts/admin_diagnostics.js (vendored): 212 lines changed
@@ -7,6 +7,8 @@ var timeCheck = false;
var ntpTimeCheck = false;
var domainCheck = false;
var httpsCheck = false;
var websocketCheck = false;
+var httpResponseCheck = false;

// ================================
// Date & Time Check

@@ -76,18 +78,15 @@ async function generateSupportString(event, dj) {
    event.preventDefault();
    event.stopPropagation();

-   let supportString = "### Your environment (Generated via diagnostics page)\n";
+   let supportString = "### Your environment (Generated via diagnostics page)\n\n";

    supportString += `* Vaultwarden version: v${dj.current_release}\n`;
    supportString += `* Web-vault version: v${dj.web_vault_version}\n`;
    supportString += `* OS/Arch: ${dj.host_os}/${dj.host_arch}\n`;
    supportString += `* Running within a container: ${dj.running_within_container} (Base: ${dj.container_base_image})\n`;
-   supportString += "* Environment settings overridden: ";
-   if (dj.overrides != "") {
-       supportString += "true\n";
-   } else {
-       supportString += "false\n";
-   }
-   supportString += `* Database type: ${dj.db_type}\n`;
-   supportString += `* Database version: ${dj.db_version}\n`;
+   supportString += `* Environment settings overridden!: ${dj.overrides !== ""}\n`;
    supportString += `* Uses a reverse proxy: ${dj.ip_header_exists}\n`;
    if (dj.ip_header_exists) {
        supportString += `* IP Header check: ${dj.ip_header_match} (${dj.ip_header_name})\n`;

@@ -99,11 +98,12 @@ async function generateSupportString(event, dj) {
    supportString += `* Server/NTP Time Check: ${ntpTimeCheck}\n`;
    supportString += `* Domain Configuration Check: ${domainCheck}\n`;
    supportString += `* HTTPS Check: ${httpsCheck}\n`;
+   supportString += `* Database type: ${dj.db_type}\n`;
+   supportString += `* Database version: ${dj.db_version}\n`;
    supportString += "* Clients used: \n";
    supportString += "* Reverse proxy and version: \n";
    supportString += "* Other relevant information: \n";
    if (dj.enable_websocket) {
        supportString += `* Websocket Check: ${websocketCheck}\n`;
    } else {
        supportString += "* Websocket Check: disabled\n";
    }
+   supportString += `* HTTP Response Checks: ${httpResponseCheck}\n`;

    const jsonResponse = await fetch(`${BASE_URL}/admin/diagnostics/config`, {
        "headers": { "Accept": "application/json" }

@@ -113,10 +113,30 @@ async function generateSupportString(event, dj) {
        throw new Error(jsonResponse);
    }
    const configJson = await jsonResponse.json();
-   supportString += "\n### Config (Generated via diagnostics page)\n<details><summary>Show Running Config</summary>\n";
-   supportString += `\n**Environment settings which are overridden:** ${dj.overrides}\n`;
-   supportString += "\n\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n</details>\n";
+
+   // Start Config and Details section within a details block which is collapsed by default
+   supportString += "\n### Config & Details (Generated via diagnostics page)\n\n";
+   supportString += "<details><summary>Show Config & Details</summary>\n";
+
+   // Add overrides if they exists
+   if (dj.overrides != "") {
+       supportString += `\n**Environment settings which are overridden:** ${dj.overrides}\n`;
+   }
+
+   // Add http response check messages if they exists
+   if (httpResponseCheck === false) {
+       supportString += "\n**Failed HTTP Checks:**\n";
+       // We use `innerText` here since that will convert <br> into new-lines
+       supportString += "\n```yaml\n" + document.getElementById("http-response-errors").innerText.trim() + "\n```\n";
+   }
+
+   // Add the current config in json form
+   supportString += "\n**Config:**\n";
+   supportString += "\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n";
+
+   supportString += "\n</details>\n";

    // Add the support string to the textbox so it can be viewed and copied
    document.getElementById("support-string").textContent = supportString;
    document.getElementById("support-string").classList.remove("d-none");
    document.getElementById("copy-support").classList.remove("d-none");

@@ -199,6 +219,162 @@ function checkDns(dns_resolved) {
    }
}

+async function fetchCheckUrl(url) {
+    try {
+        const response = await fetch(url);
+        return { headers: response.headers, status: response.status, text: await response.text() };
+    } catch (error) {
+        console.error(`Error fetching ${url}: ${error}`);
+        return { error };
+    }
+}
+
+function checkSecurityHeaders(headers, omit) {
+    let securityHeaders = {
+        "x-frame-options": ["SAMEORIGIN"],
+        "x-content-type-options": ["nosniff"],
+        "referrer-policy": ["same-origin"],
+        "x-xss-protection": ["0"],
+        "x-robots-tag": ["noindex", "nofollow"],
+        "content-security-policy": [
+            "default-src 'self'",
+            "base-uri 'self'",
+            "form-action 'self'",
+            "object-src 'self' blob:",
+            "script-src 'self' 'wasm-unsafe-eval'",
+            "style-src 'self' 'unsafe-inline'",
+            "child-src 'self' https://*.duosecurity.com https://*.duofederal.com",
+            "frame-src 'self' https://*.duosecurity.com https://*.duofederal.com",
+            "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb chrome-extension://jbkfoedolllekgbhcbcoahefnbanhhlh moz-extension://*",
+            "img-src 'self' data: https://haveibeenpwned.com",
+            "connect-src 'self' https://api.pwnedpasswords.com https://api.2fa.directory https://app.simplelogin.io/api/ https://app.addy.io/api/ https://api.fastmail.com/ https://api.forwardemail.net",
+        ]
+    };
+
+    let messages = [];
+    for (let header in securityHeaders) {
+        // Skip some headers for specific endpoints if needed
+        if (typeof omit === "object" && omit.includes(header) === true) {
+            continue;
+        }
+        // If the header exists, check if the contents matches what we expect it to be
+        let headerValue = headers.get(header);
+        if (headerValue !== null) {
+            securityHeaders[header].forEach((expectedValue) => {
+                if (headerValue.indexOf(expectedValue) === -1) {
+                    messages.push(`'${header}' does not contain '${expectedValue}'`);
+                }
+            });
+        } else {
+            messages.push(`'${header}' is missing!`);
+        }
+    }
+    return messages;
+}
+
+async function checkHttpResponse() {
+    const [apiConfig, webauthnConnector, notFound, notFoundApi, badRequest, unauthorized, forbidden] = await Promise.all([
+        fetchCheckUrl(`${BASE_URL}/api/config`),
+        fetchCheckUrl(`${BASE_URL}/webauthn-connector.html`),
+        fetchCheckUrl(`${BASE_URL}/admin/does-not-exist`),
+        fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=404`),
+        fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=400`),
+        fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=401`),
+        fetchCheckUrl(`${BASE_URL}/admin/diagnostics/http?code=403`),
+    ]);
+
+    const respErrorElm = document.getElementById("http-response-errors");
+
+    // Check and validate the default API header responses
+    let apiErrors = checkSecurityHeaders(apiConfig.headers);
+    if (apiErrors.length >= 1) {
+        respErrorElm.innerHTML += "<b>API calls:</b><br>";
+        apiErrors.forEach((errMsg) => {
+            respErrorElm.innerHTML += `<b>Header:</b> ${errMsg}<br>`;
+        });
+    }
+
+    // Check the special `-connector.html` headers, these should have some headers omitted.
+    const omitConnectorHeaders = ["x-frame-options", "content-security-policy"];
+    let connectorErrors = checkSecurityHeaders(webauthnConnector.headers, omitConnectorHeaders);
+    omitConnectorHeaders.forEach((header) => {
+        if (webauthnConnector.headers.get(header) !== null) {
+            connectorErrors.push(`'${header}' is present while it should not`);
+        }
+    });
+    if (connectorErrors.length >= 1) {
+        respErrorElm.innerHTML += "<b>2FA Connector calls:</b><br>";
+        connectorErrors.forEach((errMsg) => {
+            respErrorElm.innerHTML += `<b>Header:</b> ${errMsg}<br>`;
+        });
+    }
+
+    // Check specific error code responses if they are not re-written by a reverse proxy
+    let responseErrors = [];
+    if (notFound.status !== 404 || notFound.text.indexOf("return to the web-vault") === -1) {
+        responseErrors.push("404 (Not Found) HTML is invalid");
+    }
+
+    if (notFoundApi.status !== 404 || notFoundApi.text.indexOf("\"message\":\"Testing error 404 response\",") === -1) {
+        responseErrors.push("404 (Not Found) JSON is invalid");
+    }
+
+    if (badRequest.status !== 400 || badRequest.text.indexOf("\"message\":\"Testing error 400 response\",") === -1) {
+        responseErrors.push("400 (Bad Request) is invalid");
+    }
+
+    if (unauthorized.status !== 401 || unauthorized.text.indexOf("\"message\":\"Testing error 401 response\",") === -1) {
+        responseErrors.push("401 (Unauthorized) is invalid");
+    }
+
+    if (forbidden.status !== 403 || forbidden.text.indexOf("\"message\":\"Testing error 403 response\",") === -1) {
+        responseErrors.push("403 (Forbidden) is invalid");
+    }
+
+    if (responseErrors.length >= 1) {
+        respErrorElm.innerHTML += "<b>HTTP error responses:</b><br>";
+        responseErrors.forEach((errMsg) => {
+            respErrorElm.innerHTML += `<b>Response to:</b> ${errMsg}<br>`;
+        });
+    }
+
+    if (responseErrors.length >= 1 || connectorErrors.length >= 1 || apiErrors.length >= 1) {
+        document.getElementById("http-response-warning").classList.remove("d-none");
+    } else {
+        httpResponseCheck = true;
+        document.getElementById("http-response-success").classList.remove("d-none");
+    }
+}
+
+async function fetchWsUrl(wsUrl) {
+    return new Promise((resolve, reject) => {
+        try {
+            const ws = new WebSocket(wsUrl);
+            ws.onopen = () => {
+                ws.close();
+                resolve(true);
+            };
+
+            ws.onerror = () => {
+                reject(false);
+            };
+        } catch (_) {
+            reject(false);
+        }
+    });
+}
+
+async function checkWebsocketConnection() {
+    // Test Websocket connections via the anonymous (login with device) connection
+    const isConnected = await fetchWsUrl(`${BASE_URL}/notifications/anonymous-hub?token=admin-diagnostics`).catch(() => false);
+    if (isConnected) {
+        websocketCheck = true;
+        document.getElementById("websocket-success").classList.remove("d-none");
+    } else {
+        document.getElementById("websocket-error").classList.remove("d-none");
+    }
+}

function init(dj) {
    // Time check
    document.getElementById("time-browser-string").textContent = browserUTC;

@@ -225,6 +401,12 @@ function init(dj) {
    // DNS Check
    checkDns(dj.dns_resolved);

+   checkHttpResponse();
+
+   if (dj.enable_websocket) {
+       checkWebsocketConnection();
+   }
}

// onLoad events
src/static/scripts/datatables.css (vendored): 42 lines changed
@@ -4,10 +4,10 @@
 *
 * To rebuild or modify this file with the latest versions of the included
 * software please visit:
-*   https://datatables.net/download/#bs5/dt-2.0.8
+*   https://datatables.net/download/#bs5/dt-2.1.8
 *
 * Included libraries:
-*   DataTables 2.0.8
+*   DataTables 2.1.8
 */

@charset "UTF-8";

@@ -45,15 +45,21 @@ table.dataTable tr.dt-hasChild td.dt-control:before {
}

html.dark table.dataTable td.dt-control:before,
-:root[data-bs-theme=dark] table.dataTable td.dt-control:before {
+:root[data-bs-theme=dark] table.dataTable td.dt-control:before,
+:root[data-theme=dark] table.dataTable td.dt-control:before {
    border-left-color: rgba(255, 255, 255, 0.5);
}
html.dark table.dataTable tr.dt-hasChild td.dt-control:before,
-:root[data-bs-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before {
+:root[data-bs-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before,
+:root[data-theme=dark] table.dataTable tr.dt-hasChild td.dt-control:before {
    border-top-color: rgba(255, 255, 255, 0.5);
    border-left-color: transparent;
}

+div.dt-scroll {
+    width: 100%;
+}
+
div.dt-scroll-body thead tr,
div.dt-scroll-body tfoot tr {
    height: 0;

@@ -377,6 +383,31 @@ table.table.dataTable.table-hover > tbody > tr.selected:hover > * {
    box-shadow: inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.975);
}

+div.dt-container div.dt-layout-start > *:not(:last-child) {
+    margin-right: 1em;
+}
+div.dt-container div.dt-layout-end > *:not(:first-child) {
+    margin-left: 1em;
+}
+div.dt-container div.dt-layout-full {
+    width: 100%;
+}
+div.dt-container div.dt-layout-full > *:only-child {
+    margin-left: auto;
+    margin-right: auto;
+}
+div.dt-container div.dt-layout-table > div {
+    display: block !important;
+}
+
+@media screen and (max-width: 767px) {
+    div.dt-container div.dt-layout-start > *:not(:last-child) {
+        margin-right: 0;
+    }
+    div.dt-container div.dt-layout-end > *:not(:first-child) {
+        margin-left: 0;
+    }
+}
div.dt-container div.dt-length label {
    font-weight: normal;
    text-align: left;

@@ -400,9 +431,6 @@ div.dt-container div.dt-search input {
    display: inline-block;
    width: auto;
}
-div.dt-container div.dt-info {
-    padding-top: 0.85em;
-}
div.dt-container div.dt-paging {
    margin: 0;
}
src/static/scripts/datatables.js (vendored): 1414 lines changed
File diff suppressed because it is too large
src/static/templates/admin/diagnostics.hbs

@@ -132,6 +132,21 @@
    <span class="d-block" title="We have direct internet access, no outgoing proxy configured."><b>No</b></span>
    {{/unless}}
</dd>
+<dt class="col-sm-5">Websocket enabled
+    {{#if page_data.enable_websocket}}
+    <span class="badge bg-success d-none" id="websocket-success" title="Websocket connection is working.">Ok</span>
+    <span class="badge bg-danger d-none" id="websocket-error" title="Websocket connection error, validate your reverse proxy configuration!">Error</span>
+    {{/if}}
+</dt>
+<dd class="col-sm-7">
+    {{#if page_data.enable_websocket}}
+    <span class="d-block" title="Websocket connections are enabled (ENABLE_WEBSOCKET is true)."><b>Yes</b></span>
+    {{/if}}
+    {{#unless page_data.enable_websocket}}
+    <span class="d-block" title="Websocket connections are disabled (ENABLE_WEBSOCKET is false)."><b>No</b></span>
+    {{/unless}}
+</dd>

<dt class="col-sm-5">DNS (github.com)
    <span class="badge bg-success d-none" id="dns-success" title="DNS Resolving works!">Ok</span>
    <span class="badge bg-danger d-none" id="dns-warning" title="DNS Resolving failed. Please fix.">Error</span>

@@ -167,6 +182,14 @@
    <span id="domain-server" class="d-block"><b>Server:</b> <span id="domain-server-string">{{page_data.admin_url}}</span></span>
    <span id="domain-browser" class="d-block"><b>Browser:</b> <span id="domain-browser-string"></span></span>
</dd>
+
+<dt class="col-sm-5">HTTP Response validation
+    <span class="badge bg-success d-none" id="http-response-success" title="All headers and HTTP request responses seem to be ok.">Ok</span>
+    <span class="badge bg-danger d-none" id="http-response-warning" title="Some headers or HTTP request responses return invalid data!">Error</span>
+</dt>
+<dd class="col-sm-7">
+    <span id="http-response-errors" class="d-block"></span>
+</dd>
</dl>
</div>
</div>
@@ -19,7 +19,7 @@
<tr>
    <td>
-       <svg width="48" height="48" class="float-start me-2 rounded" data-jdenticon-value="{{email}}">
+       <div class="float-start">
        <div>
            <strong>{{name}}</strong>
            <span class="d-block">{{email}}</span>
            <span class="d-block">
src/util.rs

@@ -51,9 +51,11 @@ impl Fairing for AppHeaders {
    }
}

+// NOTE: When modifying or adding security headers be sure to also update the diagnostic checks in `src/static/scripts/admin_diagnostics.js` in `checkSecurityHeaders`
res.set_raw_header("Permissions-Policy", "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), payment=(), picture-in-picture=(), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=()");
res.set_raw_header("Referrer-Policy", "same-origin");
res.set_raw_header("X-Content-Type-Options", "nosniff");
res.set_raw_header("X-Robots-Tag", "noindex, nofollow");
+// Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP
res.set_raw_header("X-XSS-Protection", "0");