Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-11-27 15:12:33 +02:00)

Comparing 5d84f17600...main: 55 commits
Commits in this compare (SHA1 only):
c9d527d84f 7c7f4f5d4f aad1f19b45 35e1a306f3 7f7b412220
bb41f64c0a 319d982113 95a0c667e4 b519832086 2ee40d6105
0182567a62 f9751a0a1d 9017ca265a 8d30285160 3cd3d33d00
2ee5819b56 7c597e88f9 a85b48512c fe1a8f7738 d43edb8f17
8043f7eca7 e659a61581 2d54cc61df 3f010a50af e83faad8d2
a79cd40ea9 b1d84298cc a2ad1dc7c3 7cc4dfabbf 5a8736e116
f76362ff89 6db5b7115d 3510351f4d 7161f612a1 5ee908517f
55577fa4eb 843c063649 550b670dba de808c5ad9 1f73630136
77008a91e9 7f386d38ae 8e7eeab293 e35c6f8705 ae7b725c0f
2a5489a4b2 8fd0ee4211 4a5516e150 7fc94516ce 5ea0779d6b
a133d4e90c 49eff787de cff6c2b3af a0c76284fd 318653b0e5
.env.template

@@ -80,8 +80,16 @@
 ## Timeout when acquiring database connection
 # DATABASE_TIMEOUT=30

+## Database idle timeout
+## Timeout in seconds before idle connections to the database are closed.
+# DATABASE_IDLE_TIMEOUT=600
+
+## Database min connections
+## Define the minimum size of the connection pool used for connecting to the database.
+# DATABASE_MIN_CONNS=2
+
 ## Database max connections
-## Define the size of the connection pool used for connecting to the database.
+## Define the maximum size of the connection pool used for connecting to the database.
 # DATABASE_MAX_CONNS=10

 ## Database connection initialization
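These four settings line up with the knobs on r2d2's pool builder, which diesel re-exports. A minimal sketch of how such values could be wired up, assuming diesel's `r2d2` feature and a SQLite manager; the function name is illustrative, not Vaultwarden's actual code:

```rust
use std::time::Duration;

use diesel::r2d2::{ConnectionManager, Pool};
use diesel::sqlite::SqliteConnection;

// Illustrative only: maps the four .env settings above onto an r2d2 pool.
fn build_pool(database_url: &str) -> Pool<ConnectionManager<SqliteConnection>> {
    let manager = ConnectionManager::<SqliteConnection>::new(database_url);
    Pool::builder()
        // DATABASE_TIMEOUT: wait at most 30s for a free connection
        .connection_timeout(Duration::from_secs(30))
        // DATABASE_IDLE_TIMEOUT: close connections idle for 600s
        .idle_timeout(Some(Duration::from_secs(600)))
        // DATABASE_MIN_CONNS: keep at least 2 idle connections around
        .min_idle(Some(2))
        // DATABASE_MAX_CONNS: never grow beyond 10 connections
        .max_size(10)
        .build(manager)
        .expect("failed to build connection pool")
}
```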
@@ -174,6 +182,10 @@
 ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
 ## Defaults to every minute. Set blank to disable this job.
 # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
 #
+## Cron schedule of the job that cleans sso nonce from incomplete flow
+## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
+# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *"
+
 ########################
 ### General settings ###
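Both schedules use six-field cron expressions (seconds first). A small sketch with the `job_scheduler_ng` crate from this repo's Cargo.toml, assuming its `Job`/`JobScheduler` API carried over from the original `job_scheduler` crate; the closures are placeholders:

```rust
use std::time::Duration;

use job_scheduler_ng::{Job, JobScheduler};

fn main() {
    let mut sched = JobScheduler::new();
    // Fields: sec min hour day-of-month month day-of-week.
    // "30 * * * * *" fires at second 30 of every minute.
    sched.add(Job::new("30 * * * * *".parse().unwrap(), || {
        println!("purging expired Duo contexts");
    }));
    // "0 20 0 * * *" fires daily at 00:20:00.
    sched.add(Job::new("0 20 0 * * *".parse().unwrap(), || {
        println!("purging incomplete SSO nonces");
    }));
    loop {
        sched.tick();
        std::thread::sleep(Duration::from_millis(500));
    }
}
```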
@@ -364,6 +376,7 @@
 ## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
 ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
 ## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
+## - "pm-25373-windows-biometrics-v2": Enable the new implementation of biometrics on Windows. (Needs desktop >= 2025.11.0)
 ## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
 ## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
 ## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
@@ -459,6 +472,60 @@
 ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
 # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false

+## Prefer IPv6 (AAAA) resolving
+## This settings configures the DNS resolver to resolve IPv6 first, and if not available try IPv4
+## This could be useful in IPv6 only environments.
+# DNS_PREFER_IPV6=false
+
+#####################################
+### SSO settings (OpenID Connect) ###
+#####################################
+
+## Controls whether users can login using an OpenID Connect identity provider
+# SSO_ENABLED=false
+
+## Prevent users from logging in directly without going through SSO
+# SSO_ONLY=false
+
+## On SSO Signup if a user with a matching email already exists make the association
+# SSO_SIGNUPS_MATCH_EMAIL=true
+
+## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` open potential account takeover.
+# SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false
+
+## Base URL of the OIDC server (auto-discovery is used)
+## - Should not include the `/.well-known/openid-configuration` part and no trailing `/`
+## - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse
+# SSO_AUTHORITY=https://auth.example.com
+
+## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit).
+# SSO_SCOPES="email profile"
+
+## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth).
+# SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent"
+
+## Activate PKCE for the Auth Code flow.
+# SSO_PKCE=true
+
+## Regex for additional trusted Id token audience (by default only the client_id is trusted).
+# SSO_AUDIENCE_TRUSTED='^$'
+
+## Set your Client ID and Client Key
+# SSO_CLIENT_ID=11111
+# SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA
+
+## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment.
+# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}'
+
+## Use sso only for authentication not the session lifecycle
+# SSO_AUTH_ONLY_NOT_SESSION=false
+
+## Client cache for discovery endpoint. Duration in seconds (0 to disable).
+# SSO_CLIENT_CACHE_EXPIRATION=0
+
+## Log all the tokens, LOG_LEVEL=debug is required
+# SSO_DEBUG_TOKENS=false
+
 ########################
 ### MFA/2FA settings ###
 ########################
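A quick way to sanity-check an `SSO_AUTHORITY` value is to fetch the discovery document it must serve. A self-contained sketch using reqwest and serde_json (both already dependencies of this project); the authority URL is the placeholder from the template, and a plain multi-threaded tokio runtime is assumed:

```rust
use serde_json::Value;

// Sketch: fetch ${SSO_AUTHORITY}/.well-known/openid-configuration and print
// the endpoints a conforming provider must advertise.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Mirrors SSO_AUTHORITY: no trailing `/`, no `/.well-known/...` suffix.
    let authority = "https://auth.example.com";
    let url = format!("{authority}/.well-known/openid-configuration");

    let rt = tokio::runtime::Runtime::new()?;
    let doc: Value = rt.block_on(async { reqwest::get(&url).await?.json::<Value>().await })?;

    for key in ["issuer", "authorization_endpoint", "token_endpoint", "jwks_uri"] {
        println!("{key}: {}", doc[key]);
    }
    Ok(())
}
```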
@@ -518,7 +585,7 @@
 ##
 ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238),
 ## we allow by default the TOTP code which was valid one step back and one in the future.
-## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes.
+## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes.
 ## You can disable this, so that only the current TOTP Code is allowed.
 ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid.
 ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid.
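The window described above can be made concrete with the `totp-lite` crate from Cargo.toml. A hedged sketch, not the server's actual validation code; replay protection and secret handling are omitted:

```rust
use totp_lite::{totp_custom, Sha1};

/// Sketch of the RFC 6238 window: allowing one 30-second step of drift in
/// each direction means three codes are accepted at any given moment.
fn totp_matches(secret: &[u8], candidate: &str, now_secs: u64) -> bool {
    const STEP: u64 = 30; // seconds per TOTP step
    [-1i64, 0, 1].into_iter().any(|offset| {
        let t = now_secs.wrapping_add_signed(offset * STEP as i64);
        totp_custom::<Sha1>(STEP, 6, secret, t) == candidate
    })
}

fn main() {
    let now = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("system clock before UNIX epoch")
        .as_secs();
    // Disabling the window means checking only offset 0 here.
    println!("{}", totp_matches(b"supersecret", "123456", now));
}
```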
@@ -558,7 +625,7 @@
 # SMTP_AUTH_MECHANISM=

 ## Server name sent during the SMTP HELO
-## By default this value should be is on the machine's hostname,
+## By default this value should be the machine's hostname,
 ## but might need to be changed in case it trips some anti-spam filters
 # HELO_NAME=
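In lettre, the mail crate this project depends on, the HELO name corresponds to the client identity sent during EHLO/HELO. A hedged sketch assuming lettre's synchronous SmtpTransport builder; the relay host and domain are placeholders:

```rust
use lettre::transport::smtp::extension::ClientId;
use lettre::SmtpTransport;

fn main() {
    // HELO_NAME: identify ourselves to the SMTP server with a fixed
    // hostname instead of whatever the machine happens to report.
    let transport = SmtpTransport::relay("smtp.example.com")
        .expect("invalid relay host")
        .hello_name(ClientId::Domain("mail.example.com".to_string()))
        .build();
    let _ = transport; // illustrative: would be used to send mail
}
```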
.github/workflows/build.yml (vendored): 48 lines changed

@@ -14,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+      - "macros/**"

   pull_request:
     paths:
@@ -27,15 +28,12 @@ on:
       - "diesel.toml"
      - "docker/Dockerfile.j2"
      - "docker/DockerSettings.yaml"
+     - "macros/**"

 jobs:
   build:
     name: Build and Test ${{ matrix.channel }}
-    permissions:
-      actions: write
-      contents: read
-    # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
@@ -56,7 +54,7 @@ jobs:
       # Checkout the repo
       - name: "Checkout"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         with:
           persist-credentials: false
           fetch-depth: 0
@@ -70,9 +68,9 @@ jobs:
           CHANNEL: ${{ matrix.channel }}
         run: |
           if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then
-            RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
+            RUST_TOOLCHAIN="$(grep -m1 -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
           elif [[ "${CHANNEL}" == 'msrv' ]]; then
-            RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
+            RUST_TOOLCHAIN="$(grep -m1 -oP 'rust-version\s.*"(\K.*?)(?=")' Cargo.toml)"
           else
             RUST_TOOLCHAIN="${CHANNEL}"
           fi
@@ -82,7 +80,7 @@ jobs:
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -92,7 +90,7 @@ jobs:
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -117,60 +115,60 @@ jobs:
       # Enable Rust Caching
       - name: Rust Caching
-        uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+        uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
           # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
           # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
           # Only update when really needed! Use a <year>.<month>[.<inc>] format.
-          prefix-key: "v2023.07-rust"
+          prefix-key: "v2025.09-rust"
       # End Enable Rust Caching

       # Run cargo tests
       # First test all features together, afterwards test them separately.
-      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
-        id: test_sqlite_mysql_postgresql_mimalloc_logger
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
+        id: test_sqlite_mysql_postgresql_mimalloc_s3
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3

       - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
         id: test_sqlite_mysql_postgresql_mimalloc
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite,mysql,postgresql,enable_mimalloc
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc

       - name: "test features: sqlite,mysql,postgresql"
         id: test_sqlite_mysql_postgresql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite,mysql,postgresql
+          cargo test --profile ci --features sqlite,mysql,postgresql

       - name: "test features: sqlite"
         id: test_sqlite
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features sqlite
+          cargo test --profile ci --features sqlite

       - name: "test features: mysql"
         id: test_mysql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features mysql
+          cargo test --profile ci --features mysql

       - name: "test features: postgresql"
         id: test_postgresql
         if: ${{ !cancelled() }}
         run: |
-          cargo test --features postgresql
+          cargo test --profile ci --features postgresql
       # End Run cargo tests


       # Run cargo clippy, and fail on warnings
-      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
+      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
         id: clippy
         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
         run: |
-          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
+          cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
       # End Run cargo clippy
@@ -188,7 +186,7 @@ jobs:
       - name: "Some checks failed"
         if: ${{ failure() }}
         env:
-          TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
+          TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
           TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
           TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
           TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
@@ -201,13 +199,13 @@ jobs:
           echo "" >> "${GITHUB_STEP_SUMMARY}"
           echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
-          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
-          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
           echo "" >> "${GITHUB_STEP_SUMMARY}"
           echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
.github/workflows/check-templates.yml (vendored): 4 lines changed

@@ -6,15 +6,13 @@ on: [ push, pull_request ]
 jobs:
   docker-templates:
     name: Validate docker templates
-    permissions:
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30

     steps:
       # Checkout the repo
       - name: "Checkout"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         with:
           persist-credentials: false
       # End Checkout the repo
.github/workflows/hadolint.yml (vendored): 9 lines changed

@@ -1,13 +1,12 @@
 name: Hadolint
+permissions: {}

 on: [ push, pull_request ]
-permissions: {}
-

 jobs:
   hadolint:
     name: Validate Dockerfile syntax
     permissions:
       contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
@@ -31,11 +30,11 @@ jobs:
           sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
           sudo chmod +x /usr/local/bin/hadolint
         env:
-          HADOLINT_VERSION: 2.12.0
+          HADOLINT_VERSION: 2.14.0
       # End Download hadolint

       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         with:
           persist-credentials: false
       # End Checkout the repo
.github/workflows/release.yml (vendored): 353 lines changed

@@ -10,39 +10,39 @@ on:
     # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
     - '[1-2].[0-9]+.[0-9]+'

 concurrency:
   # Apply concurrency control only on the upstream repo
   group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }}
   # Don't cancel other runs when creating a tag
   cancel-in-progress: ${{ github.ref_type == 'branch' }}

 defaults:
   run:
     shell: bash

+env:
+  # The *_REPO variables need to be configured as repository variables
+  # Append `/settings/variables/actions` to your repo url
+  # DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
+  # Check for Docker hub credentials in secrets
+  HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
+  # GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
+  # Check for Github credentials in secrets
+  HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
+  # QUAY_REPO needs to be 'quay.io/<user>/<repo>'
+  # Check for Quay.io credentials in secrets
+  HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
+
 jobs:
-  # https://github.com/marketplace/actions/skip-duplicate-actions
-  # Some checks to determine if we need to continue with building a new docker.
-  # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
-  skip_check:
-    # Only run this in the upstream repo and not on forks
-    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
-    name: Cancel older jobs when running
-    permissions:
-      actions: write
-    runs-on: ubuntu-24.04
-    outputs:
-      should_skip: ${{ steps.skip_check.outputs.should_skip }}
-
-    steps:
-      - name: Skip Duplicates Actions
-        id: skip_check
-        uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
-        with:
-          cancel_others: 'true'
-        # Only run this when not creating a tag
-        if: ${{ github.ref_type == 'branch' }}
-
   docker-build:
-    needs: skip_check
-    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     name: Build Vaultwarden containers
+    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     permissions:
-      packages: write
+      packages: write # Needed to upload packages and artifacts
       contents: read
-      attestations: write
-      id-token: write
-    runs-on: ubuntu-24.04
+      attestations: write # Needed to generate an artifact attestation for a build
+      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
+    runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
     timeout-minutes: 120
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
     services:
@@ -53,24 +53,16 @@ jobs:
     env:
       SOURCE_COMMIT: ${{ github.sha }}
       SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
-      # The *_REPO variables need to be configured as repository variables
-      # Append `/settings/variables/actions` to your repo url
-      # DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
-      # Check for Docker hub credentials in secrets
-      HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
-      # GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
-      # Check for Github credentials in secrets
-      HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
-      # QUAY_REPO needs to be 'quay.io/<user>/<repo>'
-      # Check for Quay.io credentials in secrets
-      HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
     strategy:
       matrix:
+        arch: ["amd64", "arm64", "arm/v7", "arm/v6"]
         base_image: ["debian","alpine"]
+    outputs:
+      base-tags: ${{ steps.determine-version.outputs.BASE_TAGS }}

     steps:
       - name: Initialize QEMU binfmt support
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
         with:
           platforms: "arm64,arm"
@@ -89,23 +81,32 @@ jobs:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         # We need fetch-depth of 0 so we also get all the tag metadata
         with:
           persist-credentials: false
           fetch-depth: 0

+      # Normalize the architecture string for use in paths and cache keys
+      - name: Normalize architecture string
+        env:
+          MATRIX_ARCH: ${{ matrix.arch }}
+        run: |
+          # Replace slashes with nothing to create a safe string for paths/cache keys
+          NORMALIZED_ARCH="${MATRIX_ARCH//\/}"
+          echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
+
       # Determine Base Tags and Source Version
       - name: Determine Base Tags and Source Version
-        shell: bash
+        id: determine-version
         env:
           REF_TYPE: ${{ github.ref_type }}
         run: |
           # Check which main tag we are going to build determined by ref_type
           if [[ "${REF_TYPE}" == "tag" ]]; then
-            echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
+            echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_OUTPUT}"
           elif [[ "${REF_TYPE}" == "branch" ]]; then
-            echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
+            echo "BASE_TAGS=testing" | tee -a "${GITHUB_OUTPUT}"
           fi

       # Get the Source Version for this release
@@ -120,7 +121,7 @@ jobs:
       # Login to Docker Hub
       - name: Login to Docker Hub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -128,7 +129,6 @@ jobs:
       - name: Add registry for DockerHub
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
-        shell: bash
         env:
           DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
         run: |
@@ -136,7 +136,7 @@ jobs:
       # Login to GitHub Container Registry
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -145,7 +145,6 @@ jobs:
       - name: Add registry for ghcr.io
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
-        shell: bash
         env:
           GHCR_REPO: ${{ vars.GHCR_REPO }}
         run: |
@@ -153,7 +152,7 @@ jobs:
       # Login to Quay.io
       - name: Login to Quay.io
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: quay.io
           username: ${{ secrets.QUAY_USERNAME }}
@@ -162,23 +161,22 @@ jobs:
       - name: Add registry for Quay.io
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
-        shell: bash
         env:
           QUAY_REPO: ${{ vars.QUAY_REPO }}
         run: |
           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"

       - name: Configure build cache from/to
-        shell: bash
         env:
           GHCR_REPO: ${{ vars.GHCR_REPO }}
           BASE_IMAGE: ${{ matrix.base_image }}
+          NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
         run: |
           #
           # Check if there is a GitHub Container Registry Login and use it for caching
           if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
-            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
-            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
           else
             echo "BAKE_CACHE_FROM="
             echo "BAKE_CACHE_TO="
@@ -186,31 +184,45 @@ jobs:
           #

       - name: Add localhost registry
-        shell: bash
         run: |
           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"

+      - name: Generate tags
+        id: tags
+        env:
+          CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
+        run: |
+          # Convert comma-separated list to newline-separated set commands
+          TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|")
+
+          # Output for use in next step
+          {
+            echo "TAGS<<EOF"
+            echo "$TAGS"
+            echo "EOF"
+          } >> "$GITHUB_ENV"
+
       - name: Bake ${{ matrix.base_image }} containers
         id: bake_vw
-        uses: docker/bake-action@37816e747588cb137173af99ab33873600c46ea8 # v6.8.0
+        uses: docker/bake-action@3acf805d94d93a86cce4ca44798a76464a75b88c # v6.9.0
         env:
-          BASE_TAGS: "${{ env.BASE_TAGS }}"
+          BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}"
           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
           SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
           SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
           CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
         with:
           pull: true
           push: true
           source: .
           files: docker/docker-bake.hcl
           targets: "${{ matrix.base_image }}-multi"
           set: |
             *.cache-from=${{ env.BAKE_CACHE_FROM }}
             *.cache-to=${{ env.BAKE_CACHE_TO }}
+            *.platform=linux/${{ matrix.arch }}
+            ${{ env.TAGS }}
+            *.output=type=image,push-by-digest=true,name-canonical=true,push=true,compression=zstd

       - name: Extract digest SHA
         shell: bash
         env:
           BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
           BASE_IMAGE: ${{ matrix.base_image }}
@@ -218,38 +230,30 @@ jobs:
           GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

-      # Attest container images
-      - name: Attest - docker.io - ${{ matrix.base_image }}
-        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
-        with:
-          subject-name: ${{ vars.DOCKERHUB_REPO }}
-          subject-digest: ${{ env.DIGEST_SHA }}
-          push-to-registry: true
-
-      - name: Attest - ghcr.io - ${{ matrix.base_image }}
-        if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
-        with:
-          subject-name: ${{ vars.GHCR_REPO }}
-          subject-digest: ${{ env.DIGEST_SHA }}
-          push-to-registry: true
-
-      - name: Attest - quay.io - ${{ matrix.base_image }}
-        if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
-        with:
-          subject-name: ${{ vars.QUAY_REPO }}
-          subject-digest: ${{ env.DIGEST_SHA }}
-          push-to-registry: true
+      - name: Export digest
+        env:
+          DIGEST_SHA: ${{ env.DIGEST_SHA }}
+          RUNNER_TEMP: ${{ runner.temp }}
+        run: |
+          mkdir -p "${RUNNER_TEMP}"/digests
+          digest="${DIGEST_SHA}"
+          touch "${RUNNER_TEMP}/digests/${digest#sha256:}"
+
+      - name: Upload digest
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        with:
+          name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1

       # Extract the Alpine binaries from the containers
       - name: Extract binaries
         shell: bash
         env:
           REF_TYPE: ${{ github.ref_type }}
           BASE_IMAGE: ${{ matrix.base_image }}
+          DIGEST_SHA: ${{ env.DIGEST_SHA }}
+          NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
         run: |
           # Check which main tag we are going to build determined by ref_type
           if [[ "${REF_TYPE}" == "tag" ]]; then
@@ -263,60 +267,151 @@ jobs:
             EXTRACT_TAG="${EXTRACT_TAG}-alpine"
           fi

-          # After each extraction the image is removed.
-          # This is needed because using different platforms doesn't trigger a new pull/download
-
-          # Extract amd64 binary
-          docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE}
-          docker rm --force amd64
-          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-
-          # Extract arm64 binary
-          docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE}
-          docker rm --force arm64
-          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-
-          # Extract armv7 binary
-          docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE}
-          docker rm --force armv7
-          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-
-          # Extract armv6 binary
-          docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE}
-          docker rm --force armv6
-          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
+          CONTAINER_ID="$(docker create "localhost:5000/vaultwarden/server:${EXTRACT_TAG}@${DIGEST_SHA}")"
+
+          # Copy the binary
+          docker cp "$CONTAINER_ID":/vaultwarden vaultwarden-"${NORMALIZED_ARCH}"
+
+          # Clean up
+          docker rm "$CONTAINER_ID"

       # Upload artifacts to Github Actions and Attest the binaries
-      - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
-          path: vaultwarden-amd64-${{ matrix.base_image }}
-
-      - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
-          path: vaultwarden-arm64-${{ matrix.base_image }}
-
-      - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
-          path: vaultwarden-armv7-${{ matrix.base_image }}
-
-      - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
-          path: vaultwarden-armv6-${{ matrix.base_image }}
-
-      - name: "Attest artifacts ${{ matrix.base_image }}"
-        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
-        with:
-          subject-path: vaultwarden-*
-      # End Upload artifacts to Github Actions
+      - name: Attest binaries
+        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+        with:
+          subject-path: vaultwarden-${{ env.NORMALIZED_ARCH }}
+
+      - name: Upload binaries as artifacts
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        with:
+          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
+          path: vaultwarden-${{ env.NORMALIZED_ARCH }}
+      # End Upload artifacts to Github Actions
+
+  merge-manifests:
+    name: Merge manifests
+    runs-on: ubuntu-latest
+    needs: docker-build
+
+    env:
+      BASE_TAGS: ${{ needs.docker-build.outputs.base-tags }}
+
+    permissions:
+      packages: write # Needed to upload packages and artifacts
+      attestations: write # Needed to generate an artifact attestation for a build
+      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
+
+    strategy:
+      matrix:
+        base_image: ["debian","alpine"]
+
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: digests-*-${{ matrix.base_image }}
+          merge-multiple: true
+
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
+
+      - name: Add registry for DockerHub
+        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
+        env:
+          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
+        run: |
+          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
+
+      # Login to GitHub Container Registry
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
+
+      - name: Add registry for ghcr.io
+        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
+        run: |
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
+
+      # Login to Quay.io
+      - name: Login to Quay.io
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        with:
+          registry: quay.io
+          username: ${{ secrets.QUAY_USERNAME }}
+          password: ${{ secrets.QUAY_TOKEN }}
+        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
+
+      - name: Add registry for Quay.io
+        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
+        env:
+          QUAY_REPO: ${{ vars.QUAY_REPO }}
+        run: |
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
+
+      - name: Create manifest list, push it and extract digest SHA
+        working-directory: ${{ runner.temp }}/digests
+        env:
+          BASE_IMAGE: "${{ matrix.base_image }}"
+          BASE_TAGS: "${{ env.BASE_TAGS }}"
+          CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
+        run: |
+          set +e
+          IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}"
+          for img in "${IMAGES[@]}"; do
+            echo "Creating manifest for $img:${BASE_TAGS}-${BASE_IMAGE}"
+
+            OUTPUT=$(docker buildx imagetools create \
+              -t "$img:${BASE_TAGS}-${BASE_IMAGE}" \
+              $(printf "$img:${BASE_TAGS}-${BASE_IMAGE}@sha256:%s " *) 2>&1)
+            STATUS=$?
+
+            if [ $STATUS -ne 0 ]; then
+              echo "Manifest creation failed for $img"
+              echo "$OUTPUT"
+              exit $STATUS
+            fi
+
+            echo "Manifest created for $img"
+            echo "$OUTPUT"
+          done
+          set -e
+
+          # Extract digest SHA for subsequent steps
+          GET_DIGEST_SHA="$(echo "$OUTPUT" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)"
+          echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
+
+      # Attest container images
+      - name: Attest - docker.io - ${{ matrix.base_image }}
+        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}}
+        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+        with:
+          subject-name: ${{ vars.DOCKERHUB_REPO }}
+          subject-digest: ${{ env.DIGEST_SHA }}
+          push-to-registry: true
+
+      - name: Attest - ghcr.io - ${{ matrix.base_image }}
+        if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}}
+        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+        with:
+          subject-name: ${{ vars.GHCR_REPO }}
+          subject-digest: ${{ env.DIGEST_SHA }}
+          push-to-registry: true
+
+      - name: Attest - quay.io - ${{ matrix.base_image }}
+        if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}}
+        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+        with:
+          subject-name: ${{ vars.QUAY_REPO }}
+          subject-digest: ${{ env.DIGEST_SHA }}
+          push-to-registry: true
.github/workflows/releasecache-cleanup.yml (vendored): 2 lines changed

@@ -16,7 +16,7 @@ jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
     permissions:
-      packages: write
+      packages: write # To be able to cleanup old caches
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30
.github/workflows/trivy.yml (vendored): 10 lines changed

@@ -23,20 +23,18 @@ jobs:
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Trivy Scan
     permissions:
-      contents: read
-      actions: read
-      security-events: write
+      security-events: write # To write the security report
     runs-on: ubuntu-24.04
     timeout-minutes: 30

     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         with:
           persist-credentials: false

       - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
+        uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
         env:
           TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
           TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
@@ -48,6 +46,6 @@ jobs:
           severity: CRITICAL,HIGH

       - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
+        uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
         with:
          sarif_file: 'trivy-results.sarif'
.github/workflows/typos.yml (vendored, new file): 22 lines

@@ -0,0 +1,22 @@
+name: Code Spell Checking
+
+on: [ push, pull_request ]
+permissions: {}
+
+jobs:
+  typos:
+    name: Run typos spell checking
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
+
+    steps:
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        with:
+          persist-credentials: false
+      # End Checkout the repo
+
+      # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
+      - name: Spell Check Repo
+        uses: crate-ci/typos@07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
.github/workflows/zizmor.yml (vendored): 6 lines changed

@@ -13,15 +13,15 @@ jobs:
     name: Run zizmor
     runs-on: ubuntu-latest
     permissions:
-      security-events: write
+      security-events: write # To write the security report
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
         with:
           persist-credentials: false

       - name: Run zizmor
-        uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1
+        uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
         with:
           # intentionally not scanning the entire repository,
           # since it contains integration tests.
.pre-commit-config.yaml

@@ -1,7 +1,7 @@
 ---
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
     hooks:
       - id: check-yaml
       - id: check-json
@@ -22,14 +22,15 @@ repos:
         description: Format files with cargo fmt.
         entry: cargo fmt
         language: system
         types: [rust]
+        always_run: true
         pass_filenames: false
         args: ["--", "--check"]
      - id: cargo-test
        name: cargo test
        description: Test the package for errors.
        entry: cargo test
        language: system
-       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
+       args: ["--features", "sqlite,mysql,postgresql", "--"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
@@ -38,7 +39,7 @@ repos:
        description: Lint Rust sources
        entry: cargo clippy
        language: system
-       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
+       args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
@@ -50,3 +51,8 @@ repos:
        args:
          - "-c"
          - "cd docker && make"
+  # When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
+  - repo: https://github.com/crate-ci/typos
+    rev: 07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
+    hooks:
+      - id: typos
.typos.toml (new file): 26 lines

@@ -0,0 +1,26 @@
+[files]
+extend-exclude = [
+    ".git/",
+    "playwright/",
+    "*.js", # Ignore all JavaScript files
+    "!admin*.js", # Except our own JavaScript files
+]
+ignore-hidden = false
+
+[default]
+extend-ignore-re = [
+    # We use this in place of the reserved type identifier at some places
+    "typ",
+    # In SMTP it's called HELO, so ignore it
+    "(?i)helo_name",
+    "Server name sent during.+HELO",
+    # COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
+    "COSEKey",
+    "COSEAlgorithm",
+    # Ignore this specific string as it's valid
+    "Ensure they are valid OTPs",
+    # This word is misspelled upstream
+    # https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
+    # https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
+    "AuthRequestResponseRecieved",
+]
Cargo.lock (generated): 2293 lines changed (diff suppressed because it is too large)
Cargo.toml: 169 lines changed

@@ -1,3 +1,10 @@
+[workspace.package]
+edition = "2021"
+rust-version = "1.89.0"
+license = "AGPL-3.0-only"
+repository = "https://github.com/dani-garcia/vaultwarden"
+publish = false
+
 [workspace]
 members = ["macros"]
@@ -5,18 +12,21 @@ members = ["macros"]
 name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
-edition = "2021"
-rust-version = "1.86.0"
-resolver = "2"
-
-repository = "https://github.com/dani-garcia/vaultwarden"
 readme = "README.md"
-license = "AGPL-3.0-only"
-publish = false
 build = "build.rs"
+resolver = "2"
+repository.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish.workspace = true

 [features]
-# default = ["sqlite"]
+default = [
+    # "sqlite",
+    # "mysql",
+    # "postgresql",
+]
 # Empty to keep compatibility, prefer to set USE_SYSLOG=true
 enable_syslog = []
 mysql = ["diesel/mysql", "diesel_migrations/mysql"]
@@ -27,13 +37,12 @@ vendored_openssl = ["openssl/vendored"]
 # Enable MiMalloc memory allocator to replace the default malloc
 # This can improve performance for Alpine builds
 enable_mimalloc = ["dep:mimalloc"]
-# This is a development dependency, and should only be used during development!
-# It enables the usage of the diesel_logger crate, which is able to output the generated queries.
-# You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
-# if you want to turn off the logging for a specific run.
-query_logger = ["dep:diesel_logger"]
 s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]

 # OIDC specific features
 oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"]
 oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"]

 # Enable unstable features, requires nightly
 # Currently only used to enable rusts official ip support
 unstable = []
@@ -46,20 +55,17 @@ syslog = "7.0.0"
 macros = { path = "./macros" }

 # Logging
-log = "0.4.27"
+log = "0.4.28"
 fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
 tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work

 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }

-# Lazy initialization
-once_cell = "1.21.3"
-
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
-bigdecimal = "0.4.8"
+bigdecimal = "0.4.9"

 # Web framework
 rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -73,17 +79,16 @@ dashmap = "6.1.0"
 # Async futures
 futures = "0.3.31"
-tokio = { version = "1.47.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
-tokio-util = { version = "0.7.15", features = ["compat"]}
+tokio = { version = "1.48.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
+tokio-util = { version = "0.7.17", features = ["compat"]}

 # A generic serialization/deserialization framework
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = "1.0.141"
+serde = { version = "1.0.228", features = ["derive"] }
+serde_json = "1.0.145"

 # A safe, extensible ORM and Query builder
-diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] }
-diesel_migrations = "2.2.0"
-diesel_logger = { version = "0.4.0", optional = true }
+diesel = { version = "2.3.3", features = ["chrono", "r2d2", "numeric"] }
+diesel_migrations = "2.3.0"

 derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
 diesel-derive-newtype = "2.1.2"
@@ -97,51 +102,55 @@ ring = "0.17.14"
 subtle = "2.6.1"

 # UUID generation
-uuid = { version = "1.17.0", features = ["v4"] }
+uuid = { version = "1.18.1", features = ["v4"] }

 # Date and time libraries
-chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
+chrono = { version = "0.4.42", features = ["clock", "serde"], default-features = false }
 chrono-tz = "0.10.4"
-time = "0.3.41"
+time = "0.3.44"

 # Job scheduler
-job_scheduler_ng = "2.2.0"
+job_scheduler_ng = "2.4.0"

 # Data encoding library Hex/Base32/Base64
 data-encoding = "2.9.0"

 # JWT library
-jsonwebtoken = "9.3.1"
+jsonwebtoken = { version = "10.2.0", features = ["use_pem", "rust_crypto"], default-features = false }

 # TOTP library
 totp-lite = "2.0.1"

 # Yubico Library
-yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
+yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false }

 # WebAuthn libraries
-webauthn-rs = "0.3.2"
+# danger-allow-state-serialisation is needed to save the state in the db
+# danger-credential-internals is needed to support U2F to Webauthn migration
+webauthn-rs = { version = "0.5.3", features = ["danger-allow-state-serialisation", "danger-credential-internals"] }
+webauthn-rs-proto = "0.5.3"
+webauthn-rs-core = "0.5.3"

 # Handling of URL's for WebAuthn and favicons
-url = "2.5.4"
+url = "2.5.7"

 # Email libraries
-lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
-percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
+lettre = { version = "0.11.19", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
+percent-encoding = "2.3.2" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"

 # HTML Template library
 handlebars = { version = "6.3.2", features = ["dir_source"] }

 # HTTP client (Used for favicons, version check, DUO and HIBP API)
-reqwest = { version = "0.12.22", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
+reqwest = { version = "0.12.24", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
 hickory-resolver = "0.25.2"

 # Favicon extraction libraries
-html5gum = "0.7.0"
-regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
-data-url = "0.3.1"
-bytes = "1.10.1"
+html5gum = "0.8.0"
+regex = { version = "1.12.2", features = ["std", "perf", "unicode-perl"], default-features = false }
+data-url = "0.3.2"
+bytes = "1.11.0"
 svg-hush = "0.9.5"

 # Cache function results (Used for version check and favicon fetching)
@@ -149,24 +158,28 @@ cached = { version = "0.56.0", features = ["async"] }
 # Used for custom short lived cookie jar during favicon extraction
 cookie = "0.18.1"
-cookie_store = "0.21.1"
+cookie_store = "0.22.0"

 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.73"
+openssl = "0.10.75"

 # CLI argument parsing
 pico-args = "0.5.0"

 # Macro ident concatenation
-pastey = "0.1.0"
-governor = "0.10.0"
+pastey = "0.2.0"
+governor = "0.10.2"

+# OIDC for SSO
+openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] }
+mini-moka = "0.10.3"
+
 # Check client versions for specific features.
-semver = "1.0.26"
+semver = "1.0.27"

 # Allow overriding the default memory allocator
 # Mainly used for the musl builds, since the default musl malloc is very slow
-mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true }
+mimalloc = { version = "0.1.48", features = ["secure"], default-features = false, optional = true }

 which = "8.0.0"
@@ -180,13 +193,13 @@ rpassword = "7.4.0"
 grass_compiler = { version = "0.13.4", default-features = false }

 # File are accessed through Apache OpenDAL
-opendal = { version = "0.54.0", features = ["services-fs"], default-features = false }
+opendal = { version = "0.55.0", features = ["services-fs"], default-features = false }

 # For retrieving AWS credentials, including temporary SSO credentials
-anyhow = { version = "1.0.98", optional = true }
-aws-config = { version = "1.8.3", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
-aws-credential-types = { version = "1.2.4", optional = true }
-aws-smithy-runtime-api = { version = "1.8.5", optional = true }
+anyhow = { version = "1.0.100", optional = true }
+aws-config = { version = "1.8.11", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
+aws-credential-types = { version = "1.2.10", optional = true }
+aws-smithy-runtime-api = { version = "1.9.2", optional = true }
 http = { version = "1.3.1", optional = true }
 reqsign = { version = "0.16.5", optional = true }
@@ -197,23 +210,13 @@ reqsign = { version = "0.16.5", optional = true }
|
||||
strip = "debuginfo"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
|
||||
# A little bit of a speedup
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
# Always build argon2 using opt-level 3
|
||||
# This is a huge speed improvement during testing
|
||||
[profile.dev.package.argon2]
|
||||
opt-level = 3
|
||||
debug = false
|
||||
|
||||
# Optimize for size
|
||||
[profile.release-micro]
|
||||
inherits = "release"
|
||||
opt-level = "z"
|
||||
strip = "symbols"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
opt-level = "z"
|
||||
panic = "abort"
|
||||
|
||||
# Profile for systems with low resources
|
||||
@@ -224,6 +227,32 @@ strip = "symbols"
|
||||
lto = "thin"
|
||||
codegen-units = 16
|
||||
|
||||
# Used for profiling and debugging like valgrind or heaptrack
|
||||
# Inherits release to be sure all optimizations have been done
|
||||
[profile.dbg]
|
||||
inherits = "release"
|
||||
strip = "none"
|
||||
split-debuginfo = "off"
|
||||
debug = "full"
|
||||
|
||||
# A little bit of a speedup for generic building
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
debug = "line-tables-only"
|
||||
|
||||
# Used for CI builds to improve compile time
|
||||
[profile.ci]
|
||||
inherits = "dev"
|
||||
debug = false
|
||||
debug-assertions = false
|
||||
strip = "symbols"
|
||||
panic = "abort"
|
||||
|
||||
# Always build argon2 using opt-level 3
|
||||
# This is a huge speed improvement during testing
|
||||
[profile.dev.package.argon2]
|
||||
opt-level = 3
|
||||
|
||||
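These custom profiles are opt-in at build time via Cargo's `--profile` flag (`dev` and `release` remain the defaults). A minimal sketch of how each would be selected, assuming a checkout of this workspace:

```bash
# Standard optimized build (uses [profile.release])
cargo build --release

# Size-optimized build for systems with low resources
cargo build --profile release-micro

# Profiling/debugging build with full debug info, for valgrind or heaptrack
cargo build --profile dbg
```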
# Linting config
# https://doc.rust-lang.org/rustc/lints/groups.html
[workspace.lints.rust]
@@ -233,15 +262,16 @@ non_ascii_idents = "forbid"

# Deny
deprecated_in_future = "deny"
deprecated_safe = { level = "deny", priority = -1 }
future_incompatible = { level = "deny", priority = -1 }
keyword_idents = { level = "deny", priority = -1 }
let_underscore = { level = "deny", priority = -1 }
nonstandard_style = { level = "deny", priority = -1 }
noop_method_call = "deny"
refining_impl_trait = { level = "deny", priority = -1 }
rust_2018_idioms = { level = "deny", priority = -1 }
rust_2021_compatibility = { level = "deny", priority = -1 }
rust_2024_compatibility = { level = "deny", priority = -1 }
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again
single_use_lifetimes = "deny"
trivial_casts = "deny"
trivial_numeric_casts = "deny"
@@ -251,7 +281,8 @@ unused_lifetimes = "deny"
unused_qualifications = "deny"
variant_size_differences = "deny"
# Allow the following lints since these cause issues with Rust v1.84.0 or newer
# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues
# Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again
if_let_rescope = "allow"
tail_expr_drop_order = "allow"

@@ -265,12 +296,15 @@ todo = "warn"
result_large_err = "allow"

# Deny
branches_sharing_code = "deny"
case_sensitive_file_extension_comparisons = "deny"
cast_lossless = "deny"
clone_on_ref_ptr = "deny"
equatable_if_let = "deny"
excessive_precision = "deny"
filter_map_next = "deny"
float_cmp_const = "deny"
implicit_clone = "deny"
inefficient_to_string = "deny"
iter_on_empty_collections = "deny"
iter_on_single_items = "deny"
@@ -281,16 +315,19 @@ manual_instant_elapsed = "deny"
manual_string_new = "deny"
match_wildcard_for_single_variants = "deny"
mem_forget = "deny"
needless_borrow = "deny"
needless_collect = "deny"
needless_continue = "deny"
needless_lifetimes = "deny"
option_option = "deny"
redundant_clone = "deny"
string_add_assign = "deny"
string_to_string = "deny"
unnecessary_join = "deny"
unnecessary_self_imports = "deny"
unnested_or_patterns = "deny"
unused_async = "deny"
unused_self = "deny"
useless_let_if_seq = "deny"
verbose_file_reads = "deny"
zero_sized_map_values = "deny"
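The rustc lint groups above fire on every compile, while the Clippy lints in this table only surface when Clippy runs over the workspace. A minimal sketch of checking against both (standard Cargo commands, nothing project-specific):

```bash
# rustc lints (deny/forbid entries fail the build)
cargo build --workspace

# Clippy lints configured in [workspace.lints.clippy]
cargo clippy --workspace --all-targets
```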
6 build.rs

@@ -9,8 +9,6 @@ fn main() {
println!("cargo:rustc-cfg=mysql");
#[cfg(feature = "postgresql")]
println!("cargo:rustc-cfg=postgresql");
#[cfg(feature = "query_logger")]
println!("cargo:rustc-cfg=query_logger");
#[cfg(feature = "s3")]
println!("cargo:rustc-cfg=s3");

@@ -24,7 +22,6 @@ fn main() {
println!("cargo::rustc-check-cfg=cfg(sqlite)");
println!("cargo::rustc-check-cfg=cfg(mysql)");
println!("cargo::rustc-check-cfg=cfg(postgresql)");
println!("cargo::rustc-check-cfg=cfg(query_logger)");
println!("cargo::rustc-check-cfg=cfg(s3)");

// Rerun when these paths are changed.
@@ -34,9 +31,6 @@ fn main() {
println!("cargo:rerun-if-changed=.git/index");
println!("cargo:rerun-if-changed=.git/refs/tags");

#[cfg(all(not(debug_assertions), feature = "query_logger"))]
compile_error!("Query Logging is only allowed during development, it is not intended for production usage!");

// Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION.
// If neither exist, read from git.
let maybe_vaultwarden_version =
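The `cfg` values emitted above only exist when the matching Cargo feature is enabled, so database and S3 support is selected at build time rather than at runtime. A hedged sketch of such an invocation (feature names taken from the flags emitted above):

```bash
# Build with PostgreSQL support; build.rs then emits cargo:rustc-cfg=postgresql
cargo build --release --features postgresql

# Multiple backends can be combined
cargo build --release --features sqlite,mysql,postgresql
```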
@@ -1,12 +1,12 @@
---
vault_version: "v2025.7.0"
vault_image_digest: "sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e"
# Cross Compile Docker Helper Scripts v1.6.1
vault_version: "v2025.10.1"
vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa"
# Cross Compile Docker Helper Scripts v1.8.0
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.88.0 # Rust version to be used
debian_version: bookworm # Debian release name to be used
xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6"
rust_version: 1.91.1 # Rust version to be used
debian_version: trixie # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
@@ -17,7 +17,6 @@ build_stage_image:
    platform: "$BUILDPLATFORM"
  alpine:
    image: "build_${TARGETARCH}${TARGETVARIANT}"
    platform: "linux/amd64" # The Alpine build images only have linux/amd64 images
    arch_image:
      amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}"
      arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}"
@@ -19,27 +19,27 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
# [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
# [docker.io/vaultwarden/web-vault:v2025.7.0]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
# [docker.io/vaultwarden/web-vault:v2025.10.1]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault

########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.88.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.88.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.88.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.88.0 AS build_armv6
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.1 AS build_amd64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.1 AS build_arm64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.1 AS build_armv7
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.1 AS build_armv6

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build
FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM
@@ -53,9 +53,9 @@ ENV DEBIAN_FRONTEND=noninteractive \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root" \
    # Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
    # Debian Bookworm already contains libpq v15
    PQ_LIB_DIR="/usr/local/musl/pq15/lib"
    # Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
    # Debian Trixie uses libpq v17
    PQ_LIB_DIR="/usr/local/musl/pq17/lib"


# Create CARGO_HOME folder and don't download rust docs
@@ -19,24 +19,24 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
# [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
# [docker.io/vaultwarden/web-vault:v2025.7.0]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
# [docker.io/vaultwarden/web-vault:v2025.10.1]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault

########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6 AS xx

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.88.0-slim-bookworm AS build
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.1-slim-trixie AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
@@ -52,6 +52,14 @@ ENV DEBIAN_FRONTEND=noninteractive \
    CARGO_HOME="/root/.cargo" \
    USER="root"

# Force the install of an older MariaDB library to prevent a Diesel panic
# See https://github.com/dani-garcia/vaultwarden/issues/6416
RUN echo "deb http://snapshot.debian.org/archive/debian/20250707T084701Z/ trixie main" > /etc/apt/sources.list.d/snapshot.list && \
    echo "Acquire::Check-Valid-Until false;" > /etc/apt/apt.conf.d/AllowSnapshot && \
    echo 'Package: libmariadb libmariadb3 libmariadb-dev mariadb*' > /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin: origin "snapshot.debian.org"' >> /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin-Priority: 1001' >> /etc/apt/preferences.d/mariadb-snapshot
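The `Pin-Priority: 1001` entry outranks the regular archive, so apt selects the snapshot's MariaDB packages even when they are older. One way to verify the pin took effect inside the image, as a sketch (exact versions will differ):

```bash
# Shows which repository wins for the pinned package;
# the candidate version should come from snapshot.debian.org with priority 1001
apt-cache policy libmariadb3
```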
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@@ -68,15 +76,11 @@ RUN apt-get update && \
    xx-apt-get install -y \
    --no-install-recommends \
    gcc \
    libmariadb3 \
    libpq-dev \
    libpq5 \
    libssl-dev \
    libmariadb-dev \
    zlib1g-dev && \
    # Force install arch dependent mariadb dev packages
    # Installing them the normal way breaks several other packages (again)
    apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
    dpkg --force-all -i ./libmariadb-dev*.deb && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
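`/env-cargo` accumulates exports such as `CARGO_TARGET` so that later build steps can source it and cross-compile for the requested platform. A minimal sketch of the consuming side, mirroring the `RUN source /env-cargo` pattern visible in the next hunk:

```bash
# Load the cross-compilation target recorded by xx-cargo earlier
source /env-cargo
cargo build --release --target "$CARGO_TARGET"
```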
@@ -166,7 +170,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim
FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
@@ -175,11 +179,19 @@ ENV ROCKET_PROFILE="release" \

# Create data folder and Install needed libraries
RUN mkdir /data && \
    # Force the install of an older MariaDB library to prevent a Diesel panic
    # See https://github.com/dani-garcia/vaultwarden/issues/6416
    echo "deb http://snapshot.debian.org/archive/debian/20250707T084701Z/ trixie main" > /etc/apt/sources.list.d/snapshot.list && \
    echo "Acquire::Check-Valid-Until false;" > /etc/apt/apt.conf.d/AllowSnapshot && \
    echo 'Package: libmariadb libmariadb3 libmariadb-dev mariadb*' > /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin: origin "snapshot.debian.org"' >> /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin-Priority: 1001' >> /etc/apt/preferences.d/mariadb-snapshot && \
    # Continue with normal install
    apt-get update && apt-get install -y \
    --no-install-recommends \
    ca-certificates \
    curl \
    libmariadb-dev-compat \
    libmariadb3 \
    libpq5 \
    openssl && \
    apt-get clean && \

@@ -36,16 +36,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_digest }}
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx
{% elif base == "alpine" %}
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
{% for arch in build_stage_image[base].arch_image %}
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
{% endfor %}
{% endif %}

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build
{% if base == "debian" %}
COPY --from=xx / /
{% endif %}
@@ -63,13 +63,21 @@ ENV DEBIAN_FRONTEND=noninteractive \
    CARGO_HOME="/root/.cargo" \
    USER="root"
{%- if base == "alpine" %} \
    # Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
    # Debian Bookworm already contains libpq v15
    PQ_LIB_DIR="/usr/local/musl/pq15/lib"
    # Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
    # Debian Trixie uses libpq v17
    PQ_LIB_DIR="/usr/local/musl/pq17/lib"
{% endif %}

{% if base == "debian" %}

# Force the install of an older MariaDB library to prevent a Diesel panic
# See https://github.com/dani-garcia/vaultwarden/issues/6416
RUN echo "deb http://snapshot.debian.org/archive/debian/20250707T084701Z/ trixie main" > /etc/apt/sources.list.d/snapshot.list && \
    echo "Acquire::Check-Valid-Until false;" > /etc/apt/apt.conf.d/AllowSnapshot && \
    echo 'Package: libmariadb libmariadb3 libmariadb-dev mariadb*' > /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin: origin "snapshot.debian.org"' >> /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin-Priority: 1001' >> /etc/apt/preferences.d/mariadb-snapshot

# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@@ -86,15 +94,11 @@ RUN apt-get update && \
    xx-apt-get install -y \
    --no-install-recommends \
    gcc \
    libmariadb3 \
    libpq-dev \
    libpq5 \
    libssl-dev \
    libmariadb-dev \
    zlib1g-dev && \
    # Force install arch dependent mariadb dev packages
    # Installing them the normal way breaks several other packages (again)
    apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
    dpkg --force-all -i ./libmariadb-dev*.deb && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
{% endif %}
@@ -212,11 +216,19 @@ ENV ROCKET_PROFILE="release" \
# Create data folder and Install needed libraries
RUN mkdir /data && \
{% if base == "debian" %}
    # Force the install of an older MariaDB library to prevent a Diesel panic
    # See https://github.com/dani-garcia/vaultwarden/issues/6416
    echo "deb http://snapshot.debian.org/archive/debian/20250707T084701Z/ trixie main" > /etc/apt/sources.list.d/snapshot.list && \
    echo "Acquire::Check-Valid-Until false;" > /etc/apt/apt.conf.d/AllowSnapshot && \
    echo 'Package: libmariadb libmariadb3 libmariadb-dev mariadb*' > /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin: origin "snapshot.debian.org"' >> /etc/apt/preferences.d/mariadb-snapshot && \
    echo 'Pin-Priority: 1001' >> /etc/apt/preferences.d/mariadb-snapshot && \
    # Continue with normal install
    apt-get update && apt-get install -y \
    --no-install-recommends \
    ca-certificates \
    curl \
    libmariadb-dev-compat \
    libmariadb3 \
    libpq5 \
    openssl && \
    apt-get clean && \
@@ -116,7 +116,7 @@ docker/bake.sh
```

You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
This will also append those values to the tag so you can see the builded container when running `docker images`.
This will also append those values to the tag so you can see the built container when running `docker images`.

You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use.
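As an illustration, extra arguments are forwarded to `docker buildx bake`, so passing the standard `--print` flag dumps the resolved build configuration without building (the target choice here is just an example):

```bash
# --print is a docker buildx bake flag; it prints the resolved config and exits
docker/bake.sh debian --print
```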
@@ -162,7 +162,7 @@ You can append extra arguments after the target if you want. This can be useful

For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br>

### Testing podman builded images
### Testing podman built images

The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.
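For example (the image tag below is hypothetical; use whatever `podman images` lists for your build):

```bash
# Podman-built images live under the localhost/ registry prefix
podman run --rm -p 8080:80 localhost/vaultwarden/server:testing
```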
@@ -1,7 +1,11 @@
[package]
name = "macros"
version = "0.1.0"
edition = "2021"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true

[lib]
name = "macros"
@@ -9,8 +13,8 @@ path = "src/lib.rs"
proc-macro = true

[dependencies]
quote = "1.0.40"
syn = "2.0.104"
quote = "1.0.42"
syn = "2.0.110"

[lints]
workspace = true
1 migrations/mysql/2023-09-10-133000_add_sso/down.sql (new file)
@@ -0,0 +1 @@
DROP TABLE sso_nonce;

4 migrations/mysql/2023-09-10-133000_add_sso/up.sql (new file)
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

@@ -0,0 +1,6 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;
7 migrations/mysql/2024-03-06-170000_add_sso_users/up.sql (new file)
@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier VARCHAR(768) NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),

FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

@@ -0,0 +1,2 @@
ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`;
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

1 migrations/postgresql/2023-09-10-133000_add_sso/down.sql (new file)
@@ -0,0 +1 @@
DROP TABLE sso_nonce;

4 migrations/postgresql/2023-09-10-133000_add_sso/up.sql (new file)
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

@@ -0,0 +1,6 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1,8 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;

@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),

FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

@@ -0,0 +1,3 @@
ALTER TABLE sso_users
DROP CONSTRAINT "sso_users_user_uuid_fkey",
ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;
1 migrations/sqlite/2023-09-10-133000_add_sso/down.sql (new file)
@@ -0,0 +1 @@
DROP TABLE sso_nonce;

4 migrations/sqlite/2023-09-10-133000_add_sso/up.sql (new file)
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

@@ -0,0 +1,6 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1,8 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;

7 migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql (new file)
@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_users;

CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE
);
64 playwright/.env.template (new file)
@@ -0,0 +1,64 @@
#################################
### Conf to run dev instances ###
#################################
ENV=dev
DC_ENV_FILE=.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1

################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=${TEST_USER}
TEST_USER_MAIL=${TEST_USER}@yopmail.com

TEST_USER2=test2
TEST_USER2_PASSWORD=${TEST_USER2}
TEST_USER2_MAIL=${TEST_USER2}@yopmail.com

TEST_USER3=test3
TEST_USER3_PASSWORD=${TEST_USER3}
TEST_USER3_MAIL=${TEST_USER3}@yopmail.com

###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8080

# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}

######################
# Vaultwarden Config #
######################
ROCKET_ADDRESS=0.0.0.0
ROCKET_PORT=8000
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
I_REALLY_WANT_VOLATILE_STORAGE=true

SSO_ENABLED=true
SSO_ONLY=false
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}

SMTP_HOST=127.0.0.1
SMTP_PORT=1025
SMTP_SECURITY=off
SMTP_TIMEOUT=5
SMTP_FROM=vaultwarden@test
SMTP_FROM_NAME=Vaultwarden

########################################################
# DUMMY values for docker-compose to stop bothering us #
########################################################
MARIADB_PORT=3305
MYSQL_PORT=3307
POSTGRES_PORT=5432

6 playwright/.gitignore (vendored, new file)
@@ -0,0 +1,6 @@
logs
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
temp
179 playwright/README.md (new file)
@@ -0,0 +1,179 @@
# Integration tests

This allows running integration tests using [Playwright](https://playwright.dev/).

It uses its own `test.env` with different ports to not collide with a running dev instance.

## Install

This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers.

### Running Playwright outside docker

It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change.
You will additionally need `nodejs`, then run:

```bash
npm ci --ignore-scripts
npx playwright install-deps
npx playwright install firefox
```

## Usage

To run all the tests:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright
```

To force a rebuild of the Playwright image:
```bash
DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright
```

To access the UI, to easily run tests individually and debug if needed (this will not work in docker):

```bash
npx playwright test --ui
```

### DB

Projects are configured to allow running tests against a specific database only.

You can use:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite
```

### SSO

To run the SSO tests:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite
```

### Keep services running

If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests):

```bash
PW_KEEP_SERVICE_RUNNNING=true npx playwright test
```

### Running specific tests

To run a whole file you can:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login
```

To run only a specific test (it might fail if it has dependencies):

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation"
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16
```

## Writing scenarios

When creating a new scenario, use the recorder to more easily identify elements
(in general, try to rely on visible hints to identify elements, not hidden IDs).
This does not start the server; you will need to start it manually.

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden
npx playwright codegen "http://127.0.0.1:8003"
```

## Override web-vault

It is possible to change the `web-vault` used by referencing a different `vw_web_builds` commit.

The simplest way is to set and uncomment `PW_VW_REPO_URL` and `PW_VW_COMMIT_HASH` in `test.env`.
Ensure that the image is built with:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Vaultwarden
```

You can check the result by running:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden
```

Then check `http://127.0.0.1:8003/admin/diagnostics` with `admin`.

# OpenID Connect test setup

Additionally, this `docker-compose` template allows you to run Vaultwarden,
[Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC.

## Setup

This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`).

## Usage

Then start the stack (the `profile` is required to run `Vaultwarden`):

```bash
> docker compose --profile vaultwarden --env-file .env up
....
keycloakSetup_1 | Logging into http://127.0.0.1:8080 as user admin of realm master
keycloakSetup_1 | Created new realm with id 'test'
keycloakSetup_1 | 74af4933-e386-4e64-ba15-a7b61212c45e
oidc_keycloakSetup_1 exited with code 0
```

Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates the correct setup of the Keycloak realm, client and user
(it is normal for this container to stop once the configuration is done).

Then you can access:

- `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`.
- `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin`
- `Maildev` on http://0.0.0.0:1080

To proceed with an SSO login, after you enter the email the SSO button should be visible on the screen prompting for the `Master Password`.
To use your computer's external IP (for example when testing with a phone) you will have to configure `KC_HTTP_HOST` and `DOMAIN`.

## Running only Keycloak

You can run just `Keycloak` with `--profile keycloak`:

```bash
> docker compose --profile keycloak --env-file .env up
```
When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases).

## Rebuilding Vaultwarden

To force rebuilding the Vaultwarden image you can run:

```bash
docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden
```

## Configuration

All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template).
The content of the file will be loaded as environment variables in all containers.

- `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)).
- All `Vaultwarden` configuration can be set (e.g. `SMTP_*`)

## Cleanup

Use `docker compose --profile vaultwarden down`.
19 playwright/compose/keycloak/Dockerfile (new file)
@@ -0,0 +1,19 @@
FROM docker.io/library/debian:trixie-slim

ARG KEYCLOAK_VERSION

ENV DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN apt-get update && apt-get install -y ca-certificates curl jq openjdk-21-jdk-headless wget

WORKDIR /

RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz \
    && mkdir -p /opt/keycloak \
    && mv /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin \
    && rm -rf /keycloak-${KEYCLOAK_VERSION}

COPY setup.sh /setup.sh

CMD "/setup.sh"
44 playwright/compose/keycloak/setup.sh (executable, new file)
@@ -0,0 +1,44 @@
#!/bin/bash

export PATH=/opt/keycloak/bin:$PATH

STATUS_CODE=0
while [[ "$STATUS_CODE" != "404" ]] ; do
    echo "Will retry in 2 seconds"
    sleep 2

    STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$DUMMY_AUTHORITY")

    if [[ "$STATUS_CODE" = "200" ]]; then
        echo "Setup should already be done. Will not run."
        exit 0
    fi
done

set -e

kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli

kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600"
kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i

TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n

TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n

TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n

# Dummy realm to mark end of setup
kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600"

# TO DEBUG uncomment the following line to keep the setup container running
# sleep 3600
# THEN in another terminal:
# docker exec -it keycloakSetup-dev /bin/bash
# export PATH=$PATH:/opt/keycloak/bin
# kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli
# ENJOY
# Doc: https://wjw465150.gitbooks.io/keycloak-documentation/content/server_admin/topics/admin-cli.html
40 playwright/compose/playwright/Dockerfile (new file)
@@ -0,0 +1,40 @@
FROM docker.io/library/debian:trixie-slim

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update \
    && apt-get install -y ca-certificates curl \
    && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \
    && chmod a+r /etc/apt/keyrings/docker.asc \
    && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian trixie stable" | tee /etc/apt/sources.list.d/docker.list \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
    containerd.io \
    docker-buildx-plugin \
    docker-ce \
    docker-ce-cli \
    docker-compose-plugin \
    git \
    libmariadb-dev-compat \
    libpq5 \
    nodejs \
    npm \
    openssl \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /playwright
WORKDIR /playwright

COPY package.json package-lock.json .
RUN npm ci --ignore-scripts && npx playwright install-deps && npx playwright install firefox

COPY docker-compose.yml test.env ./
COPY compose ./compose

COPY *.ts test.env ./
COPY tests ./tests

ENTRYPOINT ["/usr/bin/npx", "playwright"]
CMD ["test"]
40 playwright/compose/warden/Dockerfile (new file)
@@ -0,0 +1,40 @@
FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt

FROM node:22-trixie AS build

ARG REPO_URL
ARG COMMIT_HASH

ENV REPO_URL=$REPO_URL
ENV COMMIT_HASH=$COMMIT_HASH

COPY --from=prebuilt /web-vault /web-vault

COPY build.sh /build.sh
RUN /build.sh

######################## RUNTIME IMAGE ########################
FROM docker.io/library/debian:trixie-slim

ENV DEBIAN_FRONTEND=noninteractive

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
    --no-install-recommends \
    ca-certificates \
    curl \
    libmariadb-dev \
    libpq5 \
    openssl && \
    rm -rf /var/lib/apt/lists/*

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY --from=prebuilt /start.sh .
COPY --from=prebuilt /vaultwarden .
COPY --from=build /web-vault ./web-vault

ENTRYPOINT ["/start.sh"]
24 playwright/compose/warden/build.sh (executable, new file)
@@ -0,0 +1,24 @@
#!/bin/bash

echo $REPO_URL
echo $COMMIT_HASH

if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then
    rm -rf /web-vault

    mkdir -p vw_web_builds;
    cd vw_web_builds;

    git -c init.defaultBranch=main init
    git remote add origin "$REPO_URL"
    git fetch --depth 1 origin "$COMMIT_HASH"
    git -c advice.detachedHead=false checkout FETCH_HEAD

    npm ci --ignore-scripts

    cd apps/web
    npm run dist:oss:selfhost
    printf '{"version":"%s"}' "$COMMIT_HASH" > build/vw-version.json

    mv build /web-vault
fi
124 playwright/docker-compose.yml (new file)
@@ -0,0 +1,124 @@
services:
  VaultwardenPrebuild:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden_prebuilt
    image: playwright_oidc_vaultwarden_prebuilt
    build:
      context: ..
      dockerfile: Dockerfile
    entrypoint: /bin/bash
    restart: "no"

  Vaultwarden:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden-${ENV:-dev}
    image: playwright_oidc_vaultwarden-${ENV:-dev}
    network_mode: "host"
    build:
      context: compose/warden
      dockerfile: Dockerfile
      args:
        REPO_URL: ${PW_VW_REPO_URL:-}
        COMMIT_HASH: ${PW_VW_COMMIT_HASH:-}
    env_file: ${DC_ENV_FILE:-.env}
    environment:
      - ADMIN_TOKEN
      - DATABASE_URL
      - I_REALLY_WANT_VOLATILE_STORAGE
      - LOG_LEVEL
      - LOGIN_RATELIMIT_MAX_BURST
      - SMTP_HOST
      - SMTP_FROM
      - SMTP_DEBUG
      - SSO_DEBUG_TOKENS
      - SSO_ENABLED
      - SSO_FRONTEND
      - SSO_ONLY
      - SSO_SCOPES
    restart: "no"
    depends_on:
      - VaultwardenPrebuild

  Playwright:
    profiles: ["playwright"]
    container_name: playwright_oidc_playwright
    image: playwright_oidc_playwright
    network_mode: "host"
    build:
      context: .
      dockerfile: compose/playwright/Dockerfile
    environment:
      - PW_WV_REPO_URL
      - PW_WV_COMMIT_HASH
    restart: "no"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ..:/project

  Mariadb:
    profiles: ["playwright"]
    container_name: playwright_mariadb
    image: mariadb:11.2.4
    env_file: test.env
    healthcheck:
      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MARIADB_PORT}:3306

  Mysql:
    profiles: ["playwright"]
    container_name: playwright_mysql
    image: mysql:8.4.1
    env_file: test.env
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MYSQL_PORT}:3306

  Postgres:
    profiles: ["playwright"]
    container_name: playwright_postgres
    image: postgres:16.3
    env_file: test.env
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
      start_period: 20s
      interval: 30s
    ports:
      - ${POSTGRES_PORT}:5432

  Maildev:
    profiles: ["vaultwarden", "maildev"]
    container_name: maildev
    image: timshel/maildev:3.0.4
    ports:
      - ${SMTP_PORT}:1025
      - 1080:1080

  Keycloak:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloak-${ENV:-dev}
    image: quay.io/keycloak/keycloak:26.3.4
    network_mode: "host"
    command:
      - start-dev
    env_file: ${DC_ENV_FILE:-.env}

  KeycloakSetup:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloakSetup-${ENV:-dev}
    image: keycloak_setup-${ENV:-dev}
    build:
      context: compose/keycloak
      dockerfile: Dockerfile
      args:
        KEYCLOAK_VERSION: 26.3.4
    network_mode: "host"
    depends_on:
      - Keycloak
    restart: "no"
    env_file: ${DC_ENV_FILE:-.env}
22 playwright/global-setup.ts (new file)
@@ -0,0 +1,22 @@
import { firefox, type FullConfig } from '@playwright/test';
import { execSync } from 'node:child_process';
import fs from 'fs';

const utils = require('./global-utils');

utils.loadEnv();

async function globalSetup(config: FullConfig) {
    // Are we running in docker and the project is mounted ?
    const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : ".");
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, {
        env: { ...process.env },
        stdio: "inherit"
    });
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, {
        env: { ...process.env },
        stdio: "inherit"
    });
}

export default globalSetup;
262
playwright/global-utils.ts
Normal file
262
playwright/global-utils.ts
Normal file
@@ -0,0 +1,262 @@
|
||||
import { expect, type Browser, type TestInfo } from '@playwright/test';
|
||||
import { EventEmitter } from "events";
|
||||
import { type Mail, MailServer } from 'maildev';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
import dotenv from 'dotenv';
|
||||
import dotenvExpand from 'dotenv-expand';
|
||||
|
||||
const fs = require("fs");
|
||||
const { spawn } = require('node:child_process');
|
||||
|
||||
export function loadEnv(){
|
||||
var myEnv = dotenv.config({ path: 'test.env', quiet: true });
|
||||
dotenvExpand.expand(myEnv);
|
||||
|
||||
return {
|
||||
user1: {
|
||||
email: process.env.TEST_USER_MAIL,
|
||||
name: process.env.TEST_USER,
|
||||
password: process.env.TEST_USER_PASSWORD,
|
||||
},
|
||||
user2: {
|
||||
email: process.env.TEST_USER2_MAIL,
|
||||
name: process.env.TEST_USER2,
|
||||
password: process.env.TEST_USER2_PASSWORD,
|
||||
},
|
||||
user3: {
|
||||
email: process.env.TEST_USER3_MAIL,
|
||||
name: process.env.TEST_USER3,
|
||||
password: process.env.TEST_USER3_PASSWORD,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export async function waitFor(url: String, browser: Browser) {
|
||||
var ready = false;
|
||||
var context;
|
||||
|
||||
do {
|
||||
try {
|
||||
context = await browser.newContext();
|
||||
const page = await context.newPage();
|
||||
await page.waitForTimeout(500);
|
||||
const result = await page.goto(url);
|
||||
ready = result.status() === 200;
|
||||
} catch(e) {
|
||||
if( !e.message.includes("CONNECTION_REFUSED") ){
|
||||
throw e;
|
||||
}
|
||||
} finally {
|
||||
await context.close();
|
||||
}
|
||||
} while(!ready);
|
||||
}
|
||||
|
||||
export function startComposeService(serviceName: String){
|
||||
console.log(`Starting ${serviceName}`);
|
||||
execSync(`docker compose --profile playwright --env-file test.env up -d ${serviceName}`);
|
||||
}
|
||||
|
export function stopComposeService(serviceName: string) {
    console.log(`Stopping ${serviceName}`);
    execSync(`docker compose --profile playwright --env-file test.env stop ${serviceName}`);
}

function wipeSqlite() {
    console.log(`Delete Vaultwarden container to wipe sqlite`);
    execSync(`docker compose --env-file test.env stop Vaultwarden`);
    execSync(`docker compose --env-file test.env rm -f Vaultwarden`);
}

async function wipeMariaDB() {
    const mysql = require('mysql2/promise');
    let ready = false;
    let connection;

    do {
        try {
            connection = await mysql.createConnection({
                user: process.env.MARIADB_USER,
                host: "127.0.0.1",
                database: process.env.MARIADB_DATABASE,
                password: process.env.MARIADB_PASSWORD,
                port: process.env.MARIADB_PORT,
            });

            await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`);
            await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`);
            console.log('Successfully wiped mariadb');
            ready = true;
        } catch (err) {
            console.log(`Error when wiping mariadb: ${err}`);
        } finally {
            if (connection) {
                await connection.end();
            }
        }
        await new Promise(r => setTimeout(r, 1000));
    } while (!ready);
}

async function wipeMysqlDB() {
    const mysql = require('mysql2/promise');
    let ready = false;
    let connection;

    do {
        try {
            connection = await mysql.createConnection({
                user: process.env.MYSQL_USER,
                host: "127.0.0.1",
                database: process.env.MYSQL_DATABASE,
                password: process.env.MYSQL_PASSWORD,
                port: process.env.MYSQL_PORT,
            });

            await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`);
            await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`);
            console.log('Successfully wiped mysql');
            ready = true;
        } catch (err) {
            console.log(`Error when wiping mysql: ${err}`);
        } finally {
            if (connection) {
                await connection.end();
            }
        }
        await new Promise(r => setTimeout(r, 1000));
    } while (!ready);
}

async function wipePostgres() {
    const { Client } = require('pg');

    const client = new Client({
        user: process.env.POSTGRES_USER,
        host: "127.0.0.1",
        database: "postgres",
        password: process.env.POSTGRES_PASSWORD,
        port: process.env.POSTGRES_PORT,
    });

    try {
        await client.connect();
        await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`);
        await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`);
        console.log('Successfully wiped postgres');
    } catch (err) {
        console.log(`Error when wiping postgres: ${err}`);
    } finally {
        await client.end();
    }
}

function dbConfig(testInfo: TestInfo) {
    switch (testInfo.project.name) {
        case "postgres":
        case "sso-postgres":
            return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` };
        case "mariadb":
        case "sso-mariadb":
            return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` };
        case "mysql":
        case "sso-mysql":
            return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}` };
        case "sqlite":
        case "sso-sqlite":
            return { I_REALLY_WANT_VOLATILE_STORAGE: true };
        default:
            throw new Error(`Unknown database name: ${testInfo.project.name}`);
    }
}
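
// Example (values taken from test.env below): for the "postgres" project this returns
//   { DATABASE_URL: "postgresql://warden:warden@127.0.0.1:5433/warden" }
// which startVault layers into the environment of `docker compose up`.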

/**
 * All parameters passed in `env` need to be added to the docker-compose.yml
 **/
export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: boolean = true) {
    if (resetDB) {
        switch (testInfo.project.name) {
            case "postgres":
            case "sso-postgres":
                await wipePostgres();
                break;
            case "mariadb":
            case "sso-mariadb":
                await wipeMariaDB();
                break;
            case "mysql":
            case "sso-mysql":
                await wipeMysqlDB();
                break;
            case "sqlite":
            case "sso-sqlite":
                wipeSqlite();
                break;
            default:
                throw new Error(`Unknown database name: ${testInfo.project.name}`);
        }
    }

    console.log(`Starting Vaultwarden`);
    execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, {
        // Keep the parent environment (PATH, ...) and layer the test overrides on top.
        env: { ...process.env, ...env, ...dbConfig(testInfo) },
    });
    await waitFor("/", browser);
    console.log(`Vaultwarden running on: ${process.env.DOMAIN}`);
}
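
// Illustration of the docker-compose.yml requirement noted above (the compose file
// is not part of this excerpt, so the exact service block is an assumption): a
// variable passed via `env` only reaches the container if the Vaultwarden service
// forwards it, along the lines of:
//
//   services:
//     Vaultwarden:
//       environment:
//         - SMTP_HOST
//         - SMTP_FROM
//         - DATABASE_URL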

export async function stopVault(force: boolean = false) {
    if (force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true") {
        console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`);
    } else {
        console.log(`Vaultwarden stopping`);
        execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`);
    }
}

export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: boolean = true) {
    await stopVault(true);
    return startVault(page.context().browser(), testInfo, env, resetDB);
}

export async function checkNotification(page: Page, hasText: string) {
    await expect(page.locator('bit-toast', { hasText })).toBeVisible();
    try {
        await page.locator('bit-toast', { hasText }).getByRole('button', { name: 'Close' }).click({ force: true, timeout: 10_000 });
    } catch (error) {
        console.log(`Closing notification failed but it should now be invisible (${error})`);
    }
    await expect(page.locator('bit-toast', { hasText })).toHaveCount(0);
}

export async function cleanLanding(page: Page) {
    await page.goto('/', { waitUntil: 'domcontentloaded' });
    await expect(page.getByRole('button').nth(0)).toBeVisible();

    const logged = await page.getByRole('button', { name: 'Log out' }).count();
    if (logged > 0) {
        await page.getByRole('button', { name: 'Log out' }).click();
        await page.getByRole('button', { name: 'Log out' }).click();
    }
}

export async function logout(test: Test, page: Page, user: { name: string }) {
    await test.step('logout', async () => {
        await page.getByRole('button', { name: user.name, exact: true }).click();
        await page.getByRole('menuitem', { name: 'Log out' }).click();
        await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible();
    });
}

export async function ignoreExtension(page: Page) {
    await page.waitForLoadState('domcontentloaded');

    try {
        await page.getByRole('button', { name: 'Add it later' }).click({ timeout: 5_000 });
        await page.getByRole('link', { name: 'Skip to web app' }).click();
    } catch (error) {
        console.log('Extension setup not visible. Continuing');
    }
}

playwright/package-lock.json (generated, 2706 lines): file diff suppressed because it is too large

playwright/package.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "name": "scenarios",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {},
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@playwright/test": "1.56.1",
    "dotenv": "17.2.3",
    "dotenv-expand": "12.0.3",
    "maildev": "npm:@timshel_npm/maildev@3.2.5"
  },
  "dependencies": {
    "mysql2": "3.15.3",
    "otpauth": "9.4.1",
    "pg": "8.16.3"
  }
}

playwright/playwright.config.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
import { defineConfig, devices } from '@playwright/test';
import { exec } from 'node:child_process';

const utils = require('./global-utils');

utils.loadEnv();

/**
 * See https://playwright.dev/docs/test-configuration.
 */
export default defineConfig({
    testDir: './.',
    /* Run tests in files in parallel */
    fullyParallel: false,

    /* Fail the build on CI if you accidentally left test.only in the source code. */
    forbidOnly: !!process.env.CI,

    retries: 0,
    workers: 1,

    /* Reporter to use. See https://playwright.dev/docs/test-reporters */
    reporter: 'html',

    /* Long global timeout for complex tests
     * But short action/nav/expect timeouts to fail on specific step (raise locally if not enough).
     */
    timeout: 120 * 1000,
    actionTimeout: 20 * 1000,
    navigationTimeout: 20 * 1000,
    expect: { timeout: 20 * 1000 },

    /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
    use: {
        /* Base URL to use in actions like `await page.goto('/')`. */
        baseURL: process.env.DOMAIN,
        browserName: 'firefox',
        locale: 'en-GB',
        timezoneId: 'Europe/London',

        /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */
        trace: 'on',
        viewport: {
            width: 1080,
            height: 720,
        },
        video: "on",
    },

    /* Configure projects for major browsers */
    projects: [
        {
            name: 'mariadb-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mariadb" },
            teardown: 'mariadb-teardown',
        },
        {
            name: 'mysql-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mysql" },
            teardown: 'mysql-teardown',
        },
        {
            name: 'postgres-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Postgres" },
            teardown: 'postgres-teardown',
        },
        {
            name: 'sso-setup',
            testMatch: 'tests/setups/sso-setup.ts',
            teardown: 'sso-teardown',
        },

        {
            name: 'mariadb',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mariadb-setup'],
        },
        {
            name: 'mysql',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mysql-setup'],
        },
        {
            name: 'postgres',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['postgres-setup'],
        },
        {
            name: 'sqlite',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
        },

        {
            name: 'sso-mariadb',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mariadb-setup'],
        },
        {
            name: 'sso-mysql',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mysql-setup'],
        },
        {
            name: 'sso-postgres',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'postgres-setup'],
        },
        {
            name: 'sso-sqlite',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup'],
        },

        {
            name: 'mariadb-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mariadb" },
        },
        {
            name: 'mysql-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mysql" },
        },
        {
            name: 'postgres-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Postgres" },
        },
        {
            name: 'sso-teardown',
            testMatch: 'tests/setups/sso-teardown.ts',
        },
    ],

    globalSetup: require.resolve('./global-setup'),
});
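
// Usage sketch (assumes commands are run from the playwright/ directory):
//   npx playwright test --project sqlite          # non-SSO suite against SQLite
//   npx playwright test --project sso-postgres    # SSO suite against Postgres
// Each database project pulls in its *-setup project, which starts the matching
// compose service; the paired *-teardown project stops it again unless
// PW_KEEP_SERVICE_RUNNNING=true.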

playwright/test.env (new file, 98 lines)
@@ -0,0 +1,98 @@
##################################################################
### Shared Playwright conf test file Vaultwarden and Databases ###
##################################################################

ENV=test
DC_ENV_FILE=test.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1

#####################
# Playwright Config #
#####################
PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false}
PW_SMTP_FROM=vaultwarden@playwright.test

##################
# Maildev Config #
##################
MAILDEV_HTTP_PORT=1081
MAILDEV_SMTP_PORT=1026
MAILDEV_HOST=127.0.0.1

################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=Master Password
TEST_USER_MAIL=${TEST_USER}@example.com

TEST_USER2=test2
TEST_USER2_PASSWORD=Master Password
TEST_USER2_MAIL=${TEST_USER2}@example.com

TEST_USER3=test3
TEST_USER3_PASSWORD=Master Password
TEST_USER3_MAIL=${TEST_USER3}@example.com

###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8081

# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}

######################
# Vaultwarden Config #
######################
ROCKET_PORT=8003
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
LOGIN_RATELIMIT_MAX_BURST=100
ADMIN_TOKEN=admin

SMTP_SECURITY=off
SMTP_PORT=${MAILDEV_SMTP_PORT}
SMTP_FROM_NAME=Vaultwarden
SMTP_TIMEOUT=5

SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SSO_DEBUG_TOKENS=true

# Custom web-vault build
# PW_VW_REPO_URL=https://github.com/vaultwarden/vw_web_builds.git
# PW_VW_COMMIT_HASH=b5f5b2157b9b64b5813bc334a75a277d0377b5d3

############################
# Docker MariaDB container #
############################
MARIADB_PORT=3307
MARIADB_ROOT_PASSWORD=warden
MARIADB_USER=warden
MARIADB_PASSWORD=warden
MARIADB_DATABASE=warden

##########################
# Docker MySQL container #
##########################
MYSQL_PORT=3309
MYSQL_ROOT_PASSWORD=warden
MYSQL_USER=warden
MYSQL_PASSWORD=warden
MYSQL_DATABASE=warden

#############################
# Docker Postgres container #
#############################
POSTGRES_PORT=5433
POSTGRES_USER=warden
POSTGRES_PASSWORD=warden
POSTGRES_DB=warden
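
# Note: the ${VAR} references in this file rely on interpolation: docker compose
# resolves them when the file is passed via --env-file, and on the Playwright side
# loadEnv() presumably expands them through dotenv-expand (see package.json).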

playwright/tests/collection.spec.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { test, expect, type TestInfo } from '@playwright/test';

import * as utils from "../global-utils";
import { createAccount } from './setups/user';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Create', async ({ page }) => {
    await createAccount(test, page, users.user1);

    await test.step('Create Org', async () => {
        await page.getByRole('link', { name: 'New organisation' }).click();
        await page.getByLabel('Organisation name (required)').fill('Test');
        await page.getByRole('button', { name: 'Submit' }).click();
        await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();

        await utils.checkNotification(page, 'Organisation created');
    });

    await test.step('Create Collection', async () => {
        await page.getByRole('link', { name: 'Collections' }).click();
        await page.getByRole('button', { name: 'New' }).click();
        await page.getByRole('menuitem', { name: 'Collection' }).click();
        await page.getByLabel('Name (required)').fill('RandomCollec');
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'Created collection RandomCollec');
        await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible();
    });
});

playwright/tests/login.smtp.spec.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

const utils = require('../global-utils');
import { createAccount, logUser } from './setups/user';
import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa';

let users = utils.loadEnv();

let mailserver;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailserver = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    });

    await mailserver.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    if (mailserver) {
        await mailserver.close();
    }
});

test('Account creation', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await createAccount(test, page, users.user1, mailBuffer);

    mailBuffer.close();
});

test('Login', async ({ context, page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1, mailBuffer);

    await test.step('verify email', async () => {
        await page.getByText('Verify your account\'s email').click();
        await expect(page.getByText('Verify your account\'s email')).toBeVisible();
        await page.getByRole('button', { name: 'Send email' }).click();

        await utils.checkNotification(page, 'Check your email inbox for a verification link');

        const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email");
        expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM);

        const page2 = await context.newPage();
        await page2.setContent(verify.html);
        const link = await page2.getByTestId("verify").getAttribute("href");
        await page2.close();

        await page.goto(link);
        await utils.checkNotification(page, 'Account email verified');
    });

    mailBuffer.close();
});

test('Activate 2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1);

    await activateEmail(test, page, users.user1, emails);

    emails.close();
});

test('2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);

    await test.step('login', async () => {
        await page.goto('/');

        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        const code = await retrieveEmailCode(test, page, emails);
        await page.getByLabel(/Verification code/).fill(code);
        await page.getByRole('button', { name: 'Continue' }).click();

        await page.getByRole('button', { name: 'Add it later' }).click();
        await page.getByRole('link', { name: 'Skip to web app' }).click();

        await expect(page).toHaveTitle(/Vaults/);
    });

    await disableEmail(test, page, users.user1);

    emails.close();
});

playwright/tests/login.spec.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
import { test, expect, type Page, type TestInfo } from '@playwright/test';
import * as OTPAuth from "otpauth";

import * as utils from "../global-utils";
import { createAccount, logUser } from './setups/user';
import { activateTOTP, disableTOTP } from './setups/2fa';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {});
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Account creation', async ({ page }) => {
    await createAccount(test, page, users.user1);
});

test('Master password login', async ({ page }) => {
    await logUser(test, page, users.user1);
});

test('Authenticator 2fa', async ({ page }) => {
    await logUser(test, page, users.user1);

    let totp = await activateTOTP(test, page, users.user1);

    await utils.logout(test, page, users.user1);

    await test.step('login', async () => {
        // Jump past the end of the current TOTP period so the generated code
        // differs from the one already consumed during activation.
        let timestamp = Date.now();
        timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;

        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        await page.getByLabel(/Verification code/).fill(totp.generate({ timestamp }));
        await page.getByRole('button', { name: 'Continue' }).click();

        await expect(page).toHaveTitle(/Vaultwarden Web/);
    });

    await disableTOTP(test, page, users.user1);
});

playwright/tests/organization.smtp.spec.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from '../global-utils';
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';

let users = utils.loadEnv();

let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailServer = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    });

    await mailServer.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });

    mail1Buffer = mailServer.buffer(users.user1.email);
    mail2Buffer = mailServer.buffer(users.user2.email);
    mail3Buffer = mailServer.buffer(users.user3.email);
});

test.afterAll('Teardown', async ({}, testInfo: TestInfo) => {
    utils.stopVault();
    [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});

test('Create user3', async ({ page }) => {
    await createAccount(test, page, users.user3, mail3Buffer);
});

test('Invite users', async ({ page }) => {
    await createAccount(test, page, users.user1, mail1Buffer);

    await orgs.create(test, page, 'Test');
    await orgs.members(test, page, 'Test');
    await orgs.invite(test, page, 'Test', users.user2.email);
    await orgs.invite(test, page, 'Test', users.user3.email, {
        navigate: false,
    });
});

test('invited with new account', async ({ page }) => {
    const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test');

    await test.step('Create account', async () => {
        await page.setContent(invited.html);
        const link = await page.getByTestId('invite').getAttribute('href');
        await page.goto(link);
        await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/);

        //await page.getByLabel('Name').fill(users.user2.name);
        await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
        await page.getByLabel('Confirm master password (').fill(users.user2.password);
        await page.getByRole('button', { name: 'Create account' }).click();
        await utils.checkNotification(page, 'Your new account has been created');

        await utils.checkNotification(page, 'Invitation accepted');
        await utils.ignoreExtension(page);

        // Redirected to the vault
        await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
        // await utils.checkNotification(page, 'You have been logged in!');
    });

    await test.step('Check mails', async () => {
        await mail2Buffer.expect((m) => m.subject === 'Welcome');
        await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
        await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
    });
});

test('invited with existing account', async ({ page }) => {
    const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test');

    await page.setContent(invited.html);
    const link = await page.getByTestId('invite').getAttribute('href');

    await page.goto(link);

    // We should be on login page with email prefilled
    await expect(page).toHaveTitle(/Vaultwarden Web/);
    await page.getByRole('button', { name: 'Continue' }).click();

    // Unlock page
    await page.getByLabel('Master password').fill(users.user3.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();

    await utils.checkNotification(page, 'Invitation accepted');
    await utils.ignoreExtension(page);

    // We are now in the default vault page
    await expect(page).toHaveTitle(/Vaultwarden Web/);

    await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
    await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
});

test('Confirm invited user', async ({ page }) => {
    await logUser(test, page, users.user1, mail1Buffer);

    await orgs.members(test, page, 'Test');
    await orgs.confirm(test, page, 'Test', users.user2.email);

    await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed'));
});

test('Organization is visible', async ({ page }) => {
    await logUser(test, page, users.user2, mail2Buffer);
    await page.getByRole('button', { name: 'vault: Test', exact: true }).click();
    await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});

playwright/tests/organization.spec.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Invite', async ({ page }) => {
    await createAccount(test, page, users.user3);
    await createAccount(test, page, users.user1);

    await orgs.create(test, page, 'New organisation');
    await orgs.members(test, page, 'New organisation');

    await test.step('missing user2', async () => {
        await orgs.invite(test, page, 'New organisation', users.user2.email);
        await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/);
    });

    await test.step('existing user3', async () => {
        await orgs.invite(test, page, 'New organisation', users.user3.email);
        await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/);
        await orgs.confirm(test, page, 'New organisation', users.user3.email);
    });

    await test.step('confirm user2', async () => {
        await createAccount(test, page, users.user2);
        await logUser(test, page, users.user1);
        await orgs.members(test, page, 'New organisation');
        await orgs.confirm(test, page, 'New organisation', users.user2.email);
    });

    await test.step('Org visible user2', async () => {
        await logUser(test, page, users.user2);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });

    await test.step('Org visible user3', async () => {
        await logUser(test, page, users.user3);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });
});

playwright/tests/setups/2fa.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer } from 'maildev';
import * as OTPAuth from "otpauth";

import * as utils from '../../global-utils';

export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> {
    return await test.step('Activate TOTP 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();

        const secret = await page.getByLabel('Key').innerText();
        let totp = new OTPAuth.TOTP({ secret, period: 30 });

        await page.getByLabel(/Verification code/).fill(totp.generate());
        await page.getByRole('button', { name: 'Turn on' }).click();
        await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible();
        await page.getByLabel('Close').click();

        return totp;
    });
}
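
// Usage sketch (as in login.spec.ts): keep the returned OTPAuth.TOTP instance to
// answer the verification prompt on a later login.
//
//   let totp = await activateTOTP(test, page, user);
//   // ... log out, start logging back in ...
//   await page.getByLabel(/Verification code/).fill(totp.generate());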

export async function disableTOTP(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable TOTP 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();
        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}

export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) {
    await test.step('Activate Email 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Enter a code sent to your email' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Send email' }).click();
    });

    let code = await retrieveEmailCode(test, page, mailBuffer);

    await test.step('input code', async () => {
        await page.getByLabel('2. Enter the resulting 6').fill(code);
        await page.getByRole('button', { name: 'Turn on' }).click();
        await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible();
    });
}

export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> {
    return await test.step('retrieve code', async () => {
        const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code"));
        const page2 = await page.context().newPage();
        await page2.setContent(codeMail.html);
        const code = await page2.getByTestId("2fa").innerText();
        await page2.close();
        return code;
    });
}

export async function disableEmail(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable Email 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();

        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}

playwright/tests/setups/db-setup.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { test } from './db-test';

const utils = require('../../global-utils');

test('DB start', async ({ serviceName }) => {
    utils.startComposeService(serviceName);
});

playwright/tests/setups/db-teardown.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { test } from './db-test';

const utils = require('../../global-utils');

utils.loadEnv();

test('DB teardown ?', async ({ serviceName }) => {
    if (process.env.PW_KEEP_SERVICE_RUNNNING !== "true") {
        utils.stopComposeService(serviceName);
    }
});

playwright/tests/setups/db-test.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
import { test as base } from '@playwright/test';

export type TestOptions = {
    serviceName: string;
};

export const test = base.extend<TestOptions>({
    serviceName: ['', { option: true }],
});
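
// The serviceName option above is populated per project in playwright.config.ts,
// e.g. the mariadb setup project declares:
//
//   { name: 'mariadb-setup', testMatch: 'tests/setups/db-setup.ts',
//     use: { serviceName: "Mariadb" }, teardown: 'mariadb-teardown' },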

playwright/tests/setups/orgs.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
import { expect, type Browser, Page } from '@playwright/test';

import * as utils from '../../global-utils';

export async function create(test, page: Page, name: string) {
    await test.step('Create Org', async () => {
        await page.locator('a').filter({ hasText: 'Password Manager' }).first().click();
        await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        await page.getByRole('link', { name: 'New organisation' }).click();
        await page.getByLabel('Organisation name (required)').fill(name);
        await page.getByRole('button', { name: 'Submit' }).click();

        await utils.checkNotification(page, 'Organisation created');
    });
}

export async function policies(test, page: Page, name: string) {
    await test.step(`Navigate to ${name} policies`, async () => {
        await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
        await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
        await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
        await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
        await page.getByRole('button', { name: 'Toggle collapse Settings' }).click();
        await page.getByRole('link', { name: 'Policies' }).click();
        await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible();
    });
}

export async function members(test, page: Page, name: string) {
    await test.step(`Navigate to ${name} members`, async () => {
        await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
        await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
        await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
        await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
        await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await expect(page.getByRole('cell', { name: 'All' })).toBeVisible();
    });
}

// `options` (e.g. `{ navigate: false }` from organization.smtp.spec.ts) is accepted
// for call-site compatibility; navigation to the members page is left to the callers.
export async function invite(test, page: Page, name: string, email: string, options: { navigate?: boolean } = {}) {
    await test.step(`Invite ${email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('button', { name: 'Invite member' }).click();
        await page.getByLabel('Email (required)').fill(email);
        await page.getByRole('tab', { name: 'Collections' }).click();
        await page.getByRole('combobox', { name: 'Permission' }).click();
        await page.getByText('Edit items', { exact: true }).click();
        await page.getByLabel('Select collections').click();
        await page.getByText('Default collection').click();
        await page.getByRole('cell', { name: 'Collection', exact: true }).click();
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'User(s) invited');
    });
}

export async function confirm(test, page: Page, name: string, user_email: string) {
    await test.step(`Confirm ${user_email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('row').filter({ hasText: user_email }).getByLabel('Options').click();
        await page.getByRole('menuitem', { name: 'Confirm' }).click();
        await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible();
        await page.getByRole('button', { name: 'Confirm' }).click();
        await utils.checkNotification(page, 'confirmed');
    });
}

export async function revoke(test, page: Page, name: string, user_email: string) {
    await test.step(`Revoke ${user_email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('row').filter({ hasText: user_email }).getByLabel('Options').click();
        await page.getByRole('menuitem', { name: 'Revoke access' }).click();
        await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible();
        await page.getByRole('button', { name: 'Revoke access' }).click();
        await utils.checkNotification(page, 'Revoked organisation access');
    });
}

playwright/tests/setups/sso-setup.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { test, expect, type TestInfo } from '@playwright/test';

const { exec } = require('node:child_process');
const utils = require('../../global-utils');

utils.loadEnv();

test.beforeAll('Setup', async () => {
    console.log("Starting Keycloak");
    // exec (not execSync): Keycloak boots in the background while the test below polls it.
    exec(`docker compose --profile keycloak --env-file test.env up`);
});

test('Keycloak is up', async ({ page }) => {
    await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser());
    // Dummy authority is created at the end of the setup
    await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser());
    console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`);
});

playwright/tests/setups/sso-teardown.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { test, type FullConfig } from '@playwright/test';

const { execSync } = require('node:child_process');
const utils = require('../../global-utils');

utils.loadEnv();

test('Keycloak teardown', async () => {
    if (process.env.PW_KEEP_SERVICE_RUNNNING === "true") {
        console.log("Keep Keycloak running");
    } else {
        console.log("Keycloak stopping");
        execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`);
    }
});

playwright/tests/setups/sso.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer, MailServer } from 'maildev';
import * as OTPAuth from "otpauth";

import * as utils from '../../global-utils';
import { retrieveEmailCode } from './2fa';

/**
 * If a MailBuffer is passed it will be used and consume the expected emails
 */
export async function logNewUser(
    test: Test,
    page: Page,
    user: { email: string, name: string, password: string },
    options: { mailBuffer?: MailBuffer } = {}
) {
    await test.step(`Create user ${user.name}`, async () => {
        await page.context().clearCookies();

        await test.step('Landing page', async () => {
            await utils.cleanLanding(page);

            await page.locator("input[type=email].vw-email-sso").fill(user.email);
            await page.getByRole('button', { name: /Use single sign-on/ }).click();
        });

        await test.step('Keycloak login', async () => {
            await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
            await page.getByLabel(/Username/).fill(user.name);
            await page.getByLabel('Password', { exact: true }).fill(user.password);
            await page.getByRole('button', { name: 'Sign In' }).click();
        });

        await test.step('Create Vault account', async () => {
            await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
            await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
            await page.getByLabel('Confirm master password (').fill(user.password);
            await page.getByRole('button', { name: 'Create account' }).click();
        });

        await utils.checkNotification(page, 'Account successfully created!');
        await utils.checkNotification(page, 'Invitation accepted');

        await utils.ignoreExtension(page);

        await test.step('Default vault page', async () => {
            await expect(page).toHaveTitle(/Vaultwarden Web/);
            await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        });

        if (options.mailBuffer) {
            let mailBuffer = options.mailBuffer;
            await test.step('Check emails', async () => {
                await mailBuffer.expect((m) => m.subject === "Welcome");
                await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
            });
        }
    });
}
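
// Usage sketch (mirrors sso_login.smtp.spec.ts): pass a MailBuffer so the
// welcome / new-device emails are consumed along the way.
//
//   const mailBuffer = mailserver.buffer(users.user1.email);
//   await logNewUser(test, page, users.user1, { mailBuffer });
//   mailBuffer.close();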

/**
 * If a MailBuffer is passed it will be used and consume the expected emails
 */
export async function logUser(
    test: Test,
    page: Page,
    user: { email: string, name: string, password: string },
    options: {
        mailBuffer?: MailBuffer,
        totp?: OTPAuth.TOTP,
        mail2fa?: boolean,
    } = {}
) {
    let mailBuffer = options.mailBuffer;

    await test.step(`Log user ${user.email}`, async () => {
        await page.context().clearCookies();

        await test.step('Landing page', async () => {
            await utils.cleanLanding(page);

            await page.locator("input[type=email].vw-email-sso").fill(user.email);
            await page.getByRole('button', { name: /Use single sign-on/ }).click();
        });

        await test.step('Keycloak login', async () => {
            await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
            await page.getByLabel(/Username/).fill(user.name);
            await page.getByLabel('Password', { exact: true }).fill(user.password);
            await page.getByRole('button', { name: 'Sign In' }).click();
        });

        if (options.totp || options.mail2fa) {
            let code;

            await test.step('2FA check', async () => {
                await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();

                if (options.totp) {
                    const totp = options.totp;
                    // Jump past the end of the current TOTP period so the generated
                    // code differs from any code consumed in a previous step.
                    let timestamp = Date.now();
                    timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;
                    code = totp.generate({ timestamp });
                } else if (options.mail2fa) {
                    code = await retrieveEmailCode(test, page, mailBuffer);
                }

                await page.getByLabel(/Verification code/).fill(code);
                await page.getByRole('button', { name: 'Continue' }).click();
            });
        }

        await test.step('Unlock vault', async () => {
            await expect(page).toHaveTitle('Vaultwarden Web');
            await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible();
            await page.getByLabel('Master password').fill(user.password);
            await page.getByRole('button', { name: 'Unlock' }).click();
        });

        await utils.ignoreExtension(page);

        await test.step('Default vault page', async () => {
            await expect(page).toHaveTitle(/Vaultwarden Web/);
            await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        });

        if (mailBuffer) {
            await test.step('Check email', async () => {
                await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
            });
        }
    });
}

playwright/tests/setups/user.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
import { expect, type Browser, Page } from '@playwright/test';

import { type MailBuffer } from 'maildev';

import * as utils from '../../global-utils';

export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) {
    await test.step(`Create user ${user.name}`, async () => {
        await utils.cleanLanding(page);

        await page.getByRole('link', { name: 'Create account' }).click();

        // Back to Vault create account
        await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/);
        await page.getByLabel(/Email address/).fill(user.email);
        await page.getByLabel('Name').fill(user.name);
        await page.getByRole('button', { name: 'Continue' }).click();

        // Vault finish Creation
        await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
        await page.getByLabel('Confirm master password (').fill(user.password);
        await page.getByRole('button', { name: 'Create account' }).click();

        await utils.checkNotification(page, 'Your new account has been created');
        await utils.ignoreExtension(page);

        // We are now in the default vault page
        await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
        // await utils.checkNotification(page, 'You have been logged in!');

        if (mailBuffer) {
            await mailBuffer.expect((m) => m.subject === "Welcome");
            await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
        }
    });
}

export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) {
    await test.step(`Log user ${user.email}`, async () => {
        await utils.cleanLanding(page);

        await page.getByLabel(/Email address/).fill(user.email);
        await page.getByRole('button', { name: 'Continue' }).click();

        // Unlock page
        await page.getByLabel('Master password').fill(user.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        await utils.ignoreExtension(page);

        // We are now in the default vault page
        await expect(page).toHaveTitle(/Vaultwarden Web/);

        if (mailBuffer) {
            await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
        }
    });
}

playwright/tests/sso_login.smtp.spec.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import { logNewUser, logUser } from './setups/sso';
import { activateEmail, disableEmail } from './setups/2fa';
import * as utils from "../global-utils";

let users = utils.loadEnv();

let mailserver;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailserver = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    });

    await mailserver.listen();

    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: false,
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    if (mailserver) {
        await mailserver.close();
    }
});

test('Create and activate 2FA', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logNewUser(test, page, users.user1, { mailBuffer });

    await activateEmail(test, page, users.user1, mailBuffer);

    mailBuffer.close();
});

test('Log and disable', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1, { mailBuffer, mail2fa: true });

    await disableEmail(test, page, users.user1);

    mailBuffer.close();
});

playwright/tests/sso_login.spec.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
import { test, expect, type TestInfo } from '@playwright/test';

import { logNewUser, logUser } from './setups/sso';
import { activateTOTP, disableTOTP } from './setups/2fa';
import * as utils from "../global-utils";

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: false
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Account creation using SSO', async ({ page }) => {
    // Landing page
    await logNewUser(test, page, users.user1);
});

test('SSO login', async ({ page }) => {
    await logUser(test, page, users.user1);
});

test('Non SSO login', async ({ page }) => {
    // Landing page
    await page.goto('/');
    await page.locator("input[type=email].vw-email-sso").fill(users.user1.email);
    await page.getByRole('button', { name: 'Other' }).click();

    // Unlock page
    await page.getByLabel('Master password').fill(users.user1.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();

    // We are now in the default vault page
    await expect(page).toHaveTitle(/Vaultwarden Web/);
});

test('SSO login with TOTP 2fa', async ({ page }) => {
    await logUser(test, page, users.user1);

    let totp = await activateTOTP(test, page, users.user1);

    await logUser(test, page, users.user1, { totp });

    await disableTOTP(test, page, users.user1);
});

test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => {
    await utils.restartVault(page, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: true
    }, false);

    // Landing page
    await page.goto('/');

    // Check that SSO login is available
    await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1);

    // No Continue/Other
    await expect(page.getByRole('button', { name: 'Other' })).toHaveCount(0);
});

test('No SSO login', async ({ page }, testInfo: TestInfo) => {
    await utils.restartVault(page, testInfo, {
        SSO_ENABLED: false
    }, false);

    // Landing page
    await page.goto('/');

    // No SSO button (rely on a correct selector checked in previous test)
    await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0);

    // Can continue to Master password
    await page.getByLabel(/Email address/).fill(users.user1.email);
    await page.getByRole('button', { name: 'Continue' }).click();
    await expect(page.getByRole('button', { name: 'Log in with master password' })).toHaveCount(1);
});

playwright/tests/sso_organization.smtp.spec.ts (new file, 124 lines)
@@ -0,0 +1,124 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';

let users = utils.loadEnv();

let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailServer = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    });

    await mailServer.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
        SSO_ENABLED: true,
        SSO_ONLY: true,
    });

    mail1Buffer = mailServer.buffer(users.user1.email);
    mail2Buffer = mailServer.buffer(users.user2.email);
    mail3Buffer = mailServer.buffer(users.user3.email);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});

test('Create user3', async ({ page }) => {
    await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer });
});

test('Invite users', async ({ page }) => {
    await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer });

    await orgs.create(test, page, '/Test');
    await orgs.members(test, page, '/Test');
    await orgs.invite(test, page, '/Test', users.user2.email);
    await orgs.invite(test, page, '/Test', users.user3.email);
});

test('invited with new account', async ({ page }) => {
    const link = await test.step('Extract email link', async () => {
        const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test");
        await page.setContent(invited.html);
        return await page.getByTestId("invite").getAttribute("href");
    });

    await test.step('Redirect to Keycloak', async () => {
        await page.goto(link);
    });

    await test.step('Keycloak login', async () => {
        await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
        await page.getByLabel(/Username/).fill(users.user2.name);
        await page.getByLabel('Password', { exact: true }).fill(users.user2.password);
        await page.getByRole('button', { name: 'Sign In' }).click();
    });

    await test.step('Create Vault account', async () => {
        await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
        await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
        await page.getByLabel('Confirm master password (').fill(users.user2.password);
        await page.getByRole('button', { name: 'Create account' }).click();

        await utils.checkNotification(page, 'Account successfully created!');
        await utils.checkNotification(page, 'Invitation accepted');
        await utils.ignoreExtension(page);
    });

    await test.step('Default vault page', async () => {
        await expect(page).toHaveTitle(/Vaultwarden Web/);
    });

    await test.step('Check mails', async () => {
        await mail2Buffer.expect((m) => m.subject.includes("New Device Logged"));
        await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
    });
});

test('invited with existing account', async ({ page }) => {
    const link = await test.step('Extract email link', async () => {
        const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test");
        await page.setContent(invited.html);
        return await page.getByTestId("invite").getAttribute("href");
    });

    await test.step('Redirect to Keycloak', async () => {
        await page.goto(link);
    });

    await test.step('Keycloak login', async () => {
        await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
        await page.getByLabel(/Username/).fill(users.user3.name);
        await page.getByLabel('Password', { exact: true }).fill(users.user3.password);
        await page.getByRole('button', { name: 'Sign In' }).click();
    });

    await test.step('Unlock vault', async () => {
        await expect(page).toHaveTitle('Vaultwarden Web');
        await page.getByLabel('Master password').fill(users.user3.password);
        await page.getByRole('button', { name: 'Unlock' }).click();

        await utils.checkNotification(page, 'Invitation accepted');
        await utils.ignoreExtension(page);
    });

    await test.step('Default vault page', async () => {
        await expect(page).toHaveTitle(/Vaultwarden Web/);
    });

    await test.step('Check mails', async () => {
        await mail3Buffer.expect((m) => m.subject.includes("New Device Logged"));
        await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
    });
});
|
||||

76  playwright/tests/sso_organization.spec.ts  Normal file
@@ -0,0 +1,76 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: true,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Create user3', async ({ page }) => {
    await logNewUser(test, page, users.user3);
});

test('Invite users', async ({ page }) => {
    await logNewUser(test, page, users.user1);

    await orgs.create(test, page, '/Test');
    await orgs.members(test, page, '/Test');
    await orgs.invite(test, page, '/Test', users.user2.email);
    await orgs.invite(test, page, '/Test', users.user3.email);
    await orgs.confirm(test, page, '/Test', users.user3.email);
});

test('Create invited account', async ({ page }) => {
    await logNewUser(test, page, users.user2);
});

test('Confirm invited user', async ({ page }) => {
    await logUser(test, page, users.user1);
    await orgs.members(test, page, '/Test');
    await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/);
    await orgs.confirm(test, page, '/Test', users.user2.email);
});

test('Organization is visible', async ({ page }) => {
    await logUser(test, page, users.user2);
    await page.getByLabel('vault: /Test').click();
    await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});

test('Enforce password policy', async ({ page }) => {
    await logUser(test, page, users.user1);
    await orgs.policies(test, page, '/Test');

    await test.step(`Set master password policy`, async () => {
        await page.getByRole('button', { name: 'Master password requirements' }).click();
        await page.getByRole('checkbox', { name: 'Turn on' }).check();
        await page.getByRole('checkbox', { name: 'Require existing members to' }).check();
        await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42');
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'Edited policy Master password requirements.');
    });

    await utils.logout(test, page, users.user1);

    await test.step(`Unlock trigger policy`, async () => {
        await page.locator("input[type=email].vw-email-sso").fill(users.user1.email);
        await page.getByRole('button', { name: 'Use single sign-on' }).click();

        await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password);
        await page.getByRole('button', { name: 'Unlock' }).click();

        await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible();
    });
});

@@ -1,4 +1,4 @@
[toolchain]
channel = "1.88.0"
channel = "1.91.1"
components = [ "rustfmt", "clippy" ]
profile = "minimal"
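
The toolchain bump above (`rust-toolchain.toml`, 1.88.0 to 1.91.1) goes hand in hand with the `once_cell` cleanup in `src/api/admin.rs` below: `std::sync::LazyLock`, stable in the standard library since Rust 1.80, covers the same use case as `once_cell::sync::Lazy`. A minimal sketch of the substitution, independent of the Vaultwarden types (`detect` is a placeholder, not the real lookup):

use std::sync::LazyLock;

// Before, with the external crate:
//   static DB_TYPE: once_cell::sync::Lazy<&str> = once_cell::sync::Lazy::new(detect);
// After, standard library only; same semantics: the initializer runs once, on first access.
static DB_TYPE: LazyLock<&str> = LazyLock::new(detect);

fn detect() -> &'static str {
    // Placeholder; the real code matches on ACTIVE_DB_TYPE, as shown below.
    "SQLite"
}

fn main() {
    // Any dereference triggers the one-time initialization.
    println!("db type: {}", *DB_TYPE);
}
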
266  src/api/admin.rs
@@ -1,17 +1,16 @@
use once_cell::sync::Lazy;
use reqwest::Method;
use serde::de::DeserializeOwned;
use serde_json::Value;
use std::env;
use std::{env, sync::LazyLock};

use rocket::serde::json::Json;
use reqwest::Method;
use rocket::{
    form::Form,
    http::{Cookie, CookieJar, MediaType, SameSite, Status},
    request::{FromRequest, Outcome, Request},
    response::{content::RawHtml as Html, Redirect},
    serde::json::Json,
    Catcher, Route,
};
use serde::de::DeserializeOwned;
use serde_json::Value;

use crate::{
    api::{
@@ -20,7 +19,14 @@ use crate::{
    },
    auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp, Secure},
    config::ConfigBuilder,
    db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
    db::{
        backup_sqlite, get_sql_server_version,
        models::{
            Attachment, Cipher, Collection, Device, Event, EventType, Group, Invitation, Membership, MembershipId,
            MembershipType, OrgPolicy, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId,
        },
        DbConn, DbConnType, ACTIVE_DB_TYPE,
    },
    error::{Error, MapResult},
    http_client::make_http_request,
    mail,
@@ -46,6 +52,7 @@ pub fn routes() -> Vec<Route> {
        invite_user,
        logout,
        delete_user,
        delete_sso_user,
        deauth_user,
        disable_user,
        enable_user,
@@ -74,18 +81,21 @@ pub fn catchers() -> Vec<Catcher> {
    }
}

static DB_TYPE: Lazy<&str> = Lazy::new(|| {
    DbConnType::from_url(&CONFIG.database_url())
        .map(|t| match t {
            DbConnType::sqlite => "SQLite",
            DbConnType::mysql => "MySQL",
            DbConnType::postgresql => "PostgreSQL",
        })
        .unwrap_or("Unknown")
static DB_TYPE: LazyLock<&str> = LazyLock::new(|| match ACTIVE_DB_TYPE.get() {
    #[cfg(mysql)]
    Some(DbConnType::Mysql) => "MySQL",
    #[cfg(postgresql)]
    Some(DbConnType::Postgresql) => "PostgreSQL",
    #[cfg(sqlite)]
    Some(DbConnType::Sqlite) => "SQLite",
    _ => "Unknown",
});

static CAN_BACKUP: Lazy<bool> =
    Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
#[cfg(sqlite)]
static CAN_BACKUP: LazyLock<bool> =
    LazyLock::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
#[cfg(not(sqlite))]
static CAN_BACKUP: LazyLock<bool> = LazyLock::new(|| false);
#[get("/")]
|
||||
fn admin_disabled() -> &'static str {
|
||||
@@ -147,10 +157,10 @@ fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
|
||||
err_code!("Authorization failed.", Status::Unauthorized.code);
|
||||
}
|
||||
let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
|
||||
render_admin_login(None, Some(redirect))
|
||||
render_admin_login(None, Some(&redirect))
|
||||
}
|
||||
|
||||
fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> {
|
||||
fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiResult<Html<String>> {
|
||||
// If there is an error, show it
|
||||
let msg = msg.map(|msg| format!("Error: {msg}"));
|
||||
let json = json!({
|
||||
@@ -184,14 +194,17 @@ fn post_admin_login(
|
||||
if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
|
||||
return Err(AdminResponse::TooManyRequests(render_admin_login(
|
||||
Some("Too many requests, try again later."),
|
||||
redirect,
|
||||
redirect.as_deref(),
|
||||
)));
|
||||
}
|
||||
|
||||
// If the token is invalid, redirect to login page
|
||||
if !_validate_token(&data.token) {
|
||||
error!("Invalid admin token. IP: {}", ip.ip);
|
||||
Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect)))
|
||||
Err(AdminResponse::Unauthorized(render_admin_login(
|
||||
Some("Invalid admin token, please try again."),
|
||||
redirect.as_deref(),
|
||||
)))
|
||||
} else {
|
||||
// If the token received is valid, generate JWT and save it as a cookie
|
||||
let claims = generate_admin_claims();
|
||||
@@ -239,6 +252,7 @@ struct AdminTemplateData {
|
||||
page_data: Option<Value>,
|
||||
logged_in: bool,
|
||||
urlpath: String,
|
||||
sso_enabled: bool,
|
||||
}
|
||||
|
||||
impl AdminTemplateData {
|
||||
@@ -248,6 +262,7 @@ impl AdminTemplateData {
|
||||
page_data: Some(page_data),
|
||||
logged_in: true,
|
||||
urlpath: CONFIG.domain_path(),
|
||||
sso_enabled: CONFIG.sso_enabled(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -281,7 +296,7 @@ struct InviteData {
|
||||
email: String,
|
||||
}
|
||||
|
||||
async fn get_user_or_404(user_id: &UserId, conn: &mut DbConn) -> ApiResult<User> {
|
||||
async fn get_user_or_404(user_id: &UserId, conn: &DbConn) -> ApiResult<User> {
|
||||
if let Some(user) = User::find_by_uuid(user_id, conn).await {
|
||||
Ok(user)
|
||||
} else {
|
||||
@@ -290,15 +305,15 @@ async fn get_user_or_404(user_id: &UserId, conn: &mut DbConn) -> ApiResult<User>
|
||||
}
|
||||
|
||||
#[post("/invite", format = "application/json", data = "<data>")]
|
||||
async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbConn) -> JsonResult {
|
||||
async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
let data: InviteData = data.into_inner();
|
||||
if User::find_by_mail(&data.email, &mut conn).await.is_some() {
|
||||
if User::find_by_mail(&data.email, &conn).await.is_some() {
|
||||
err_code!("User already exists", Status::Conflict.code)
|
||||
}
|
||||
|
||||
let mut user = User::new(data.email);
|
||||
let mut user = User::new(&data.email, None);
|
||||
|
||||
async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
|
||||
async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
|
||||
if CONFIG.mail_enabled() {
|
||||
let org_id: OrganizationId = FAKE_ADMIN_UUID.to_string().into();
|
||||
let member_id: MembershipId = FAKE_ADMIN_UUID.to_string().into();
|
||||
@@ -309,10 +324,10 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon
|
||||
}
|
||||
}
|
||||
|
||||
_generate_invite(&user, &mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
|
||||
user.save(&mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
|
||||
_generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
|
||||
user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
|
||||
|
||||
Ok(Json(user.to_json(&mut conn).await))
|
||||
Ok(Json(user.to_json(&conn).await))
|
||||
}
|
||||
|
||||
#[post("/test/smtp", format = "application/json", data = "<data>")]
|
||||
@@ -333,14 +348,14 @@ fn logout(cookies: &CookieJar<'_>) -> Redirect {
|
||||
}
|
||||
|
||||
#[get("/users")]
|
||||
async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
|
||||
let users = User::get_all(&mut conn).await;
|
||||
async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
|
||||
let users = User::get_all(&conn).await;
|
||||
let mut users_json = Vec::with_capacity(users.len());
|
||||
for u in users {
|
||||
let mut usr = u.to_json(&mut conn).await;
|
||||
for (u, _) in users {
|
||||
let mut usr = u.to_json(&conn).await;
|
||||
usr["userEnabled"] = json!(u.enabled);
|
||||
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
|
||||
usr["lastActive"] = match u.last_active(&mut conn).await {
|
||||
usr["lastActive"] = match u.last_active(&conn).await {
|
||||
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
|
||||
None => json!(None::<String>),
|
||||
};
|
||||
@@ -351,20 +366,23 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
|
||||
}
|
||||
|
||||
#[get("/users/overview")]
|
||||
async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
|
||||
let users = User::get_all(&mut conn).await;
|
||||
async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
||||
let users = User::get_all(&conn).await;
|
||||
let mut users_json = Vec::with_capacity(users.len());
|
||||
for u in users {
|
||||
let mut usr = u.to_json(&mut conn).await;
|
||||
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await);
|
||||
usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await);
|
||||
usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &mut conn).await));
|
||||
for (u, sso_u) in users {
|
||||
let mut usr = u.to_json(&conn).await;
|
||||
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await);
|
||||
usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await);
|
||||
usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await));
|
||||
usr["user_enabled"] = json!(u.enabled);
|
||||
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
|
||||
usr["last_active"] = match u.last_active(&mut conn).await {
|
||||
usr["last_active"] = match u.last_active(&conn).await {
|
||||
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
|
||||
None => json!("Never"),
|
||||
};
|
||||
|
||||
usr["sso_identifier"] = json!(sso_u.map(|u| u.identifier.to_string()).unwrap_or(String::new()));
|
||||
|
||||
users_json.push(usr);
|
||||
}
|
||||
|
||||
@@ -373,9 +391,9 @@ async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<
|
||||
}
|
||||
|
||||
#[get("/users/by-mail/<mail>")]
|
||||
async fn get_user_by_mail_json(mail: &str, _token: AdminToken, mut conn: DbConn) -> JsonResult {
|
||||
if let Some(u) = User::find_by_mail(mail, &mut conn).await {
|
||||
let mut usr = u.to_json(&mut conn).await;
|
||||
async fn get_user_by_mail_json(mail: &str, _token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
if let Some(u) = User::find_by_mail(mail, &conn).await {
|
||||
let mut usr = u.to_json(&conn).await;
|
||||
usr["userEnabled"] = json!(u.enabled);
|
||||
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
|
||||
Ok(Json(usr))
|
||||
@@ -385,21 +403,21 @@ async fn get_user_by_mail_json(mail: &str, _token: AdminToken, mut conn: DbConn)
|
||||
}
|
||||
|
||||
#[get("/users/<user_id>")]
|
||||
async fn get_user_json(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> JsonResult {
|
||||
let u = get_user_or_404(&user_id, &mut conn).await?;
|
||||
let mut usr = u.to_json(&mut conn).await;
|
||||
async fn get_user_json(user_id: UserId, _token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
let u = get_user_or_404(&user_id, &conn).await?;
|
||||
let mut usr = u.to_json(&conn).await;
|
||||
usr["userEnabled"] = json!(u.enabled);
|
||||
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
|
||||
Ok(Json(usr))
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/delete", format = "application/json")]
|
||||
async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
let user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
async fn delete_user(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let user = get_user_or_404(&user_id, &conn).await?;
|
||||
|
||||
// Get the membership records before deleting the actual user
|
||||
let memberships = Membership::find_any_state_by_user(&user_id, &mut conn).await;
|
||||
let res = user.delete(&mut conn).await;
|
||||
let memberships = Membership::find_any_state_by_user(&user_id, &conn).await;
|
||||
let res = user.delete(&conn).await;
|
||||
|
||||
for membership in memberships {
|
||||
log_event(
|
||||
@@ -409,7 +427,28 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em
|
||||
&ACTING_ADMIN_USER.into(),
|
||||
14, // Use UnknownBrowser type
|
||||
&token.ip.ip,
|
||||
&mut conn,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
#[delete("/users/<user_id>/sso", format = "application/json")]
|
||||
async fn delete_sso_user(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let memberships = Membership::find_any_state_by_user(&user_id, &conn).await;
|
||||
let res = SsoUser::delete(&user_id, &conn).await;
|
||||
|
||||
for membership in memberships {
|
||||
log_event(
|
||||
EventType::OrganizationUserUnlinkedSso as i32,
|
||||
&membership.uuid,
|
||||
&membership.org_uuid,
|
||||
&ACTING_ADMIN_USER.into(),
|
||||
14, // Use UnknownBrowser type
|
||||
&token.ip.ip,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -418,13 +457,13 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/deauth", format = "application/json")]
|
||||
async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
async fn deauth_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &conn).await?;
|
||||
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
nt.send_logout(&user, None, &conn).await;
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
|
||||
for device in Device::find_push_devices_by_user(&user.uuid, &conn).await {
|
||||
match unregister_push_device(&device.push_uuid).await {
|
||||
Ok(r) => r,
|
||||
Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"),
|
||||
@@ -432,46 +471,46 @@ async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt:
|
||||
}
|
||||
}
|
||||
|
||||
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
|
||||
Device::delete_all_by_user(&user.uuid, &conn).await?;
|
||||
user.reset_security_stamp();
|
||||
|
||||
user.save(&mut conn).await
|
||||
user.save(&conn).await
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/disable", format = "application/json")]
|
||||
async fn disable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
|
||||
async fn disable_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &conn).await?;
|
||||
Device::delete_all_by_user(&user.uuid, &conn).await?;
|
||||
user.reset_security_stamp();
|
||||
user.enabled = false;
|
||||
|
||||
let save_result = user.save(&mut conn).await;
|
||||
let save_result = user.save(&conn).await;
|
||||
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
nt.send_logout(&user, None, &conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/enable", format = "application/json")]
|
||||
async fn enable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
async fn enable_user(user_id: UserId, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &conn).await?;
|
||||
user.enabled = true;
|
||||
|
||||
user.save(&mut conn).await
|
||||
user.save(&conn).await
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/remove-2fa", format = "application/json")]
|
||||
async fn remove_2fa(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &mut conn).await?;
|
||||
TwoFactor::delete_all_by_user(&user.uuid, &mut conn).await?;
|
||||
two_factor::enforce_2fa_policy(&user, &ACTING_ADMIN_USER.into(), 14, &token.ip.ip, &mut conn).await?;
|
||||
async fn remove_2fa(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let mut user = get_user_or_404(&user_id, &conn).await?;
|
||||
TwoFactor::delete_all_by_user(&user.uuid, &conn).await?;
|
||||
two_factor::enforce_2fa_policy(&user, &ACTING_ADMIN_USER.into(), 14, &token.ip.ip, &conn).await?;
|
||||
user.totp_recover = None;
|
||||
user.save(&mut conn).await
|
||||
user.save(&conn).await
|
||||
}
|
||||
|
||||
#[post("/users/<user_id>/invite/resend", format = "application/json")]
|
||||
async fn resend_user_invite(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
if let Some(user) = User::find_by_uuid(&user_id, &mut conn).await {
|
||||
async fn resend_user_invite(user_id: UserId, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
if let Some(user) = User::find_by_uuid(&user_id, &conn).await {
|
||||
//TODO: replace this with user.status check when it will be available (PR#3397)
|
||||
if !user.password_hash.is_empty() {
|
||||
err_code!("User already accepted invitation", Status::BadRequest.code);
|
||||
@@ -497,10 +536,10 @@ struct MembershipTypeData {
|
||||
}
|
||||
|
||||
#[post("/users/org_type", format = "application/json", data = "<data>")]
|
||||
async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let data: MembershipTypeData = data.into_inner();
|
||||
|
||||
let Some(mut member_to_edit) = Membership::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await
|
||||
let Some(mut member_to_edit) = Membership::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await
|
||||
else {
|
||||
err!("The specified user isn't member of the organization")
|
||||
};
|
||||
@@ -512,28 +551,14 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
|
||||
|
||||
if member_to_edit.atype == MembershipType::Owner && new_type != MembershipType::Owner {
|
||||
// Removing owner permission, check that there is at least one other confirmed owner
|
||||
if Membership::count_confirmed_by_org_and_type(&data.org_uuid, MembershipType::Owner, &mut conn).await <= 1 {
|
||||
if Membership::count_confirmed_by_org_and_type(&data.org_uuid, MembershipType::Owner, &conn).await <= 1 {
|
||||
err!("Can't change the type of the last owner")
|
||||
}
|
||||
}
|
||||
|
||||
member_to_edit.atype = new_type;
|
||||
// This check is also done at api::organizations::{accept_invite, _confirm_invite, _activate_member, edit_member}, update_membership_type
|
||||
// It returns different error messages per function.
|
||||
if new_type < MembershipType::Admin {
|
||||
match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &member_to_edit.org_uuid, true, &mut conn).await {
|
||||
Ok(_) => {}
|
||||
Err(OrgPolicyErr::TwoFactorMissing) => {
|
||||
if CONFIG.email_2fa_auto_fallback() {
|
||||
two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &mut conn).await?;
|
||||
} else {
|
||||
err!("You cannot modify this user to this type because they have not setup 2FA");
|
||||
}
|
||||
}
|
||||
Err(OrgPolicyErr::SingleOrgEnforced) => {
|
||||
err!("You cannot modify this user to this type because it is a member of an organization which forbids it");
|
||||
}
|
||||
}
|
||||
}
|
||||
OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?;
|
||||
|
||||
log_event(
|
||||
EventType::OrganizationUserUpdated as i32,
|
||||
@@ -542,32 +567,31 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
|
||||
&ACTING_ADMIN_USER.into(),
|
||||
14, // Use UnknownBrowser type
|
||||
&token.ip.ip,
|
||||
&mut conn,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
|
||||
member_to_edit.atype = new_type;
|
||||
member_to_edit.save(&mut conn).await
|
||||
member_to_edit.save(&conn).await
|
||||
}
|
||||
|
||||
#[post("/users/update_revision", format = "application/json")]
|
||||
async fn update_revision_users(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
User::update_all_revisions(&mut conn).await
|
||||
async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
User::update_all_revisions(&conn).await
|
||||
}
|
||||
|
||||
#[get("/organizations/overview")]
|
||||
async fn organizations_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
|
||||
let organizations = Organization::get_all(&mut conn).await;
|
||||
async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
|
||||
let organizations = Organization::get_all(&conn).await;
|
||||
let mut organizations_json = Vec::with_capacity(organizations.len());
|
||||
for o in organizations {
|
||||
let mut org = o.to_json();
|
||||
org["user_count"] = json!(Membership::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["collection_count"] = json!(Collection::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["group_count"] = json!(Group::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["event_count"] = json!(Event::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &mut conn).await);
|
||||
org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &mut conn).await));
|
||||
org["user_count"] = json!(Membership::count_by_org(&o.uuid, &conn).await);
|
||||
org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await);
|
||||
org["collection_count"] = json!(Collection::count_by_org(&o.uuid, &conn).await);
|
||||
org["group_count"] = json!(Group::count_by_org(&o.uuid, &conn).await);
|
||||
org["event_count"] = json!(Event::count_by_org(&o.uuid, &conn).await);
|
||||
org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await);
|
||||
org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await));
|
||||
organizations_json.push(org);
|
||||
}
|
||||
|
||||
@@ -576,9 +600,9 @@ async fn organizations_overview(_token: AdminToken, mut conn: DbConn) -> ApiResu
|
||||
}
|
||||
|
||||
#[post("/organizations/<org_id>/delete", format = "application/json")]
|
||||
async fn delete_organization(org_id: OrganizationId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
||||
let org = Organization::find_by_uuid(&org_id, &mut conn).await.map_res("Organization doesn't exist")?;
|
||||
org.delete(&mut conn).await
|
||||
async fn delete_organization(org_id: OrganizationId, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let org = Organization::find_by_uuid(&org_id, &conn).await.map_res("Organization doesn't exist")?;
|
||||
org.delete(&conn).await
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@@ -666,7 +690,7 @@ async fn get_ntp_time(has_http_access: bool) -> String {
|
||||
}
|
||||
|
||||
#[get("/diagnostics")]
|
||||
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) -> ApiResult<Html<String>> {
|
||||
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
|
||||
use chrono::prelude::*;
|
||||
use std::net::ToSocketAddrs;
|
||||
|
||||
@@ -720,7 +744,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
||||
"uses_proxy": uses_proxy,
|
||||
"enable_websocket": &CONFIG.enable_websocket(),
|
||||
"db_type": *DB_TYPE,
|
||||
"db_version": get_sql_server_version(&mut conn).await,
|
||||
"db_version": get_sql_server_version(&conn).await,
|
||||
"admin_url": format!("{}/diagnostics", admin_url()),
|
||||
"overrides": &CONFIG.get_overrides().join(", "),
|
||||
"host_arch": env::consts::ARCH,
|
||||
@@ -764,9 +788,9 @@ async fn delete_config(_token: AdminToken) -> EmptyResult {
|
||||
}
|
||||
|
||||
#[post("/config/backup_db", format = "application/json")]
|
||||
async fn backup_db(_token: AdminToken, mut conn: DbConn) -> ApiResult<String> {
|
||||
fn backup_db(_token: AdminToken) -> ApiResult<String> {
|
||||
if *CAN_BACKUP {
|
||||
match backup_database(&mut conn).await {
|
||||
match backup_sqlite() {
|
||||
Ok(f) => Ok(format!("Backup to '{f}' was successful")),
|
||||
Err(e) => err!(format!("Backup was unsuccessful {e}")),
|
||||
}
|
||||
@@ -789,11 +813,7 @@ impl<'r> FromRequest<'r> for AdminToken {
|
||||
_ => err_handler!("Error getting Client IP"),
|
||||
};
|
||||
|
||||
if CONFIG.disable_admin_token() {
|
||||
Outcome::Success(Self {
|
||||
ip,
|
||||
})
|
||||
} else {
|
||||
if !CONFIG.disable_admin_token() {
|
||||
let cookies = request.cookies();
|
||||
|
||||
let access_token = match cookies.get(COOKIE_NAME) {
|
||||
@@ -817,10 +837,10 @@ impl<'r> FromRequest<'r> for AdminToken {
|
||||
error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
|
||||
return Outcome::Error((Status::Unauthorized, "Session expired"));
|
||||
}
|
||||
|
||||
Outcome::Success(Self {
|
||||
ip,
|
||||
})
|
||||
}
|
||||
|
||||
Outcome::Success(Self {
|
||||
ip,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
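
One pattern repeats through this whole file and continues in the account-handling hunks below: handlers now take `conn: DbConn` instead of `mut conn: DbConn`, and every query helper receives `&conn` rather than `&mut conn`. That only compiles if the database layer accepts a shared reference, which typically means the underlying connection sits behind interior mutability. The diff does not show the new `DbConn` internals, so the following is a hypothetical sketch of that shape, not Vaultwarden's actual implementation:

use std::sync::Mutex;

// Hypothetical stand-in for the real database handle.
struct RawConnection;

pub struct DbConn {
    // Interior mutability lets many callers share `&DbConn` while each
    // individual query still gets exclusive access to the connection.
    inner: Mutex<RawConnection>,
}

impl DbConn {
    pub fn execute(&self, sql: &str) {
        // Lock only for the duration of a single statement.
        let _conn = self.inner.lock().unwrap();
        println!("executing: {sql}");
    }
}

fn handler(conn: &DbConn) {
    // Several helpers can borrow the same `&DbConn` in sequence,
    // which is what allows `&mut conn` to become `&conn` everywhere.
    conn.execute("SELECT 1");
    conn.execute("SELECT 2");
}

fn main() {
    let conn = DbConn { inner: Mutex::new(RawConnection) };
    handler(&conn);
}
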
@@ -7,13 +7,20 @@ use serde_json::Value;

use crate::{
    api::{
        core::{log_user_event, two_factor::email},
        master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult,
        Notify, PasswordOrOtpData, UpdateType,
        core::{accept_org_invite, log_user_event, two_factor::email},
        master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult,
        JsonResult, Notify, PasswordOrOtpData, UpdateType,
    },
    auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers},
    crypto,
    db::{models::*, DbConn},
    db::{
        models::{
            AuthRequest, AuthRequestId, Cipher, CipherId, Device, DeviceId, DeviceType, EmergencyAccess,
            EmergencyAccessId, EventType, Folder, FolderId, Invitation, Membership, MembershipId, OrgPolicy,
            OrgPolicyType, Organization, OrganizationId, Send, SendId, User, UserId, UserKdfType,
        },
        DbConn,
    },
    mail,
    util::{format_date, NumberOrString},
    CONFIG,
@@ -34,6 +41,7 @@ pub fn routes() -> Vec<rocket::Route> {
        get_public_keys,
        post_keys,
        post_password,
        post_set_password,
        post_kdf,
        post_rotatekey,
        post_sstamp,
@@ -63,18 +71,30 @@ pub fn routes() -> Vec<rocket::Route> {
        put_auth_request,
        get_auth_request_response,
        get_auth_requests,
        get_auth_requests_pending,
    ]
}

#[derive(Debug, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct KDFData {
    #[serde(alias = "kdfType")]
    kdf: i32,
    #[serde(alias = "iterations")]
    kdf_iterations: i32,
    #[serde(alias = "memory")]
    kdf_memory: Option<i32>,
    #[serde(alias = "parallelism")]
    kdf_parallelism: Option<i32>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterData {
    email: String,

    kdf: Option<i32>,
    kdf_iterations: Option<i32>,
    kdf_memory: Option<i32>,
    kdf_parallelism: Option<i32>,
    #[serde(flatten)]
    kdf: KDFData,

    #[serde(alias = "userSymmetricKey")]
    key: String,
@@ -97,6 +117,19 @@ pub struct RegisterData {
    org_invite_token: Option<String>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SetPasswordData {
    #[serde(flatten)]
    kdf: KDFData,

    key: String,
    keys: Option<KeysData>,
    master_password_hash: String,
    master_password_hint: Option<String>,
    org_identifier: Option<String>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct KeysData {
@@ -121,7 +154,7 @@ fn enforce_password_hint_setting(password_hint: &Option<String>) -> EmptyResult
    }
    Ok(())
}
async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &mut DbConn) -> bool {
async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &DbConn) -> bool {
    if !CONFIG._enable_email_2fa() {
        return false;
    }
@@ -139,7 +172,7 @@ async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
    _register(data, false, conn).await
}

pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut conn: DbConn) -> JsonResult {
pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn: DbConn) -> JsonResult {
    let mut data: RegisterData = data.into_inner();
    let email = data.email.to_lowercase();

@@ -221,7 +254,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
    let password_hint = clean_password_hint(&data.master_password_hint);
    enforce_password_hint_setting(&password_hint)?;

    let mut user = match User::find_by_mail(&email, &mut conn).await {
    let mut user = match User::find_by_mail(&email, &conn).await {
        Some(user) => {
            if !user.password_hash.is_empty() {
                err!("Registration not allowed or user already exists")
@@ -236,15 +269,12 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
                } else {
                    err!("Registration email does not match invite email")
                }
            } else if Invitation::take(&email, &mut conn).await {
                for membership in Membership::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() {
                    membership.status = MembershipStatus::Accepted as i32;
                    membership.save(&mut conn).await?;
                }
            } else if Invitation::take(&email, &conn).await {
                Membership::accept_user_invitations(&user.uuid, &conn).await?;
                user
            } else if CONFIG.is_signup_allowed(&email)
                || (CONFIG.emergency_access_allowed()
                    && EmergencyAccess::find_invited_by_grantee_email(&email, &mut conn).await.is_some())
                    && EmergencyAccess::find_invited_by_grantee_email(&email, &conn).await.is_some())
            {
                user
            } else {
@@ -255,11 +285,11 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
            // Order is important here; the invitation check must come first
            // because the vaultwarden admin can invite anyone, regardless
            // of other signup restrictions.
            if Invitation::take(&email, &mut conn).await
            if Invitation::take(&email, &conn).await
                || CONFIG.is_signup_allowed(&email)
                || pending_emergency_access.is_some()
            {
                User::new(email.clone())
                User::new(&email, None)
            } else {
                err!("Registration not allowed or user already exists")
            }
@@ -267,18 +297,9 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
    };

    // Make sure we don't leave a lingering invitation.
    Invitation::take(&email, &mut conn).await;
    Invitation::take(&email, &conn).await;

    if let Some(client_kdf_type) = data.kdf {
        user.client_kdf_type = client_kdf_type;
    }

    if let Some(client_kdf_iter) = data.kdf_iterations {
        user.client_kdf_iter = client_kdf_iter;
    }

    user.client_kdf_memory = data.kdf_memory;
    user.client_kdf_parallelism = data.kdf_parallelism;
    set_kdf_data(&mut user, &data.kdf)?;

    user.set_password(&data.master_password_hash, Some(data.key), true, None);
    user.password_hint = password_hint;
@@ -307,17 +328,17 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
            error!("Error sending welcome email: {e:#?}");
        }

        if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
            email::activate_email_2fa(&user, &mut conn).await.ok();
        if email_verified && is_email_2fa_required(data.organization_user_id, &conn).await {
            email::activate_email_2fa(&user, &conn).await.ok();
        }
    }

    user.save(&mut conn).await?;
    user.save(&conn).await?;

    // accept any open emergency access invitations
    if !CONFIG.mail_enabled() && CONFIG.emergency_access_allowed() {
        for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &mut conn).await {
            emergency_invite.accept_invite(&user.uuid, &user.email, &mut conn).await.ok();
        for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &conn).await {
            emergency_invite.accept_invite(&user.uuid, &user.email, &conn).await.ok();
        }
    }

@@ -327,9 +348,71 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
    })))
}

#[post("/accounts/set-password", data = "<data>")]
async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: SetPasswordData = data.into_inner();
    let mut user = headers.user;

    if user.private_key.is_some() {
        err!("Account already initialized, cannot set password")
    }

    // Check against the password hint setting here so if it fails,
    // the user can retry without losing their invitation below.
    let password_hint = clean_password_hint(&data.master_password_hint);
    enforce_password_hint_setting(&password_hint)?;

    set_kdf_data(&mut user, &data.kdf)?;

    user.set_password(
        &data.master_password_hash,
        Some(data.key),
        false,
        Some(vec![String::from("revision_date")]), // We need to allow revision-date to use the old security_timestamp
    );
    user.password_hint = password_hint;

    if let Some(keys) = data.keys {
        user.private_key = Some(keys.encrypted_private_key);
        user.public_key = Some(keys.public_key);
    }

    if let Some(identifier) = data.org_identifier {
        if identifier != crate::sso::FAKE_IDENTIFIER {
            let org = match Organization::find_by_uuid(&identifier.into(), &conn).await {
                None => err!("Failed to retrieve the associated organization"),
                Some(org) => org,
            };

            let membership = match Membership::find_by_user_and_org(&user.uuid, &org.uuid, &conn).await {
                None => err!("Failed to retrieve the invitation"),
                Some(org) => org,
            };

            accept_org_invite(&user, membership, None, &conn).await?;
        }
    }

    if CONFIG.mail_enabled() {
        mail::send_welcome(&user.email.to_lowercase()).await?;
    } else {
        Membership::accept_user_invitations(&user.uuid, &conn).await?;
    }

    log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &conn)
        .await;

    user.save(&conn).await?;

    Ok(Json(json!({
        "Object": "set-password",
        "CaptchaBypassToken": "",
    })))
}
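
For reference, the JSON body the new `/accounts/set-password` route deserializes: since `SetPasswordData` flattens `KDFData`, the KDF fields sit at the top level of the payload rather than under a nested key. A sketch of a plausible request follows; all values are placeholders, and the `keys` field names are inferred from the `KeysData` usage visible in `post_keys` further down:

use serde_json::json;

fn main() {
    // Hypothetical payload; values are placeholders, not real key material.
    let body = json!({
        // KDFData, lifted to the top level by #[serde(flatten)]:
        "kdf": 0,                  // 0 = PBKDF2, 1 = Argon2id (UserKdfType)
        "kdfIterations": 600000,
        "kdfMemory": null,
        "kdfParallelism": null,
        // SetPasswordData's own fields:
        "key": "<protected symmetric key>",
        "keys": {
            "encryptedPrivateKey": "<protected private key>",
            "publicKey": "<base64 public key>"
        },
        "masterPasswordHash": "<client-side master password hash>",
        "masterPasswordHint": null,
        "orgIdentifier": "<sso organization identifier>"
    });
    println!("{body:#}");
}
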
#[get("/accounts/profile")]
|
||||
async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
|
||||
Json(headers.user.to_json(&mut conn).await)
|
||||
async fn profile(headers: Headers, conn: DbConn) -> Json<Value> {
|
||||
Json(headers.user.to_json(&conn).await)
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -345,7 +428,7 @@ async fn put_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) ->
|
||||
}
|
||||
|
||||
#[post("/accounts/profile", data = "<data>")]
|
||||
async fn post_profile(data: Json<ProfileData>, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
async fn post_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let data: ProfileData = data.into_inner();
|
||||
|
||||
// Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden)
|
||||
@@ -357,8 +440,8 @@ async fn post_profile(data: Json<ProfileData>, headers: Headers, mut conn: DbCon
|
||||
let mut user = headers.user;
|
||||
user.name = data.name;
|
||||
|
||||
user.save(&mut conn).await?;
|
||||
Ok(Json(user.to_json(&mut conn).await))
|
||||
user.save(&conn).await?;
|
||||
Ok(Json(user.to_json(&conn).await))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@@ -368,7 +451,7 @@ struct AvatarData {
|
||||
}
|
||||
|
||||
#[put("/accounts/avatar", data = "<data>")]
|
||||
async fn put_avatar(data: Json<AvatarData>, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
async fn put_avatar(data: Json<AvatarData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let data: AvatarData = data.into_inner();
|
||||
|
||||
// It looks like it only supports the 6 hex color format.
|
||||
@@ -383,13 +466,13 @@ async fn put_avatar(data: Json<AvatarData>, headers: Headers, mut conn: DbConn)
|
||||
let mut user = headers.user;
|
||||
user.avatar_color = data.avatar_color;
|
||||
|
||||
user.save(&mut conn).await?;
|
||||
Ok(Json(user.to_json(&mut conn).await))
|
||||
user.save(&conn).await?;
|
||||
Ok(Json(user.to_json(&conn).await))
|
||||
}
|
||||
|
||||
#[get("/users/<user_id>/public-key")]
|
||||
async fn get_public_keys(user_id: UserId, _headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
let user = match User::find_by_uuid(&user_id, &mut conn).await {
|
||||
async fn get_public_keys(user_id: UserId, _headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let user = match User::find_by_uuid(&user_id, &conn).await {
|
||||
Some(user) if user.public_key.is_some() => user,
|
||||
Some(_) => err_code!("User has no public_key", Status::NotFound.code),
|
||||
None => err_code!("User doesn't exist", Status::NotFound.code),
|
||||
@@ -403,7 +486,7 @@ async fn get_public_keys(user_id: UserId, _headers: Headers, mut conn: DbConn) -
|
||||
}
|
||||
|
||||
#[post("/accounts/keys", data = "<data>")]
|
||||
async fn post_keys(data: Json<KeysData>, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
async fn post_keys(data: Json<KeysData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let data: KeysData = data.into_inner();
|
||||
|
||||
let mut user = headers.user;
|
||||
@@ -411,7 +494,7 @@ async fn post_keys(data: Json<KeysData>, headers: Headers, mut conn: DbConn) ->
|
||||
user.private_key = Some(data.encrypted_private_key);
|
||||
user.public_key = Some(data.public_key);
|
||||
|
||||
user.save(&mut conn).await?;
|
||||
user.save(&conn).await?;
|
||||
|
||||
Ok(Json(json!({
|
||||
"privateKey": user.private_key,
|
||||
@@ -430,7 +513,7 @@ struct ChangePassData {
|
||||
}
|
||||
|
||||
#[post("/accounts/password", data = "<data>")]
|
||||
async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
async fn post_password(data: Json<ChangePassData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let data: ChangePassData = data.into_inner();
|
||||
let mut user = headers.user;
|
||||
|
||||
@@ -441,7 +524,7 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D
|
||||
user.password_hint = clean_password_hint(&data.master_password_hint);
|
||||
enforce_password_hint_setting(&user.password_hint)?;
|
||||
|
||||
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn)
|
||||
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &conn)
|
||||
.await;
|
||||
|
||||
user.set_password(
|
||||
@@ -456,38 +539,17 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D
|
||||
]),
|
||||
);
|
||||
|
||||
let save_result = user.save(&mut conn).await;
|
||||
let save_result = user.save(&conn).await;
|
||||
|
||||
// Prevent logging out the client where the user requested this endpoint from.
|
||||
// If you do logout the user it will causes issues at the client side.
|
||||
// Adding the device uuid will prevent this.
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ChangeKdfData {
|
||||
kdf: i32,
|
||||
kdf_iterations: i32,
|
||||
kdf_memory: Option<i32>,
|
||||
kdf_parallelism: Option<i32>,
|
||||
|
||||
master_password_hash: String,
|
||||
new_master_password_hash: String,
|
||||
key: String,
|
||||
}
|
||||
|
||||
#[post("/accounts/kdf", data = "<data>")]
|
||||
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let data: ChangeKdfData = data.into_inner();
|
||||
let mut user = headers.user;
|
||||
|
||||
if !user.check_valid_password(&data.master_password_hash) {
|
||||
err!("Invalid password")
|
||||
}
|
||||
|
||||
fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
|
||||
if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
|
||||
err!("PBKDF2 KDF iterations must be at least 100000.")
|
||||
}
|
||||
@@ -518,10 +580,68 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn,
|
||||
}
|
||||
user.client_kdf_iter = data.kdf_iterations;
|
||||
user.client_kdf_type = data.kdf;
|
||||
user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
|
||||
let save_result = user.save(&mut conn).await;
|
||||
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct AuthenticationData {
|
||||
salt: String,
|
||||
kdf: KDFData,
|
||||
master_password_authentication_hash: String,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UnlockData {
|
||||
salt: String,
|
||||
kdf: KDFData,
|
||||
master_key_wrapped_user_key: String,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ChangeKdfData {
|
||||
new_master_password_hash: String,
|
||||
key: String,
|
||||
authentication_data: AuthenticationData,
|
||||
unlock_data: UnlockData,
|
||||
master_password_hash: String,
|
||||
}
|
||||
|
||||
#[post("/accounts/kdf", data = "<data>")]
|
||||
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let data: ChangeKdfData = data.into_inner();
|
||||
|
||||
if !headers.user.check_valid_password(&data.master_password_hash) {
|
||||
err!("Invalid password")
|
||||
}
|
||||
|
||||
if data.authentication_data.kdf != data.unlock_data.kdf {
|
||||
err!("KDF settings must be equal for authentication and unlock")
|
||||
}
|
||||
|
||||
if headers.user.email != data.authentication_data.salt || headers.user.email != data.unlock_data.salt {
|
||||
err!("Invalid master password salt")
|
||||
}
|
||||
|
||||
let mut user = headers.user;
|
||||
|
||||
set_kdf_data(&mut user, &data.unlock_data.kdf)?;
|
||||
|
||||
user.set_password(
|
||||
&data.authentication_data.master_password_authentication_hash,
|
||||
Some(data.unlock_data.master_key_wrapped_user_key),
|
||||
true,
|
||||
None,
|
||||
);
|
||||
let save_result = user.save(&conn).await;
|
||||
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
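
The reworked `/accounts/kdf` body above nests `KDFData` twice, under `authenticationData` and `unlockData`, and the `Eq`/`PartialEq` derives on `KDFData` are what make the `!=` guard between the two possible. A small self-contained sketch of how the serde attributes behave, using a stand-in struct with the same annotations as `KDFData`, trimmed to two fields:

use serde::Deserialize;

// Stand-in carrying the same serde attributes as Vaultwarden's KDFData.
#[derive(Debug, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
struct KdfData {
    #[serde(alias = "kdfType")]
    kdf: i32,
    #[serde(alias = "iterations")]
    kdf_iterations: i32,
}

fn main() {
    // The aliases accept both field spellings used by different clients:
    let a: KdfData = serde_json::from_str(r#"{"kdf": 1, "kdfIterations": 3}"#).unwrap();
    let b: KdfData = serde_json::from_str(r#"{"kdfType": 1, "iterations": 3}"#).unwrap();
    // PartialEq/Eq enable the equality check seen in post_kdf above.
    assert_eq!(a, b);
}
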
@@ -676,7 +796,7 @@ fn validate_keydata(
|
||||
}
|
||||
|
||||
#[post("/accounts/key-management/rotate-user-account-keys", data = "<data>")]
|
||||
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
// TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
|
||||
let data: KeyData = data.into_inner();
|
||||
|
||||
@@ -694,13 +814,13 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
|
||||
// TODO: Ideally we'd do everything after this point in a single transaction.
|
||||
|
||||
let mut existing_ciphers = Cipher::find_owned_by_user(user_id, &mut conn).await;
|
||||
let mut existing_folders = Folder::find_by_user(user_id, &mut conn).await;
|
||||
let mut existing_emergency_access = EmergencyAccess::find_all_by_grantor_uuid(user_id, &mut conn).await;
|
||||
let mut existing_memberships = Membership::find_by_user(user_id, &mut conn).await;
|
||||
let mut existing_ciphers = Cipher::find_owned_by_user(user_id, &conn).await;
|
||||
let mut existing_folders = Folder::find_by_user(user_id, &conn).await;
|
||||
let mut existing_emergency_access = EmergencyAccess::find_all_confirmed_by_grantor_uuid(user_id, &conn).await;
|
||||
let mut existing_memberships = Membership::find_by_user(user_id, &conn).await;
|
||||
// We only rotate the reset password key if it is set.
|
||||
existing_memberships.retain(|m| m.reset_password_key.is_some());
|
||||
let mut existing_sends = Send::find_by_user(user_id, &mut conn).await;
|
||||
let mut existing_sends = Send::find_by_user(user_id, &conn).await;
|
||||
|
||||
validate_keydata(
|
||||
&data,
|
||||
@@ -722,7 +842,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
};
|
||||
|
||||
saved_folder.name = folder_data.name;
|
||||
saved_folder.save(&mut conn).await?
|
||||
saved_folder.save(&conn).await?
|
||||
}
|
||||
}
|
||||
|
||||
@@ -735,7 +855,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
};
|
||||
|
||||
saved_emergency_access.key_encrypted = Some(emergency_access_data.key_encrypted);
|
||||
saved_emergency_access.save(&mut conn).await?
|
||||
saved_emergency_access.save(&conn).await?
|
||||
}
|
||||
|
||||
// Update reset password data
|
||||
@@ -747,7 +867,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
};
|
||||
|
||||
membership.reset_password_key = Some(reset_password_data.reset_password_key);
|
||||
membership.save(&mut conn).await?
|
||||
membership.save(&conn).await?
|
||||
}
|
||||
|
||||
// Update send data
|
||||
@@ -756,7 +876,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
err!("Send doesn't exist")
|
||||
};
|
||||
|
||||
update_send_from_data(send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?;
|
||||
update_send_from_data(send, send_data, &headers, &conn, &nt, UpdateType::None).await?;
|
||||
}
|
||||
|
||||
// Update cipher data
|
||||
@@ -772,7 +892,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
// Prevent triggering cipher updates via WebSockets by settings UpdateType::None
|
||||
// The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues.
|
||||
// We force the users to logout after the user has been saved to try and prevent these issues.
|
||||
update_cipher_from_data(saved_cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?
|
||||
update_cipher_from_data(saved_cipher, cipher_data, &headers, None, &conn, &nt, UpdateType::None).await?
|
||||
}
|
||||
}
|
||||
|
||||
@@ -787,28 +907,28 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
||||
None,
|
||||
);
|
||||
|
||||
let save_result = user.save(&mut conn).await;
|
||||
let save_result = user.save(&conn).await;
|
||||
|
||||
// Prevent logging out the client where the user requested this endpoint from.
|
||||
// If you do logout the user it will causes issues at the client side.
|
||||
// Adding the device uuid will prevent this.
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
|
||||
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
|
||||
#[post("/accounts/security-stamp", data = "<data>")]
|
||||
async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
let data: PasswordOrOtpData = data.into_inner();
|
||||
let mut user = headers.user;
|
||||
|
||||
data.validate(&user, true, &mut conn).await?;
|
||||
data.validate(&user, true, &conn).await?;
|
||||
|
||||
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
|
||||
Device::delete_all_by_user(&user.uuid, &conn).await?;
|
||||
user.reset_security_stamp();
|
||||
let save_result = user.save(&mut conn).await;
|
||||
let save_result = user.save(&conn).await;
|
||||
|
||||
nt.send_logout(&user, None, &mut conn).await;
|
||||
nt.send_logout(&user, None, &conn).await;
|
||||
|
||||
save_result
|
||||
}
|
||||
@@ -821,7 +941,7 @@ struct EmailTokenData {
|
||||
}
|
||||
|
||||
#[post("/accounts/email-token", data = "<data>")]
|
||||
async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
|
||||
async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
||||
if !CONFIG.email_change_allowed() {
|
||||
err!("Email change is not allowed.");
|
||||
}
|
||||
@@ -833,10 +953,20 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn
|
||||
err!("Invalid password")
|
||||
}
|
||||
|
||||
if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
|
||||
if let Some(existing_user) = User::find_by_mail(&data.new_email, &conn).await {
|
||||
if CONFIG.mail_enabled() {
|
||||
if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
|
||||
error!("Error sending change-email-existing email: {e:#?}");
|
||||
// check if existing_user has already registered
|
||||
if existing_user.password_hash.is_empty() {
|
||||
// inform an invited user about how to delete their temporary account if the
|
||||
// request was done intentionally and they want to update their mail address
|
||||
if let Err(e) = mail::send_change_email_invited(&data.new_email, &user.email).await {
|
||||
error!("Error sending change-email-invited email: {e:#?}");
|
||||
}
|
||||
} else {
|
||||
// inform existing user about the failed attempt to change their mail address
|
||||
if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
|
||||
error!("Error sending change-email-existing email: {e:#?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
err!("Email already in use");
|
||||
@@ -858,7 +988,7 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn

     user.email_new = Some(data.new_email);
     user.email_new_token = Some(token);
-    user.save(&mut conn).await
+    user.save(&conn).await
 }

 #[derive(Deserialize)]
@@ -873,7 +1003,7 @@ struct ChangeEmailData {
 }

 #[post("/accounts/email", data = "<data>")]
-async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+async fn post_email(data: Json<ChangeEmailData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
     if !CONFIG.email_change_allowed() {
         err!("Email change is not allowed.");
     }
@@ -885,7 +1015,7 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbC
         err!("Invalid password")
     }

-    if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
+    if User::find_by_mail(&data.new_email, &conn).await.is_some() {
         err!("Email already in use");
     }

@@ -919,9 +1049,9 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbC

     user.set_password(&data.new_master_password_hash, Some(data.key), true, None);

-    let save_result = user.save(&mut conn).await;
+    let save_result = user.save(&conn).await;

-    nt.send_logout(&user, None, &mut conn).await;
+    nt.send_logout(&user, None, &conn).await;

     save_result
 }
@@ -949,10 +1079,10 @@ struct VerifyEmailTokenData {
 }

 #[post("/accounts/verify-email-token", data = "<data>")]
-async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
+async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult {
     let data: VerifyEmailTokenData = data.into_inner();

-    let Some(mut user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
+    let Some(mut user) = User::find_by_uuid(&data.user_id, &conn).await else {
         err!("User doesn't exist")
     };

@@ -965,7 +1095,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbC
     user.verified_at = Some(Utc::now().naive_utc());
     user.last_verifying_at = None;
     user.login_verify_count = 0;
-    if let Err(e) = user.save(&mut conn).await {
+    if let Err(e) = user.save(&conn).await {
         error!("Error saving email verification: {e:#?}");
     }

@@ -979,11 +1109,11 @@ struct DeleteRecoverData {
 }

 #[post("/accounts/delete-recover", data = "<data>")]
-async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) -> EmptyResult {
+async fn post_delete_recover(data: Json<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
     let data: DeleteRecoverData = data.into_inner();

     if CONFIG.mail_enabled() {
-        if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
+        if let Some(user) = User::find_by_mail(&data.email, &conn).await {
             if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
                 error!("Error sending delete account email: {e:#?}");
             }
@@ -1006,21 +1136,21 @@ struct DeleteRecoverTokenData {
 }

 #[post("/accounts/delete-recover-token", data = "<data>")]
-async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
+async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
     let data: DeleteRecoverTokenData = data.into_inner();

     let Ok(claims) = decode_delete(&data.token) else {
         err!("Invalid claim")
     };

-    let Some(user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
+    let Some(user) = User::find_by_uuid(&data.user_id, &conn).await else {
         err!("User doesn't exist")
     };

     if claims.sub != *user.uuid {
         err!("Invalid claim");
     }
-    user.delete(&mut conn).await
+    user.delete(&conn).await
 }

 #[post("/accounts/delete", data = "<data>")]
@@ -1029,13 +1159,13 @@ async fn post_delete_account(data: Json<PasswordOrOtpData>, headers: Headers, co
 }

 #[delete("/accounts", data = "<data>")]
-async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data: PasswordOrOtpData = data.into_inner();
     let user = headers.user;

-    data.validate(&user, true, &mut conn).await?;
+    data.validate(&user, true, &conn).await?;

-    user.delete(&mut conn).await
+    user.delete(&conn).await
 }

 #[get("/accounts/revision-date")]
@@ -1051,7 +1181,7 @@ struct PasswordHintData {
 }

 #[post("/accounts/password-hint", data = "<data>")]
-async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
+async fn password_hint(data: Json<PasswordHintData>, conn: DbConn) -> EmptyResult {
     if !CONFIG.password_hints_allowed() || (!CONFIG.mail_enabled() && !CONFIG.show_password_hint()) {
         err!("This server is not configured to provide password hints.");
     }
@@ -1061,7 +1191,7 @@ async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyR
     let data: PasswordHintData = data.into_inner();
     let email = &data.email;

-    match User::find_by_mail(email, &mut conn).await {
+    match User::find_by_mail(email, &conn).await {
         None => {
             // To prevent user enumeration, act as if the user exists.
             if CONFIG.mail_enabled() {
@@ -1103,10 +1233,10 @@ async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
     _prelogin(data, conn).await
 }

-pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value> {
+pub async fn _prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
     let data: PreloginData = data.into_inner();

-    let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &mut conn).await {
+    let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &conn).await {
         Some(user) => (user.client_kdf_type, user.client_kdf_iter, user.client_kdf_memory, user.client_kdf_parallelism),
         None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None),
     };
@@ -1126,29 +1256,44 @@ struct SecretVerificationRequest {
     master_password_hash: String,
 }

+// Change the KDF Iterations if necessary
+pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &DbConn) -> ApiResult<()> {
+    if user.password_iterations < CONFIG.password_iterations() {
+        user.password_iterations = CONFIG.password_iterations();
+        user.set_password(pwd_hash, None, false, None);
+
+        if let Err(e) = user.save(conn).await {
+            error!("Error updating user: {e:#?}");
+        }
+    }
+    Ok(())
+}
+
 #[post("/accounts/verify-password", data = "<data>")]
 async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, conn: DbConn) -> JsonResult {
     let data: SecretVerificationRequest = data.into_inner();
-    let user = headers.user;
+    let mut user = headers.user;

     if !user.check_valid_password(&data.master_password_hash) {
         err!("Invalid password")
     }

+    kdf_upgrade(&mut user, &data.master_password_hash, &conn).await?;
+
     Ok(Json(master_password_policy(&user, &conn).await))
 }

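`kdf_upgrade` is the genuinely new piece in this hunk: whenever a route sees the client-supplied master password hash, it can transparently bump a legacy account up to the currently configured iteration count, since that is the only moment the server holds material derived from the actual password. A usage sketch with simplified stand-in types (the 600_000 default is an assumption taken from vaultwarden's documented PBKDF2 default, not from this diff):

    // Sketch of upgrade-on-verify. `Account` stands in for vaultwarden's User.
    struct Account {
        password_iterations: i32,
        stored_hash: String,
    }

    const CONFIGURED_ITERATIONS: i32 = 600_000;

    fn rehash(pwd_hash: &str, iterations: i32) -> String {
        // placeholder for the real server-side re-hash of the client hash
        format!("{pwd_hash}:{iterations}")
    }

    fn kdf_upgrade(acct: &mut Account, pwd_hash: &str) {
        // Only upgrade, never downgrade, and only when verification succeeded.
        if acct.password_iterations < CONFIGURED_ITERATIONS {
            acct.password_iterations = CONFIGURED_ITERATIONS;
            acct.stored_hash = rehash(pwd_hash, CONFIGURED_ITERATIONS);
        }
    }

Note that the real function takes `&DbConn` like the rest of the refactor and logs a failed save instead of returning the error, so a failed upgrade never blocks password verification itself.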
-async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, conn: DbConn) -> JsonResult {
     use crate::util::format_date;

     let data: PasswordOrOtpData = data.into_inner();
     let mut user = headers.user;

-    data.validate(&user, true, &mut conn).await?;
+    data.validate(&user, true, &conn).await?;

     if rotate || user.api_key.is_none() {
         user.api_key = Some(crypto::generate_api_key());
-        user.save(&mut conn).await.expect("Error saving API key");
+        user.save(&conn).await.expect("Error saving API key");
     }

     Ok(Json(json!({
@@ -1169,11 +1314,12 @@ async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: D
 }

 #[get("/devices/knowndevice")]
-async fn get_known_device(device: KnownDevice, mut conn: DbConn) -> JsonResult {
-    let mut result = false;
-    if let Some(user) = User::find_by_mail(&device.email, &mut conn).await {
-        result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &mut conn).await.is_some();
-    }
+async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResult {
+    let result = if let Some(user) = User::find_by_mail(&device.email, &conn).await {
+        Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some()
+    } else {
+        false
+    };
     Ok(Json(json!(result)))
 }

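`get_known_device` also trades a mutable flag for a single `if let ... else` expression, which pairs naturally with the now-immutable connection binding. The same shape in miniature, with toy data instead of the real lookups:

    // Mutable-flag style (before):
    fn known_before(user: Option<&str>) -> bool {
        let mut result = false;
        if let Some(_u) = user {
            result = true; // in the real route this is a second DB lookup
        }
        result
    }

    // Expression style (after): one immutable binding, no reassignment.
    fn known_after(user: Option<&str>) -> bool {
        if let Some(_u) = user {
            true // the second lookup would go here, hence not just `user.is_some()`
        } else {
            false
        }
    }

    fn main() {
        assert_eq!(known_before(Some("a")), known_after(Some("a")));
    }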
@@ -1215,8 +1361,8 @@ impl<'r> FromRequest<'r> for KnownDevice {
 }

 #[get("/devices")]
-async fn get_all_devices(headers: Headers, mut conn: DbConn) -> JsonResult {
-    let devices = Device::find_with_auth_request_by_user(&headers.user.uuid, &mut conn).await;
+async fn get_all_devices(headers: Headers, conn: DbConn) -> JsonResult {
+    let devices = Device::find_with_auth_request_by_user(&headers.user.uuid, &conn).await;
     let devices = devices.iter().map(|device| device.to_json()).collect::<Vec<Value>>();

     Ok(Json(json!({
@@ -1227,8 +1373,8 @@ async fn get_all_devices(headers: Headers, mut conn: DbConn) -> JsonResult {
 }

 #[get("/devices/identifier/<device_id>")]
-async fn get_device(device_id: DeviceId, headers: Headers, mut conn: DbConn) -> JsonResult {
-    let Some(device) = Device::find_by_uuid_and_user(&device_id, &headers.user.uuid, &mut conn).await else {
+async fn get_device(device_id: DeviceId, headers: Headers, conn: DbConn) -> JsonResult {
+    let Some(device) = Device::find_by_uuid_and_user(&device_id, &headers.user.uuid, &conn).await else {
         err!("No device found");
     };
     Ok(Json(device.to_json()))
@@ -1246,17 +1392,11 @@ async fn post_device_token(device_id: DeviceId, data: Json<PushToken>, headers:
 }

 #[put("/devices/identifier/<device_id>/token", data = "<data>")]
-async fn put_device_token(
-    device_id: DeviceId,
-    data: Json<PushToken>,
-    headers: Headers,
-    mut conn: DbConn,
-) -> EmptyResult {
+async fn put_device_token(device_id: DeviceId, data: Json<PushToken>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data = data.into_inner();
     let token = data.push_token;

-    let Some(mut device) = Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await
-    else {
+    let Some(mut device) = Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &conn).await else {
         err!(format!("Error: device {device_id} should be present before a token can be assigned"))
     };

@@ -1269,17 +1409,17 @@ async fn put_device_token(
     }

     device.push_token = Some(token);
-    if let Err(e) = device.save(&mut conn).await {
+    if let Err(e) = device.save(&conn).await {
         err!(format!("An error occurred while trying to save the device push token: {e}"));
     }

-    register_push_device(&mut device, &mut conn).await?;
+    register_push_device(&mut device, &conn).await?;

     Ok(())
 }

 #[put("/devices/identifier/<device_id>/clear-token")]
-async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult {
+async fn put_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResult {
     // This only clears push token
     // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Controllers/DevicesController.cs#L215
     // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Services/Implementations/DeviceService.cs#L37
@@ -1291,8 +1431,8 @@ async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyR
         return Ok(());
     }

-    if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await {
-        Device::clear_push_token_by_uuid(&device_id, &mut conn).await?;
+    if let Some(device) = Device::find_by_uuid(&device_id, &conn).await {
+        Device::clear_push_token_by_uuid(&device_id, &conn).await?;
         unregister_push_device(&device.push_uuid).await?;
     }

@@ -1321,17 +1461,17 @@ struct AuthRequestRequest {
 async fn post_auth_request(
     data: Json<AuthRequestRequest>,
     client_headers: ClientHeaders,
-    mut conn: DbConn,
+    conn: DbConn,
     nt: Notify<'_>,
 ) -> JsonResult {
     let data = data.into_inner();

-    let Some(user) = User::find_by_mail(&data.email, &mut conn).await else {
+    let Some(user) = User::find_by_mail(&data.email, &conn).await else {
         err!("AuthRequest doesn't exist", "User not found")
     };

     // Validate device uuid and type
-    let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
+    let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &conn).await {
         Some(device) if device.atype == client_headers.device_type => device,
         _ => err!("AuthRequest doesn't exist", "Device verification failed"),
     };
@@ -1344,16 +1484,16 @@ async fn post_auth_request(
         data.access_code,
         data.public_key,
     );
-    auth_request.save(&mut conn).await?;
+    auth_request.save(&conn).await?;

-    nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &mut conn).await;
+    nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &conn).await;

     log_user_event(
         EventType::UserRequestedDeviceApproval as i32,
         &user.uuid,
         client_headers.device_type,
         &client_headers.ip.ip,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -1373,8 +1513,8 @@ async fn post_auth_request(
 }

 #[get("/auth-requests/<auth_request_id>")]
-async fn get_auth_request(auth_request_id: AuthRequestId, headers: Headers, mut conn: DbConn) -> JsonResult {
-    let Some(auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &mut conn).await
+async fn get_auth_request(auth_request_id: AuthRequestId, headers: Headers, conn: DbConn) -> JsonResult {
+    let Some(auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &conn).await
     else {
         err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
     };
@@ -1410,13 +1550,12 @@ async fn put_auth_request(
     auth_request_id: AuthRequestId,
     data: Json<AuthResponseRequest>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
     ant: AnonymousNotify<'_>,
     nt: Notify<'_>,
 ) -> JsonResult {
     let data = data.into_inner();
-    let Some(mut auth_request) =
-        AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &mut conn).await
+    let Some(mut auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &conn).await
     else {
         err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
     };
@@ -1438,28 +1577,28 @@ async fn put_auth_request(
         auth_request.master_password_hash = data.master_password_hash;
         auth_request.response_device_id = Some(data.device_identifier.clone());
         auth_request.response_date = Some(response_date);
-        auth_request.save(&mut conn).await?;
+        auth_request.save(&conn).await?;

         ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
-        nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &mut conn).await;
+        nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &conn).await;

         log_user_event(
             EventType::OrganizationUserApprovedAuthRequest as i32,
             &headers.user.uuid,
             headers.device.atype,
             &headers.ip.ip,
-            &mut conn,
+            &conn,
         )
         .await;
     } else {
         // If denied, there's no reason to keep the request
-        auth_request.delete(&mut conn).await?;
+        auth_request.delete(&conn).await?;
         log_user_event(
             EventType::OrganizationUserRejectedAuthRequest as i32,
             &headers.user.uuid,
             headers.device.atype,
             &headers.ip.ip,
-            &mut conn,
+            &conn,
         )
         .await;
     }
@@ -1484,9 +1623,9 @@ async fn get_auth_request_response(
     auth_request_id: AuthRequestId,
     code: &str,
     client_headers: ClientHeaders,
-    mut conn: DbConn,
+    conn: DbConn,
 ) -> JsonResult {
-    let Some(auth_request) = AuthRequest::find_by_uuid(&auth_request_id, &mut conn).await else {
+    let Some(auth_request) = AuthRequest::find_by_uuid(&auth_request_id, &conn).await else {
         err!("AuthRequest doesn't exist", "User not found")
     };

@@ -1514,9 +1653,16 @@ async fn get_auth_request_response(
     })))
 }

+// Now unused but not yet removed
+// cf https://github.com/bitwarden/clients/blob/9b2fbdba1c028bf3394064609630d2ec224baefa/libs/common/src/services/api.service.ts#L245
 #[get("/auth-requests")]
-async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {
-    let auth_requests = AuthRequest::find_by_user(&headers.user.uuid, &mut conn).await;
+async fn get_auth_requests(headers: Headers, conn: DbConn) -> JsonResult {
+    get_auth_requests_pending(headers, conn).await
+}
+
+#[get("/auth-requests/pending")]
+async fn get_auth_requests_pending(headers: Headers, conn: DbConn) -> JsonResult {
+    let auth_requests = AuthRequest::find_by_user(&headers.user.uuid, &conn).await;

     Ok(Json(json!({
         "data": auth_requests
@@ -1546,8 +1692,8 @@ async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {

 pub async fn purge_auth_requests(pool: DbPool) {
     debug!("Purging auth requests");
-    if let Ok(mut conn) = pool.get().await {
-        AuthRequest::purge_expired_auth_requests(&mut conn).await;
+    if let Ok(conn) = pool.get().await {
+        AuthRequest::purge_expired_auth_requests(&conn).await;
     } else {
         error!("Failed to get DB connection while purging trashed ciphers")
     }

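The scheduled jobs follow the same pattern as the routes: check out a connection from the `DbPool`, now bound immutably, and log instead of panicking when the pool is exhausted. A sketch of that job skeleton with hypothetical stand-in types:

    // Hypothetical stand-ins for DbPool/DbConn to show the job shape.
    struct DbConn;
    struct DbPool;

    impl DbPool {
        async fn get(&self) -> Result<DbConn, ()> {
            Ok(DbConn)
        }
    }

    async fn purge_job(pool: DbPool) {
        // `conn` no longer needs to be `mut`; callees take `&DbConn`.
        if let Ok(conn) = pool.get().await {
            let _ = &conn; // run the purge query here
        } else {
            // A missing connection is logged, not fatal: the job reruns on schedule.
            eprintln!("Failed to get DB connection for the purge job");
        }
    }

As an aside, the error string in `purge_auth_requests` still reads "purging trashed ciphers", which looks copied from the cipher-purge job rather than written for this one.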
File diff suppressed because it is too large
@@ -8,7 +8,13 @@ use crate::{
         EmptyResult, JsonResult,
     },
     auth::{decode_emergency_access_invite, Headers},
-    db::{models::*, DbConn, DbPool},
+    db::{
+        models::{
+            Cipher, EmergencyAccess, EmergencyAccessId, EmergencyAccessStatus, EmergencyAccessType, Invitation,
+            Membership, MembershipType, OrgPolicy, TwoFactor, User, UserId,
+        },
+        DbConn, DbPool,
+    },
     mail,
     util::NumberOrString,
     CONFIG,
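Swapping the `models::*` glob for an explicit list recurs in emergency_access.rs, folders.rs, and the settings module later in this compare; the import block then documents exactly which models a file touches, and unused-import warnings stay meaningful. A tiny illustration of the difference, using a toy module rather than the real crate layout:

    mod models {
        pub struct User;
        pub struct Invitation;
    }

    // Before: `use models::*;` pulls in everything, including types this
    // file never touches. After: the list names the real dependencies.
    use self::models::{Invitation, User};

    fn main() {
        let (_u, _i) = (User, Invitation);
    }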
@@ -40,28 +46,15 @@ pub fn routes() -> Vec<Route> {
 // region get

 #[get("/emergency-access/trusted")]
-async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
-    if !CONFIG.emergency_access_allowed() {
-        return Json(json!({
-            "data": [{
-                "id": "",
-                "status": 2,
-                "type": 0,
-                "waitTimeDays": 0,
-                "granteeId": "",
-                "email": "",
-                "name": "NOTE: Emergency Access is disabled!",
-                "object": "emergencyAccessGranteeDetails",
-
-            }],
-            "object": "list",
-            "continuationToken": null
-        }));
-    }
-    let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await;
+async fn get_contacts(headers: Headers, conn: DbConn) -> Json<Value> {
+    let emergency_access_list = if CONFIG.emergency_access_allowed() {
+        EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await
+    } else {
+        Vec::new()
+    };
     let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
     for ea in emergency_access_list {
-        if let Some(grantee) = ea.to_json_grantee_details(&mut conn).await {
+        if let Some(grantee) = ea.to_json_grantee_details(&conn).await {
             emergency_access_list_json.push(grantee)
         }
     }
@@ -74,15 +67,15 @@ async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
 }

 #[get("/emergency-access/granted")]
-async fn get_grantees(headers: Headers, mut conn: DbConn) -> Json<Value> {
+async fn get_grantees(headers: Headers, conn: DbConn) -> Json<Value> {
     let emergency_access_list = if CONFIG.emergency_access_allowed() {
-        EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn).await
     } else {
         Vec::new()
     };
     let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
     for ea in emergency_access_list {
-        emergency_access_list_json.push(ea.to_json_grantor_details(&mut conn).await);
+        emergency_access_list_json.push(ea.to_json_grantor_details(&conn).await);
     }

     Json(json!({
@@ -93,12 +86,12 @@ async fn get_grantees(headers: Headers, mut conn: DbConn) -> Json<Value> {
 }

 #[get("/emergency-access/<emer_id>")]
-async fn get_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn get_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

-    match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await {
+    match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await {
         Some(emergency_access) => Ok(Json(
-            emergency_access.to_json_grantee_details(&mut conn).await.expect("Grantee user should exist but does not!"),
+            emergency_access.to_json_grantee_details(&conn).await.expect("Grantee user should exist but does not!"),
         )),
         None => err!("Emergency access not valid."),
     }
@@ -131,14 +124,14 @@ async fn post_emergency_access(
     emer_id: EmergencyAccessId,
     data: Json<EmergencyAccessUpdateData>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
 ) -> JsonResult {
     check_emergency_access_enabled()?;

     let data: EmergencyAccessUpdateData = data.into_inner();

     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -154,7 +147,7 @@ async fn post_emergency_access(
         emergency_access.key_encrypted = data.key_encrypted;
     }

-    emergency_access.save(&mut conn).await?;
+    emergency_access.save(&conn).await?;
     Ok(Json(emergency_access.to_json()))
 }

@@ -163,12 +156,12 @@ async fn post_emergency_access(
 // region delete

 #[delete("/emergency-access/<emer_id>")]
-async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> EmptyResult {
     check_emergency_access_enabled()?;

     let emergency_access = match (
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await,
-        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &mut conn).await,
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await,
+        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &conn).await,
     ) {
         (Some(grantor_emer), None) => {
             info!("Grantor deleted emergency access {emer_id}");
@@ -181,7 +174,7 @@ async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, m
         _ => err!("Emergency access not valid."),
     };

-    emergency_access.delete(&mut conn).await?;
+    emergency_access.delete(&conn).await?;
     Ok(())
 }

@@ -203,7 +196,7 @@ struct EmergencyAccessInviteData {
 }

 #[post("/emergency-access/invite", data = "<data>")]
-async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, conn: DbConn) -> EmptyResult {
     check_emergency_access_enabled()?;

     let data: EmergencyAccessInviteData = data.into_inner();
@@ -224,7 +217,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
         err!("You can not set yourself as an emergency contact.")
     }

-    let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
+    let (grantee_user, new_user) = match User::find_by_mail(&email, &conn).await {
         None => {
             if !CONFIG.invitations_allowed() {
                 err!(format!("Grantee user does not exist: {email}"))
@@ -236,11 +229,11 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu

             if !CONFIG.mail_enabled() {
                 let invitation = Invitation::new(&email);
-                invitation.save(&mut conn).await?;
+                invitation.save(&conn).await?;
             }

-            let mut user = User::new(email.clone());
-            user.save(&mut conn).await?;
+            let mut user = User::new(&email, None);
+            user.save(&conn).await?;
             (user, true)
         }
         Some(user) if user.password_hash.is_empty() => (user, true),
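Beyond the connection change, `User::new` now takes the email by reference plus a second argument that this diff only ever passes as `None`. The sketch below assumes it is an optional display name (plausible for SSO-provisioned accounts); the definition is not shown in this compare, so treat the field as hypothetical:

    // Sketch of the constructor change; the `name` field is an assumption.
    struct User {
        email: String,
        name: Option<String>, // hypothetical: populated when an IdP supplies one
    }

    impl User {
        // before: fn new(email: String) -> Self
        fn new(email: &str, name: Option<String>) -> Self {
            Self {
                email: email.to_owned(),
                name,
            }
        }
    }

    fn main() {
        let _invited = User::new("grantee@example.com", None);
    }

Taking `&str` instead of `String` also removes the `email.clone()` at this call site, since the caller still needs `email` afterwards.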
@@ -251,7 +244,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
         &grantor_user.uuid,
         &grantee_user.uuid,
         &grantee_user.email,
-        &mut conn,
+        &conn,
     )
     .await
     .is_some()
@@ -261,7 +254,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu

     let mut new_emergency_access =
         EmergencyAccess::new(grantor_user.uuid, grantee_user.email, emergency_access_status, new_type, wait_time_days);
-    new_emergency_access.save(&mut conn).await?;
+    new_emergency_access.save(&conn).await?;

     if CONFIG.mail_enabled() {
         mail::send_emergency_access_invite(
@@ -274,18 +267,18 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
         .await?;
     } else if !new_user {
         // if mail is not enabled immediately accept the invitation for existing users
-        new_emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?;
+        new_emergency_access.accept_invite(&grantee_user.uuid, &email, &conn).await?;
     }

     Ok(())
 }

 #[post("/emergency-access/<emer_id>/reinvite")]
-async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> EmptyResult {
+async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> EmptyResult {
     check_emergency_access_enabled()?;

     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -298,7 +291,7 @@ async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, mut conn: D
         err!("Email not valid.")
     };

-    let Some(grantee_user) = User::find_by_mail(&email, &mut conn).await else {
+    let Some(grantee_user) = User::find_by_mail(&email, &conn).await else {
         err!("Grantee user not found.")
     };

@@ -315,10 +308,10 @@ async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, mut conn: D
         .await?;
     } else if !grantee_user.password_hash.is_empty() {
         // accept the invitation for existing user
-        emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?;
-    } else if CONFIG.invitations_allowed() && Invitation::find_by_mail(&email, &mut conn).await.is_none() {
+        emergency_access.accept_invite(&grantee_user.uuid, &email, &conn).await?;
+    } else if CONFIG.invitations_allowed() && Invitation::find_by_mail(&email, &conn).await.is_none() {
         let invitation = Invitation::new(&email);
-        invitation.save(&mut conn).await?;
+        invitation.save(&conn).await?;
     }

     Ok(())
@@ -335,7 +328,7 @@ async fn accept_invite(
     emer_id: EmergencyAccessId,
     data: Json<AcceptData>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
 ) -> EmptyResult {
     check_emergency_access_enabled()?;

@@ -349,9 +342,9 @@ async fn accept_invite(
         err!("Claim email does not match current users email")
     }

-    let grantee_user = match User::find_by_mail(&claims.email, &mut conn).await {
+    let grantee_user = match User::find_by_mail(&claims.email, &conn).await {
         Some(user) => {
-            Invitation::take(&claims.email, &mut conn).await;
+            Invitation::take(&claims.email, &conn).await;
             user
         }
         None => err!("Invited user not found"),
@@ -360,13 +353,13 @@ async fn accept_invite(
     // We need to search for the uuid in combination with the email, since we do not yet store the uuid of the grantee in the database.
     // The uuid of the grantee gets stored once accepted.
     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantee_email(&emer_id, &headers.user.email, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantee_email(&emer_id, &headers.user.email, &conn).await
     else {
         err!("Emergency access not valid.")
     };

     // get grantor user to send Accepted email
-    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
         err!("Grantor user not found.")
     };

@@ -374,7 +367,7 @@ async fn accept_invite(
         && grantor_user.name == claims.grantor_name
         && grantor_user.email == claims.grantor_email
     {
-        emergency_access.accept_invite(&grantee_user.uuid, &grantee_user.email, &mut conn).await?;
+        emergency_access.accept_invite(&grantee_user.uuid, &grantee_user.email, &conn).await?;

         if CONFIG.mail_enabled() {
             mail::send_emergency_access_invite_accepted(&grantor_user.email, &grantee_user.email).await?;
@@ -397,7 +390,7 @@ async fn confirm_emergency_access(
     emer_id: EmergencyAccessId,
     data: Json<ConfirmData>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
 ) -> JsonResult {
     check_emergency_access_enabled()?;

@@ -406,7 +399,7 @@ async fn confirm_emergency_access(
     let key = data.key;

     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &confirming_user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &confirming_user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -417,12 +410,12 @@ async fn confirm_emergency_access(
         err!("Emergency access not valid.")
     }

-    let Some(grantor_user) = User::find_by_uuid(&confirming_user.uuid, &mut conn).await else {
+    let Some(grantor_user) = User::find_by_uuid(&confirming_user.uuid, &conn).await else {
         err!("Grantor user not found.")
     };

     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
             err!("Grantee user not found.")
         };

@@ -430,7 +423,7 @@ async fn confirm_emergency_access(
         emergency_access.key_encrypted = Some(key);
         emergency_access.email = None;

-        emergency_access.save(&mut conn).await?;
+        emergency_access.save(&conn).await?;

         if CONFIG.mail_enabled() {
             mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?;
@@ -446,12 +439,12 @@ async fn confirm_emergency_access(
 // region access emergency access

 #[post("/emergency-access/<emer_id>/initiate")]
-async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

     let initiating_user = headers.user;
     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &initiating_user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &initiating_user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -460,7 +453,7 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
         err!("Emergency access not valid.")
     }

-    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
         err!("Grantor user not found.")
     };

@@ -469,7 +462,7 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
     emergency_access.updated_at = now;
     emergency_access.recovery_initiated_at = Some(now);
     emergency_access.last_notification_at = Some(now);
-    emergency_access.save(&mut conn).await?;
+    emergency_access.save(&conn).await?;

     if CONFIG.mail_enabled() {
         mail::send_emergency_access_recovery_initiated(
@@ -484,11 +477,11 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
 }

 #[post("/emergency-access/<emer_id>/approve")]
-async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -497,17 +490,17 @@ async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
         err!("Emergency access not valid.")
     }

-    let Some(grantor_user) = User::find_by_uuid(&headers.user.uuid, &mut conn).await else {
+    let Some(grantor_user) = User::find_by_uuid(&headers.user.uuid, &conn).await else {
         err!("Grantor user not found.")
     };

     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
             err!("Grantee user not found.")
         };

         emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32;
-        emergency_access.save(&mut conn).await?;
+        emergency_access.save(&conn).await?;

         if CONFIG.mail_enabled() {
             mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?;
@@ -519,11 +512,11 @@ async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
 }

 #[post("/emergency-access/<emer_id>/reject")]
-async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

     let Some(mut emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -535,12 +528,12 @@ async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, m
     }

     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
+        let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
             err!("Grantee user not found.")
         };

         emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
-        emergency_access.save(&mut conn).await?;
+        emergency_access.save(&conn).await?;

         if CONFIG.mail_enabled() {
             mail::send_emergency_access_recovery_rejected(&grantee_user.email, &headers.user.name).await?;
@@ -556,11 +549,11 @@ async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, m
 // region action

 #[post("/emergency-access/<emer_id>/view")]
-async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

     let Some(emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -569,8 +562,8 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut
         err!("Emergency access not valid.")
     }

-    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &mut conn).await;
-    let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, CipherSyncType::User, &mut conn).await;
+    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
+    let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, CipherSyncType::User, &conn).await;

     let mut ciphers_json = Vec::with_capacity(ciphers.len());
     for c in ciphers {
@@ -580,7 +573,7 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut
             &emergency_access.grantor_uuid,
             Some(&cipher_sync_data),
             CipherSyncType::User,
-            &mut conn,
+            &conn,
         )
         .await?,
         );
@@ -594,12 +587,12 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut
 }

 #[post("/emergency-access/<emer_id>/takeover")]
-async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
+async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
     check_emergency_access_enabled()?;

     let requesting_user = headers.user;
     let Some(emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -608,7 +601,7 @@ async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
         err!("Emergency access not valid.")
     }

-    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+    let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
         err!("Grantor user not found.")
     };

@@ -636,7 +629,7 @@ async fn password_emergency_access(
     emer_id: EmergencyAccessId,
     data: Json<EmergencyAccessPasswordData>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
 ) -> EmptyResult {
     check_emergency_access_enabled()?;

@@ -646,7 +639,7 @@ async fn password_emergency_access(

     let requesting_user = headers.user;
     let Some(emergency_access) =
-        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
+        EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
     else {
         err!("Emergency access not valid.")
     };
@@ -655,21 +648,21 @@ async fn password_emergency_access(
         err!("Emergency access not valid.")
     }

-    let Some(mut grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
+    let Some(mut grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
         err!("Grantor user not found.")
     };

     // change grantor_user password
     grantor_user.set_password(new_master_password_hash, Some(data.key), true, None);
-    grantor_user.save(&mut conn).await?;
+    grantor_user.save(&conn).await?;

     // Disable TwoFactor providers since they will otherwise block logins
-    TwoFactor::delete_all_by_user(&grantor_user.uuid, &mut conn).await?;
+    TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn).await?;

     // Remove grantor from all organisations unless Owner
-    for member in Membership::find_any_state_by_user(&grantor_user.uuid, &mut conn).await {
+    for member in Membership::find_any_state_by_user(&grantor_user.uuid, &conn).await {
         if member.atype != MembershipType::Owner as i32 {
-            member.delete(&mut conn).await?;
+            member.delete(&conn).await?;
         }
     }
     Ok(())
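The takeover path is worth reading as a sequence: rewrite the grantor's password, drop their two-factor providers (which would otherwise lock the new password out), and strip non-owner organization memberships, presumably so an organization is never left without its owner. A condensed sketch of that ordering with stand-in types:

    // Order matters: 2FA removal follows the password reset, and the
    // membership cleanup deliberately spares Owner memberships.
    struct Grantor {
        twofactor_enabled: bool,
        memberships: Vec<&'static str>, // "Owner", "Admin", "User", ...
    }

    fn takeover(grantor: &mut Grantor, _new_password_hash: &str) {
        // 1. set_password(...) happens first in the real handler
        grantor.twofactor_enabled = false; // 2. TwoFactor::delete_all_by_user
        grantor.memberships.retain(|m| *m == "Owner"); // 3. drop non-owner memberships
    }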
@@ -678,10 +671,10 @@ async fn password_emergency_access(
|
||||
// endregion
|
||||
|
||||
#[get("/emergency-access/<emer_id>/policies")]
|
||||
async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let requesting_user = headers.user;
|
||||
let Some(emergency_access) =
|
||||
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
|
||||
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
|
||||
else {
|
||||
err!("Emergency access not valid.")
|
||||
};
|
||||
@@ -690,11 +683,11 @@ async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
|
||||
err!("Emergency access not valid.")
|
||||
}
|
||||
|
||||
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
|
||||
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
|
||||
err!("Grantor user not found.")
|
||||
};
|
||||
|
||||
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn);
|
||||
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &conn);
|
||||
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
|
||||
|
||||
Ok(Json(json!({
|
||||
@@ -728,8 +721,8 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Ok(mut conn) = pool.get().await {
|
||||
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
|
||||
if let Ok(conn) = pool.get().await {
|
||||
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&conn).await;
|
||||
|
||||
if emergency_access_list.is_empty() {
|
||||
debug!("No emergency request timeout to approve");
|
||||
@@ -743,18 +736,18 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
|
||||
if recovery_allowed_at.le(&now) {
|
||||
// Only update the access status
|
||||
// Updating the whole record could cause issues when the emergency_notification_reminder_job is also active
|
||||
emer.update_access_status_and_save(EmergencyAccessStatus::RecoveryApproved as i32, &now, &mut conn)
|
||||
emer.update_access_status_and_save(EmergencyAccessStatus::RecoveryApproved as i32, &now, &conn)
|
||||
.await
|
||||
.expect("Unable to update emergency access status");
|
||||
|
||||
if CONFIG.mail_enabled() {
|
||||
// get grantor user to send Accepted email
|
||||
let grantor_user =
|
||||
User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
|
||||
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found");
|
||||
|
||||
// get grantee user to send Accepted email
|
||||
let grantee_user =
|
||||
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
|
||||
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &conn)
|
||||
.await
|
||||
.expect("Grantee user not found");
|
||||
|
||||
@@ -783,8 +776,8 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Ok(mut conn) = pool.get().await {
|
||||
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
|
||||
if let Ok(conn) = pool.get().await {
|
||||
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&conn).await;
|
||||
|
||||
if emergency_access_list.is_empty() {
|
||||
debug!("No emergency request reminder notification to send");
|
||||
@@ -805,18 +798,18 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
|
||||
if final_recovery_reminder_at.le(&now) && next_recovery_reminder_at.le(&now) {
|
||||
// Only update the last notification date
|
||||
// Updating the whole record could cause issues when the emergency_request_timeout_job is also active
|
||||
emer.update_last_notification_date_and_save(&now, &mut conn)
|
||||
emer.update_last_notification_date_and_save(&now, &conn)
|
||||
.await
|
||||
.expect("Unable to update emergency access notification date");
|
||||
|
||||
if CONFIG.mail_enabled() {
|
||||
// get grantor user to send Accepted email
|
||||
let grantor_user =
|
||||
User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
|
||||
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found");
|
||||
|
||||
// get grantee user to send Accepted email
|
||||
let grantee_user =
|
||||
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
|
||||
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &conn)
|
||||
.await
|
||||
.expect("Grantee user not found");
|
||||
|
||||
|
||||
@@ -31,12 +31,7 @@ struct EventRange {
|
||||
|
||||
// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/EventsController.cs#L87
|
||||
#[get("/organizations/<org_id>/events?<data..>")]
|
||||
async fn get_org_events(
|
||||
org_id: OrganizationId,
|
||||
data: EventRange,
|
||||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
) -> JsonResult {
|
||||
async fn get_org_events(org_id: OrganizationId, data: EventRange, headers: AdminHeaders, conn: DbConn) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
}
|
||||
@@ -53,7 +48,7 @@ async fn get_org_events(
|
||||
parse_date(&data.end)
|
||||
};
|
||||
|
||||
Event::find_by_organization_uuid(&org_id, &start_date, &end_date, &mut conn)
|
||||
Event::find_by_organization_uuid(&org_id, &start_date, &end_date, &conn)
|
||||
.await
|
||||
.iter()
|
||||
.map(|e| e.to_json())
|
||||
@@ -68,14 +63,14 @@ async fn get_org_events(
|
||||
}
|
||||
|
||||
#[get("/ciphers/<cipher_id>/events?<data..>")]
|
||||
async fn get_cipher_events(cipher_id: CipherId, data: EventRange, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
async fn get_cipher_events(cipher_id: CipherId, data: EventRange, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
// Return an empty vec when we org events are disabled.
|
||||
// This prevents client errors
|
||||
let events_json: Vec<Value> = if !CONFIG.org_events_enabled() {
|
||||
Vec::with_capacity(0)
|
||||
} else {
|
||||
let mut events_json = Vec::with_capacity(0);
|
||||
if Membership::user_has_ge_admin_access_to_cipher(&headers.user.uuid, &cipher_id, &mut conn).await {
|
||||
if Membership::user_has_ge_admin_access_to_cipher(&headers.user.uuid, &cipher_id, &conn).await {
|
||||
let start_date = parse_date(&data.start);
|
||||
let end_date = if let Some(before_date) = &data.continuation_token {
|
||||
parse_date(before_date)
|
||||
@@ -83,7 +78,7 @@ async fn get_cipher_events(cipher_id: CipherId, data: EventRange, headers: Heade
|
||||
parse_date(&data.end)
|
||||
};
|
||||
|
||||
events_json = Event::find_by_cipher_uuid(&cipher_id, &start_date, &end_date, &mut conn)
|
||||
events_json = Event::find_by_cipher_uuid(&cipher_id, &start_date, &end_date, &conn)
|
||||
.await
|
||||
.iter()
|
||||
.map(|e| e.to_json())
|
||||
@@ -105,7 +100,7 @@ async fn get_user_events(
|
||||
member_id: MembershipId,
|
||||
data: EventRange,
|
||||
headers: AdminHeaders,
|
||||
mut conn: DbConn,
|
||||
conn: DbConn,
|
||||
) -> JsonResult {
|
||||
if org_id != headers.org_id {
|
||||
err!("Organization not found", "Organization id's do not match");
|
||||
@@ -122,7 +117,7 @@ async fn get_user_events(
|
||||
parse_date(&data.end)
|
||||
};
|
||||
|
||||
Event::find_by_org_and_member(&org_id, &member_id, &start_date, &end_date, &mut conn)
|
||||
Event::find_by_org_and_member(&org_id, &member_id, &start_date, &end_date, &conn)
|
||||
.await
|
||||
.iter()
|
||||
.map(|e| e.to_json())
|
||||
@@ -172,7 +167,7 @@ struct EventCollection {
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Events/Controllers/CollectController.cs
|
||||
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs
|
||||
#[post("/collect", format = "application/json", data = "<data>")]
|
||||
async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult {
|
||||
async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, conn: DbConn) -> EmptyResult {
|
||||
if !CONFIG.org_events_enabled() {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -187,7 +182,7 @@ async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers,
|
||||
headers.device.atype,
|
||||
Some(event_date),
|
||||
&headers.ip.ip,
|
||||
&mut conn,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -201,14 +196,14 @@ async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers,
|
||||
headers.device.atype,
|
||||
Some(event_date),
|
||||
&headers.ip.ip,
|
||||
&mut conn,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if let Some(cipher_uuid) = &event.cipher_id {
|
||||
if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await {
|
||||
if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &conn).await {
|
||||
if let Some(org_id) = cipher.organization_uuid {
|
||||
_log_event(
|
||||
event.r#type,
|
||||
@@ -218,7 +213,7 @@ async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers,
|
||||
headers.device.atype,
|
||||
Some(event_date),
|
||||
&headers.ip.ip,
|
||||
&mut conn,
|
||||
&conn,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@@ -230,7 +225,7 @@ async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers,
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn log_user_event(event_type: i32, user_id: &UserId, device_type: i32, ip: &IpAddr, conn: &mut DbConn) {
|
||||
pub async fn log_user_event(event_type: i32, user_id: &UserId, device_type: i32, ip: &IpAddr, conn: &DbConn) {
|
||||
if !CONFIG.org_events_enabled() {
|
||||
return;
|
||||
}
|
||||
@@ -243,7 +238,7 @@ async fn _log_user_event(
|
||||
device_type: i32,
|
||||
event_date: Option<NaiveDateTime>,
|
||||
ip: &IpAddr,
|
||||
conn: &mut DbConn,
|
||||
conn: &DbConn,
|
||||
) {
|
||||
let memberships = Membership::find_by_user(user_id, conn).await;
|
||||
let mut events: Vec<Event> = Vec::with_capacity(memberships.len() + 1); // We need an event per org and one without an org
|
||||
@@ -278,7 +273,7 @@ pub async fn log_event(
|
||||
act_user_id: &UserId,
|
||||
device_type: i32,
|
||||
ip: &IpAddr,
|
||||
conn: &mut DbConn,
|
||||
conn: &DbConn,
|
||||
) {
|
||||
if !CONFIG.org_events_enabled() {
|
||||
return;
|
||||
@@ -295,7 +290,7 @@ async fn _log_event(
|
||||
device_type: i32,
|
||||
event_date: Option<NaiveDateTime>,
|
||||
ip: &IpAddr,
|
||||
conn: &mut DbConn,
|
||||
conn: &DbConn,
|
||||
) {
|
||||
// Create a new empty event
|
||||
let mut event = Event::new(event_type, event_date);
|
||||
@@ -340,8 +335,8 @@ pub async fn event_cleanup_job(pool: DbPool) {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Ok(mut conn) = pool.get().await {
|
||||
Event::clean_events(&mut conn).await.ok();
|
||||
if let Ok(conn) = pool.get().await {
|
||||
Event::clean_events(&conn).await.ok();
|
||||
} else {
|
||||
error!("Failed to get DB connection while trying to cleanup the events table")
|
||||
}
|
||||
|
||||
@@ -4,7 +4,10 @@ use serde_json::Value;
 use crate::{
     api::{EmptyResult, JsonResult, Notify, UpdateType},
     auth::Headers,
-    db::{models::*, DbConn},
+    db::{
+        models::{Folder, FolderId},
+        DbConn,
+    },
 };

 pub fn routes() -> Vec<rocket::Route> {
@@ -12,8 +15,8 @@ pub fn routes() -> Vec<rocket::Route> {
 }

 #[get("/folders")]
-async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
-    let folders = Folder::find_by_user(&headers.user.uuid, &mut conn).await;
+async fn get_folders(headers: Headers, conn: DbConn) -> Json<Value> {
+    let folders = Folder::find_by_user(&headers.user.uuid, &conn).await;
     let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();

     Json(json!({
@@ -24,8 +27,8 @@ async fn get_folders(headers: Headers, mut conn: DbConn) -> Json<Value> {
 }

 #[get("/folders/<folder_id>")]
-async fn get_folder(folder_id: FolderId, headers: Headers, mut conn: DbConn) -> JsonResult {
-    match Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &mut conn).await {
+async fn get_folder(folder_id: FolderId, headers: Headers, conn: DbConn) -> JsonResult {
+    match Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &conn).await {
         Some(folder) => Ok(Json(folder.to_json())),
         _ => err!("Invalid folder", "Folder does not exist or belongs to another user"),
     }
@@ -39,13 +42,13 @@ pub struct FolderData {
 }

 #[post("/folders", data = "<data>")]
-async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
+async fn post_folders(data: Json<FolderData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
     let data: FolderData = data.into_inner();

     let mut folder = Folder::new(headers.user.uuid, data.name);

-    folder.save(&mut conn).await?;
-    nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device, &mut conn).await;
+    folder.save(&conn).await?;
+    nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device, &conn).await;

     Ok(Json(folder.to_json()))
 }
@@ -66,19 +69,19 @@ async fn put_folder(
     folder_id: FolderId,
     data: Json<FolderData>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
     nt: Notify<'_>,
 ) -> JsonResult {
     let data: FolderData = data.into_inner();

-    let Some(mut folder) = Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &mut conn).await else {
+    let Some(mut folder) = Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &conn).await else {
         err!("Invalid folder", "Folder does not exist or belongs to another user")
     };

     folder.name = data.name;

-    folder.save(&mut conn).await?;
-    nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device, &mut conn).await;
+    folder.save(&conn).await?;
+    nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device, &conn).await;

     Ok(Json(folder.to_json()))
 }
@@ -89,14 +92,14 @@ async fn delete_folder_post(folder_id: FolderId, headers: Headers, conn: DbConn,
 }

 #[delete("/folders/<folder_id>")]
-async fn delete_folder(folder_id: FolderId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
-    let Some(folder) = Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &mut conn).await else {
+async fn delete_folder(folder_id: FolderId, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+    let Some(folder) = Folder::find_by_uuid_and_user(&folder_id, &headers.user.uuid, &conn).await else {
         err!("Invalid folder", "Folder does not exist or belongs to another user")
     };

     // Delete the actual folder entry
-    folder.delete(&mut conn).await?;
+    folder.delete(&conn).await?;

-    nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device, &mut conn).await;
+    nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device, &conn).await;
     Ok(())
 }

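Handlers keep taking the `DbConn` request guard by value, just without `mut`, and every model call borrows it immutably. For such an API to be sound, the connection wrapper generally has to provide interior mutability; the following is a sketch of that idea under that assumption, not vaultwarden's actual wrapper:

    // Assumption: the wrapper serializes access internally so `&self` suffices.
    use std::sync::Mutex;

    struct RawConn; // stand-in for the underlying database connection

    struct DbConn {
        inner: Mutex<RawConn>,
    }

    impl DbConn {
        // The lock restores the exclusive access that `&mut self`
        // used to guarantee at the type level.
        fn run<R>(&self, f: impl FnOnce(&mut RawConn) -> R) -> R {
            let mut guard = self.inner.lock().expect("connection lock poisoned");
            f(&mut guard)
        }
    }
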
@@ -50,11 +50,15 @@ pub fn events_routes() -> Vec<Route> {
 use rocket::{serde::json::Json, serde::json::Value, Catcher, Route};

 use crate::{
-    api::{JsonResult, Notify, UpdateType},
+    api::{EmptyResult, JsonResult, Notify, UpdateType},
     auth::Headers,
-    db::DbConn,
+    db::{
+        models::{Membership, MembershipStatus, OrgPolicy, Organization, User},
+        DbConn,
+    },
     error::Error,
     http_client::make_http_request,
     mail,
     util::parse_experimental_client_feature_flags,
 };

@@ -105,12 +109,7 @@ struct EquivDomainData {
 }

 #[post("/settings/domains", data = "<data>")]
-async fn post_eq_domains(
-    data: Json<EquivDomainData>,
-    headers: Headers,
-    mut conn: DbConn,
-    nt: Notify<'_>,
-) -> JsonResult {
+async fn post_eq_domains(data: Json<EquivDomainData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
     let data: EquivDomainData = data.into_inner();

     let excluded_globals = data.excluded_global_equivalent_domains.unwrap_or_default();
@@ -122,9 +121,9 @@ async fn post_eq_domains(
     user.excluded_globals = to_string(&excluded_globals).unwrap_or_else(|_| "[]".to_string());
     user.equivalent_domains = to_string(&equivalent_domains).unwrap_or_else(|_| "[]".to_string());

-    user.save(&mut conn).await?;
+    user.save(&conn).await?;

-    nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &mut conn).await;
+    nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &conn).await;

     Ok(Json(json!({})))
 }
@@ -259,3 +258,34 @@ fn api_not_found() -> Json<Value> {
         }
     }))
 }
+
+async fn accept_org_invite(
+    user: &User,
+    mut member: Membership,
+    reset_password_key: Option<String>,
+    conn: &DbConn,
+) -> EmptyResult {
+    if member.status != MembershipStatus::Invited as i32 {
+        err!("User already accepted the invitation");
+    }
+
+    member.status = MembershipStatus::Accepted as i32;
+    member.reset_password_key = reset_password_key;
+
+    // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
+    OrgPolicy::check_user_allowed(&member, "join", conn).await?;
+
+    member.save(conn).await?;
+
+    if crate::CONFIG.mail_enabled() {
+        let org = match Organization::find_by_uuid(&member.org_uuid, conn).await {
+            Some(org) => org,
+            None => err!("Organization not found."),
+        };
+        // User was invited to an organization, so they must be confirmed manually after acceptance
+        mail::send_invite_accepted(&user.email, &member.invited_by_email.unwrap_or(org.billing_email), &org.name)
+            .await?;
+    }
+
+    Ok(())
+}

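The new `accept_org_invite` helper centralizes the Invited-to-Accepted transition: it rejects a repeated acceptance, runs the org-policy check before persisting, and only then notifies the inviter by mail. A condensed, self-contained sketch of the state guard (types and discriminant handling are stand-ins, not the real definitions):

    // Condensed from the flow above; types and values are stand-ins.
    #[derive(Clone, Copy)]
    enum MembershipStatus {
        Invited,
        Accepted,
    }

    struct Membership {
        status: i32,
    }

    fn accept(member: &mut Membership) -> Result<(), &'static str> {
        if member.status != MembershipStatus::Invited as i32 {
            return Err("User already accepted the invitation");
        }
        member.status = MembershipStatus::Accepted as i32;
        Ok(())
    }
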
File diff suppressed because it is too large
@@ -10,7 +10,13 @@ use std::collections::HashSet;
 use crate::{
     api::EmptyResult,
     auth,
-    db::{models::*, DbConn},
+    db::{
+        models::{
+            Group, GroupUser, Invitation, Membership, MembershipStatus, MembershipType, Organization,
+            OrganizationApiKey, OrganizationId, User,
+        },
+        DbConn,
+    },
     mail, CONFIG,
 };

@@ -44,7 +50,7 @@ struct OrgImportData {
 }

 #[post("/public/organization/import", data = "<data>")]
-async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
+async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn) -> EmptyResult {
     // Most of the logic for this function can be found here
     // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/OrganizationService.cs#L1203

@@ -55,13 +61,12 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
         let mut user_created: bool = false;
         if user_data.deleted {
             // If user is marked for deletion and it exists, revoke it
-            if let Some(mut member) = Membership::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await {
+            if let Some(mut member) = Membership::find_by_email_and_org(&user_data.email, &org_id, &conn).await {
                 // Only revoke a user if it is not the last confirmed owner
                 let revoked = if member.atype == MembershipType::Owner
                     && member.status == MembershipStatus::Confirmed as i32
                 {
-                    if Membership::count_confirmed_by_org_and_type(&org_id, MembershipType::Owner, &mut conn).await <= 1
-                    {
+                    if Membership::count_confirmed_by_org_and_type(&org_id, MembershipType::Owner, &conn).await <= 1 {
                         warn!("Can't revoke the last owner");
                         false
                     } else {
@@ -73,27 +78,27 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn

                 let ext_modified = member.set_external_id(Some(user_data.external_id.clone()));
                 if revoked || ext_modified {
-                    member.save(&mut conn).await?;
+                    member.save(&conn).await?;
                 }
             }
         // If user is part of the organization, restore it
-        } else if let Some(mut member) = Membership::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await {
+        } else if let Some(mut member) = Membership::find_by_email_and_org(&user_data.email, &org_id, &conn).await {
             let restored = member.restore();
             let ext_modified = member.set_external_id(Some(user_data.external_id.clone()));
             if restored || ext_modified {
-                member.save(&mut conn).await?;
+                member.save(&conn).await?;
             }
         } else {
             // If user is not part of the organization
-            let user = match User::find_by_mail(&user_data.email, &mut conn).await {
+            let user = match User::find_by_mail(&user_data.email, &conn).await {
                 Some(user) => user, // exists in vaultwarden
                 None => {
                     // User does not exist yet
-                    let mut new_user = User::new(user_data.email.clone());
-                    new_user.save(&mut conn).await?;
+                    let mut new_user = User::new(&user_data.email, None);
+                    new_user.save(&conn).await?;

                     if !CONFIG.mail_enabled() {
-                        Invitation::new(&new_user.email).save(&mut conn).await?;
+                        Invitation::new(&new_user.email).save(&conn).await?;
                     }
                     user_created = true;
                     new_user
@@ -105,28 +110,28 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
                 MembershipStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
             };

-            let mut new_member = Membership::new(user.uuid.clone(), org_id.clone());
+            let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &conn).await {
+                Some(org) => (org.name, org.billing_email),
+                None => err!("Error looking up organization"),
+            };
+
+            let mut new_member = Membership::new(user.uuid.clone(), org_id.clone(), Some(org_email.clone()));
             new_member.set_external_id(Some(user_data.external_id.clone()));
             new_member.access_all = false;
             new_member.atype = MembershipType::User as i32;
             new_member.status = member_status;

-            new_member.save(&mut conn).await?;
+            new_member.save(&conn).await?;

             if CONFIG.mail_enabled() {
-                let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await {
-                    Some(org) => (org.name, org.billing_email),
-                    None => err!("Error looking up organization"),
-                };
-
                 if let Err(e) =
                     mail::send_invite(&user, org_id.clone(), new_member.uuid.clone(), &org_name, Some(org_email)).await
                 {
                     // Upon error delete the user, invite and org member records when needed
                     if user_created {
-                        user.delete(&mut conn).await?;
+                        user.delete(&conn).await?;
                     } else {
-                        new_member.delete(&mut conn).await?;
+                        new_member.delete(&conn).await?;
                     }

                     err!(format!("Error sending invite: {e:?} "));
@@ -137,8 +142,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn

     if CONFIG.org_groups_enabled() {
         for group_data in &data.groups {
-            let group_uuid = match Group::find_by_external_id_and_org(&group_data.external_id, &org_id, &mut conn).await
-            {
+            let group_uuid = match Group::find_by_external_id_and_org(&group_data.external_id, &org_id, &conn).await {
                 Some(group) => group.uuid,
                 None => {
                     let mut group = Group::new(
@@ -147,17 +151,17 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
                         false,
                         Some(group_data.external_id.clone()),
                     );
-                    group.save(&mut conn).await?;
+                    group.save(&conn).await?;
                     group.uuid
                 }
             };

-            GroupUser::delete_all_by_group(&group_uuid, &mut conn).await?;
+            GroupUser::delete_all_by_group(&group_uuid, &conn).await?;

             for ext_id in &group_data.member_external_ids {
-                if let Some(member) = Membership::find_by_external_id_and_org(ext_id, &org_id, &mut conn).await {
+                if let Some(member) = Membership::find_by_external_id_and_org(ext_id, &org_id, &conn).await {
                     let mut group_user = GroupUser::new(group_uuid.clone(), member.uuid.clone());
-                    group_user.save(&mut conn).await?;
+                    group_user.save(&conn).await?;
                 }
             }
         }
@@ -169,19 +173,18 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
     if data.overwrite_existing {
         // Generate a HashSet to quickly verify if a member is listed or not.
         let sync_members: HashSet<String> = data.members.into_iter().map(|m| m.external_id).collect();
-        for member in Membership::find_by_org(&org_id, &mut conn).await {
+        for member in Membership::find_by_org(&org_id, &conn).await {
             if let Some(ref user_external_id) = member.external_id {
                 if !sync_members.contains(user_external_id) {
                     if member.atype == MembershipType::Owner && member.status == MembershipStatus::Confirmed as i32 {
                         // Removing owner, check that there is at least one other confirmed owner
-                        if Membership::count_confirmed_by_org_and_type(&org_id, MembershipType::Owner, &mut conn).await
-                            <= 1
+                        if Membership::count_confirmed_by_org_and_type(&org_id, MembershipType::Owner, &conn).await <= 1
                         {
                             warn!("Can't delete the last owner");
                             continue;
                         }
                     }
-                    member.delete(&mut conn).await?;
+                    member.delete(&conn).await?;
                 }
             }
         }

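The `overwrite_existing` branch is a directory-sync sweep: collect the external ids present in the upload into a `HashSet`, then walk the existing memberships and drop anyone the set does not contain, skipping the last confirmed owner. A standalone sketch of the set-difference step:

    use std::collections::HashSet;

    // Standalone sketch of the sweep; `Member` is a stand-in record.
    struct Member {
        external_id: Option<String>,
        is_last_confirmed_owner: bool,
    }

    fn members_to_remove(synced_ids: &[String], existing: Vec<Member>) -> Vec<Member> {
        let sync_set: HashSet<&str> = synced_ids.iter().map(String::as_str).collect();
        existing
            .into_iter()
            .filter(|m| match &m.external_id {
                // Known externally but absent from the sync payload: remove,
                // unless that would leave the org without a confirmed owner.
                Some(id) => !sync_set.contains(id.as_str()) && !m.is_last_confirmed_owner,
                // Members without an external id were never synced; keep them.
                None => false,
            })
            .collect()
    }
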
@@ -1,26 +1,28 @@
-use std::path::Path;
-use std::time::Duration;
+use std::{path::Path, sync::LazyLock, time::Duration};

 use chrono::{DateTime, TimeDelta, Utc};
 use num_traits::ToPrimitive;
-use once_cell::sync::Lazy;
-use rocket::form::Form;
-use rocket::fs::NamedFile;
-use rocket::fs::TempFile;
-use rocket::serde::json::Json;
+use rocket::{
+    form::Form,
+    fs::{NamedFile, TempFile},
+    serde::json::Json,
+};
 use serde_json::Value;

 use crate::{
     api::{ApiResult, EmptyResult, JsonResult, Notify, UpdateType},
     auth::{ClientIp, Headers, Host},
     config::PathType,
-    db::{models::*, DbConn, DbPool},
+    db::{
+        models::{Device, OrgPolicy, OrgPolicyType, Send, SendFileId, SendId, SendType, UserId},
+        DbConn, DbPool,
+    },
     util::{save_temp_file, NumberOrString},
     CONFIG,
 };

 const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
-static ANON_PUSH_DEVICE: Lazy<Device> = Lazy::new(|| {
+static ANON_PUSH_DEVICE: LazyLock<Device> = LazyLock::new(|| {
     let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z");
     Device {
         uuid: String::from("00000000-0000-0000-0000-000000000000").into(),

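This hunk replaces `once_cell::sync::Lazy` with `std::sync::LazyLock`, stable since Rust 1.80, dropping the `once_cell` dependency for this static. The two are drop-in equivalents for a lazily initialized global:

    use std::sync::LazyLock;

    // Same shape as the ANON_PUSH_DEVICE static above, with a stand-in type.
    struct Device {
        uuid: String,
    }

    static ANON_DEVICE: LazyLock<Device> = LazyLock::new(|| Device {
        uuid: String::from("00000000-0000-0000-0000-000000000000"),
    });

    fn main() {
        // The initializer closure runs once, on first dereference.
        println!("{}", ANON_DEVICE.uuid);
    }
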
@@ -58,8 +60,8 @@ pub fn routes() -> Vec<rocket::Route> {

 pub async fn purge_sends(pool: DbPool) {
     debug!("Purging sends");
-    if let Ok(mut conn) = pool.get().await {
-        Send::purge(&mut conn).await;
+    if let Ok(conn) = pool.get().await {
+        Send::purge(&conn).await;
     } else {
         error!("Failed to get DB connection while purging sends")
     }
@@ -96,7 +98,7 @@ pub struct SendData {
 ///
 /// There is also a Vaultwarden-specific `sends_allowed` config setting that
 /// controls this policy globally.
-async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> EmptyResult {
+async fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult {
     let user_id = &headers.user.uuid;
     if !CONFIG.sends_allowed()
         || OrgPolicy::is_applicable_to_user(user_id, OrgPolicyType::DisableSend, None, conn).await
@@ -112,7 +114,7 @@ async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> EmptyResult {
 /// but is allowed to remove this option from an existing Send.
 ///
 /// Ref: https://bitwarden.com/help/article/policies/#send-options
-async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult {
+async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &DbConn) -> EmptyResult {
     let user_id = &headers.user.uuid;
     let hide_email = data.hide_email.unwrap_or(false);
     if hide_email && OrgPolicy::is_hide_email_disabled(user_id, conn).await {

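Both enforcement helpers are early-return guards: they either pass with `Ok(())` or abort the request with an error, so every handler can chain them with `?` before doing any real work. A minimal sketch of the shape (the config and policy lookups, and the message, are illustrative stand-ins):

    // Minimal guard sketch; message and lookups are illustrative only.
    type EmptyResult = Result<(), String>;

    fn enforce_disable_send_policy(sends_allowed: bool, policy_applies: bool) -> EmptyResult {
        if !sends_allowed || policy_applies {
            return Err("Sends are disabled by an organization policy".into());
        }
        Ok(())
    }

    fn post_send_handler() -> EmptyResult {
        enforce_disable_send_policy(true, false)?; // abort early when blocked
        // ...create the Send...
        Ok(())
    }
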
@@ -164,8 +166,8 @@ fn create_send(data: SendData, user_id: UserId) -> ApiResult<Send> {
 }

 #[get("/sends")]
-async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
-    let sends = Send::find_by_user(&headers.user.uuid, &mut conn);
+async fn get_sends(headers: Headers, conn: DbConn) -> Json<Value> {
+    let sends = Send::find_by_user(&headers.user.uuid, &conn);
     let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();

     Json(json!({
@@ -176,32 +178,32 @@ async fn get_sends(headers: Headers, mut conn: DbConn) -> Json<Value> {
 }

 #[get("/sends/<send_id>")]
-async fn get_send(send_id: SendId, headers: Headers, mut conn: DbConn) -> JsonResult {
-    match Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await {
+async fn get_send(send_id: SendId, headers: Headers, conn: DbConn) -> JsonResult {
+    match Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &conn).await {
         Some(send) => Ok(Json(send.to_json())),
         None => err!("Send not found", "Invalid send uuid or does not belong to user"),
     }
 }

 #[post("/sends", data = "<data>")]
-async fn post_send(data: Json<SendData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+async fn post_send(data: Json<SendData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
+    enforce_disable_send_policy(&headers, &conn).await?;

     let data: SendData = data.into_inner();
-    enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
+    enforce_disable_hide_email_policy(&data, &headers, &conn).await?;

     if data.r#type == SendType::File as i32 {
         err!("File sends should use /api/sends/file")
     }

     let mut send = create_send(data, headers.user.uuid)?;
-    send.save(&mut conn).await?;
+    send.save(&conn).await?;
     nt.send_send_update(
         UpdateType::SyncSendCreate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &headers.device,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -225,8 +227,8 @@ struct UploadDataV2<'f> {
 // 2025: This endpoint doesn't seem to exists anymore in the latest version
 // See: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/SendsController.cs
 #[post("/sends/file", format = "multipart/form-data", data = "<data>")]
-async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
+    enforce_disable_send_policy(&headers, &conn).await?;

     let UploadData {
         model,
@@ -241,12 +243,12 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn,
         err!("Send size can't be negative")
     }

-    enforce_disable_hide_email_policy(&model, &headers, &mut conn).await?;
+    enforce_disable_hide_email_policy(&model, &headers, &conn).await?;

     let size_limit = match CONFIG.user_send_limit() {
         Some(0) => err!("File uploads are disabled"),
         Some(limit_kb) => {
-            let Some(already_used) = Send::size_by_user(&headers.user.uuid, &mut conn).await else {
+            let Some(already_used) = Send::size_by_user(&headers.user.uuid, &conn).await else {
                 err!("Existing sends overflow")
             };
             let Some(left) = limit_kb.checked_mul(1024).and_then(|l| l.checked_sub(already_used)) else {

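The size-limit check is written entirely with checked arithmetic: the per-user limit in KiB is scaled to bytes via `checked_mul`, and the bytes already used are subtracted via `checked_sub`, so an overflow surfaces as an error instead of silently wrapping. A standalone version of the computation:

    // Standalone version of the quota math above (limit in KiB, usage in bytes).
    fn remaining_bytes(limit_kb: i64, already_used: i64) -> Option<i64> {
        limit_kb.checked_mul(1024)?.checked_sub(already_used)
    }

    fn main() {
        assert_eq!(remaining_bytes(10, 1024), Some(9 * 1024));
        // An overflowing limit is reported as None rather than wrapping.
        assert_eq!(remaining_bytes(i64::MAX, 0), None);
    }
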
@@ -271,7 +273,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn,

     let file_id = crate::crypto::generate_send_file_id();

-    save_temp_file(PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;
+    save_temp_file(&PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;

     let mut data_value: Value = serde_json::from_str(&send.data)?;
     if let Some(o) = data_value.as_object_mut() {
@@ -282,13 +284,13 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn,
     send.data = serde_json::to_string(&data_value)?;

     // Save the changes in the database
-    send.save(&mut conn).await?;
+    send.save(&conn).await?;
     nt.send_send_update(
         UpdateType::SyncSendCreate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &headers.device,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -297,8 +299,8 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn: DbConn,

 // Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/SendsController.cs#L165
 #[post("/sends/file/v2", data = "<data>")]
-async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn) -> JsonResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+async fn post_send_file_v2(data: Json<SendData>, headers: Headers, conn: DbConn) -> JsonResult {
    enforce_disable_send_policy(&headers, &conn).await?;

     let data = data.into_inner();

@@ -306,7 +308,7 @@ async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn
         err!("Send content is not a file");
     }

-    enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
+    enforce_disable_hide_email_policy(&data, &headers, &conn).await?;

     let file_length = match &data.file_length {
         Some(m) => m.into_i64()?,
@@ -319,7 +321,7 @@ async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn
     let size_limit = match CONFIG.user_send_limit() {
         Some(0) => err!("File uploads are disabled"),
         Some(limit_kb) => {
-            let Some(already_used) = Send::size_by_user(&headers.user.uuid, &mut conn).await else {
+            let Some(already_used) = Send::size_by_user(&headers.user.uuid, &conn).await else {
                 err!("Existing sends overflow")
             };
             let Some(left) = limit_kb.checked_mul(1024).and_then(|l| l.checked_sub(already_used)) else {
@@ -348,7 +350,7 @@ async fn post_send_file_v2(data: Json<SendData>, headers: Headers, mut conn: DbConn
         o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(file_length)));
     }
     send.data = serde_json::to_string(&data_value)?;
-    send.save(&mut conn).await?;
+    send.save(&conn).await?;

     Ok(Json(json!({
         "fileUploadType": 0, // 0 == Direct | 1 == Azure
@@ -373,14 +375,14 @@ async fn post_send_file_v2_data(
     file_id: SendFileId,
     data: Form<UploadDataV2<'_>>,
     headers: Headers,
-    mut conn: DbConn,
+    conn: DbConn,
     nt: Notify<'_>,
 ) -> EmptyResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+    enforce_disable_send_policy(&headers, &conn).await?;

     let data = data.into_inner();

-    let Some(send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await else {
+    let Some(send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &conn).await else {
         err!("Send not found. Unable to save the file.", "Invalid send uuid or does not belong to user.")
     };

@@ -423,14 +425,14 @@ async fn post_send_file_v2_data(

     let file_path = format!("{send_id}/{file_id}");

-    save_temp_file(PathType::Sends, &file_path, data.data, false).await?;
+    save_temp_file(&PathType::Sends, &file_path, data.data, false).await?;

     nt.send_send_update(
         UpdateType::SyncSendCreate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &headers.device,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -447,11 +449,11 @@ pub struct SendAccessData {
 async fn post_access(
     access_id: &str,
     data: Json<SendAccessData>,
-    mut conn: DbConn,
+    conn: DbConn,
     ip: ClientIp,
     nt: Notify<'_>,
 ) -> JsonResult {
-    let Some(mut send) = Send::find_by_access_id(access_id, &mut conn).await else {
+    let Some(mut send) = Send::find_by_access_id(access_id, &conn).await else {
         err_code!(SEND_INACCESSIBLE_MSG, 404)
     };

@@ -488,18 +490,18 @@ async fn post_access(
         send.access_count += 1;
     }

-    send.save(&mut conn).await?;
+    send.save(&conn).await?;

     nt.send_send_update(
         UpdateType::SyncSendUpdate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &ANON_PUSH_DEVICE,
-        &mut conn,
+        &conn,
     )
     .await;

-    Ok(Json(send.to_json_access(&mut conn).await))
+    Ok(Json(send.to_json_access(&conn).await))
 }

 #[post("/sends/<send_id>/access/file/<file_id>", data = "<data>")]
@@ -508,10 +510,10 @@ async fn post_access_file(
     file_id: SendFileId,
     data: Json<SendAccessData>,
     host: Host,
-    mut conn: DbConn,
+    conn: DbConn,
     nt: Notify<'_>,
 ) -> JsonResult {
-    let Some(mut send) = Send::find_by_uuid(&send_id, &mut conn).await else {
+    let Some(mut send) = Send::find_by_uuid(&send_id, &conn).await else {
         err_code!(SEND_INACCESSIBLE_MSG, 404)
     };

@@ -545,14 +547,14 @@ async fn post_access_file(

     send.access_count += 1;

-    send.save(&mut conn).await?;
+    send.save(&conn).await?;

     nt.send_send_update(
         UpdateType::SyncSendUpdate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &ANON_PUSH_DEVICE,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -564,9 +566,9 @@ async fn post_access_file(
 }

 async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> {
-    let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?;
+    let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;

-    if operator.info().scheme() == opendal::Scheme::Fs {
+    if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) {
         let token_claims = crate::auth::generate_send_claims(send_id, file_id);
         let token = crate::auth::encode_jwt(&token_claims);

@@ -587,23 +589,17 @@ async fn download_send(send_id: SendId, file_id: SendFileId, t: &str) -> Option<
 }

 #[put("/sends/<send_id>", data = "<data>")]
-async fn put_send(
-    send_id: SendId,
-    data: Json<SendData>,
-    headers: Headers,
-    mut conn: DbConn,
-    nt: Notify<'_>,
-) -> JsonResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+async fn put_send(send_id: SendId, data: Json<SendData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
+    enforce_disable_send_policy(&headers, &conn).await?;

     let data: SendData = data.into_inner();
-    enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?;
+    enforce_disable_hide_email_policy(&data, &headers, &conn).await?;

-    let Some(mut send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await else {
+    let Some(mut send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &conn).await else {
         err!("Send not found", "Send send_id is invalid or does not belong to user")
     };

-    update_send_from_data(&mut send, data, &headers, &mut conn, &nt, UpdateType::SyncSendUpdate).await?;
+    update_send_from_data(&mut send, data, &headers, &conn, &nt, UpdateType::SyncSendUpdate).await?;

     Ok(Json(send.to_json()))
 }
@@ -612,7 +608,7 @@ pub async fn update_send_from_data(
     send: &mut Send,
     data: SendData,
     headers: &Headers,
-    conn: &mut DbConn,
+    conn: &DbConn,
     nt: &Notify<'_>,
     ut: UpdateType,
 ) -> EmptyResult {
@@ -667,18 +663,18 @@ pub async fn update_send_from_data(
 }

 #[delete("/sends/<send_id>")]
-async fn delete_send(send_id: SendId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
-    let Some(send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await else {
+async fn delete_send(send_id: SendId, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+    let Some(send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &conn).await else {
         err!("Send not found", "Invalid send uuid, or does not belong to user")
     };

-    send.delete(&mut conn).await?;
+    send.delete(&conn).await?;
     nt.send_send_update(
         UpdateType::SyncSendDelete,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &headers.device,
-        &mut conn,
+        &conn,
     )
     .await;

@@ -686,21 +682,21 @@ async fn delete_send(send_id: SendId, headers: Headers, mut conn: DbConn, nt: Notify<'_>)
 }

 #[put("/sends/<send_id>/remove-password")]
-async fn put_remove_password(send_id: SendId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
-    enforce_disable_send_policy(&headers, &mut conn).await?;
+async fn put_remove_password(send_id: SendId, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult {
+    enforce_disable_send_policy(&headers, &conn).await?;

-    let Some(mut send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await else {
+    let Some(mut send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &conn).await else {
         err!("Send not found", "Invalid send uuid, or does not belong to user")
     };

     send.set_password(None);
-    send.save(&mut conn).await?;
+    send.save(&conn).await?;
     nt.send_send_update(
         UpdateType::SyncSendUpdate,
         &send,
-        &send.update_users_revision(&mut conn).await,
+        &send.update_users_revision(&conn).await,
         &headers.device,
-        &mut conn,
+        &conn,
     )
     .await;

Some files were not shown because too many files have changed in this diff