mirror of https://github.com/dani-garcia/vaultwarden.git
synced 2025-09-09 18:25:58 +03:00

Compare commits: 1.33.1 ... 8e7eeab293 (85 commits)
Commits (SHA1, newest first):
8e7eeab293, e35c6f8705, ae7b725c0f, 2a5489a4b2, 8fd0ee4211, 4a5516e150,
7fc94516ce, 5ea0779d6b, a133d4e90c, 49eff787de, cff6c2b3af, a0c76284fd,
318653b0e5, 5d84f17600, 0db4b00007, a0198d8d7c, dfad931dca, 25865efd79,
bcf627930e, ce70cd2cf4, 2ac589d4b4, b2e2aef7de, 0755bb19c0, fee0c1c711,
f58539f0b4, e718afb441, 55945ad793, 4fd22d8e3b, d6a8fb8e48, 3b48e6e903,
6b9333b33e, a545636ee5, f125d5f1a1, ad75ce281e, 9059437c35, c84db0daca,
72adc239f5, 34ebeeca76, 0469d9ba4c, eaa6ad06ed, 0d3f283c37, 51a1d641c5,
90f7e5ff80, 200999c94e, d363e647e9, 53f58b14d5, ef7835d1b0, 3a44dc963b,
a039e227c7, 602b18fdd6, bf04c64759, 2f1d86b7f1, ff97bcfdda, 73f2441d1a,
ad8484a2d5, 9813e480c0, bfe172702a, df42b6d6b0, 2697fe8aba, 674e444d67,
0d16da440d, 66cf179bca, 025bb90f8f, d5039d9c17, e7c796a660, bbbd2f6d15,
a2d7895586, 8a0cb1137e, f960bf59bb, 3a1f1bae00, 8dfe805954, 07b869b3ef,
2a18665288, 71952a4ab5, 994d157064, 1dae6093c9, 6edceb5f7a, 359a4a088a,
3baffeee9a, d5c353427d, 1f868b8d22, 8d1df08b81, 3b6bccde97, d2b36642a6,
a02fb0fd24
.env.template (102 lines changed)
@@ -15,6 +15,14 @@
 ####################
 
 ## Main data folder
+## This can be a path to local folder or a path to an external location
+## depending on features enabled at build time. Possible external locations:
+##
+## - AWS S3 Bucket (via `s3` feature): s3://bucket-name/path/to/folder
+##
+## When using an external location, make sure to set TMP_FOLDER,
+## TEMPLATES_FOLDER, and DATABASE_URL to local paths and/or a remote database
+## location.
 # DATA_FOLDER=data
 
 ## Individual folders, these override %DATA_FOLDER%
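A sketch of how the new external-location support fits together, assuming a build with the `s3` feature enabled; the bucket name, paths, and database URI below are placeholders, not values from this diff:

# Store the main data folder in S3, keep temp/templates local, use a remote DB:
DATA_FOLDER=s3://my-vaultwarden-bucket/vaultwarden
TMP_FOLDER=/var/lib/vaultwarden/tmp
TEMPLATES_FOLDER=/var/lib/vaultwarden/templates
DATABASE_URL=postgresql://user:password@db.example.com:5432/vaultwarden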
@@ -22,10 +30,13 @@
 # ICON_CACHE_FOLDER=data/icon_cache
 # ATTACHMENTS_FOLDER=data/attachments
 # SENDS_FOLDER=data/sends
 
 ## Temporary folder used for storing temporary file uploads
+## Must be a local path.
 # TMP_FOLDER=data/tmp
 
-## Templates data folder, by default uses embedded templates
-## Check source code to see the format
+## HTML template overrides data folder
+## Must be a local path.
 # TEMPLATES_FOLDER=data/templates
 ## Automatically reload the templates for every request, slow, use only for development
 # RELOAD_TEMPLATES=false
@@ -39,7 +50,9 @@
 #########################
 
 ## Database URL
-## When using SQLite, this is the path to the DB file, default to %DATA_FOLDER%/db.sqlite3
+## When using SQLite, this is the path to the DB file, and it defaults to
+## %DATA_FOLDER%/db.sqlite3. If DATA_FOLDER is set to an external location, this
+## must be set to a local sqlite3 file path.
 # DATABASE_URL=data/db.sqlite3
 ## When using MySQL, specify an appropriate connection URI.
 ## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html
@@ -117,7 +130,7 @@
 ## and are always in terms of UTC time (regardless of your local time zone settings).
 ##
 ## The schedule format is a bit different from crontab as crontab does not contains seconds.
-## You can test the the format here: https://crontab.guru, but remove the first digit!
+## You can test the format here: https://crontab.guru, but remove the first digit!
 ## SEC  MIN  HOUR  DAY OF MONTH  MONTH  DAY OF WEEK
 ## "0   30   9,12,15   1,15      May-Aug   Mon,Wed,Fri"
 ## "0   30   *   *   *   *"
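Put differently, the only difference from standard crontab syntax is the extra leading seconds field. For example, a crontab.guru expression such as `30 9 * * Mon` (09:30 UTC every Monday) would be written here as:

SEND_PURGE_SCHEDULE="0 30 9 * * Mon"

(`SEND_PURGE_SCHEDULE` is used purely as an illustrative job setting here; all of the `*_SCHEDULE` values in this file use the same six-field format.)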
@@ -161,6 +174,10 @@
 ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
 ## Defaults to every minute. Set blank to disable this job.
 # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
+#
+## Cron schedule of the job that cleans sso nonce from incomplete flow
+## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
+# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *"
 
 ########################
 ### General settings ###
@@ -229,7 +246,8 @@
 # SIGNUPS_ALLOWED=true
 
 ## Controls if new users need to verify their email address upon registration
-## Note that setting this option to true prevents logins until the email address has been verified!
+## On new client versions, this will require the user to verify their email at signup time.
+## On older clients, it will require the user to verify their email before they can log in.
 ## The welcome email will include a verification link, and login attempts will periodically
 ## trigger another verification email to be sent.
 # SIGNUPS_VERIFY=false
@@ -259,7 +277,7 @@
 ## A comma-separated list means only those users can create orgs:
 # ORG_CREATION_USERS=admin1@example.com,admin2@example.com
 
-## Invitations org admins to invite users, even when signups are disabled
+## Allows org admins to invite users, even when signups are disabled
 # INVITATIONS_ALLOWED=true
 ## Name shown in the invitation emails that don't come from a specific organization
 # INVITATION_ORG_NAME=Vaultwarden
@@ -327,32 +345,33 @@
 
 ## Icon download timeout
 ## Configure the timeout value when downloading the favicons.
-## The default is 10 seconds, but this could be to low on slower network connections
+## The default is 10 seconds, but this could be too low on slower network connections
 # ICON_DOWNLOAD_TIMEOUT=10
 
 ## Block HTTP domains/IPs by Regex
 ## Any domains or IPs that match this regex won't be fetched by the internal HTTP client.
 ## Useful to hide other servers in the local network. Check the WIKI for more details
-## NOTE: Always enclose this regex withing single quotes!
+## NOTE: Always enclose this regex within single quotes!
 # HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$'
 
-## Enabling this will cause the internal HTTP client to refuse to connect to any non global IP address.
+## Enabling this will cause the internal HTTP client to refuse to connect to any non-global IP address.
 ## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
 # HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true
 
 ## Client Settings
 ## Enable experimental feature flags for clients.
 ## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3".
 ## Note that clients cache the /api/config endpoint for about 1 hour and it could take some time before they are enabled or disabled!
 ##
 ## The following flags are available:
-## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials.
-## - "autofill-v2": Use the new autofill implementation.
-## - "browser-fileless-import": Directly import credentials from other providers without a file.
-## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension)
-## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor.
 ## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension.
-## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
 ## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
 ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
+## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
+## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
+## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
+## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
+## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0)
 # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
 
 ## Require new device emails. When a user logs in an email is required to be sent.
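For example, to try the SSH-related features together with attachment export on sufficiently new clients, the flag list could be set like this (an illustrative combination, not a recommendation from this changeset):

EXPERIMENTAL_CLIENT_FEATURE_FLAGS=ssh-key-vault-item,ssh-agent,export-attachments

Keep in mind the note above: clients cache `/api/config` for about an hour, so a flag change may take a while to become visible.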
@@ -444,6 +463,55 @@
 ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
 # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false
 
+#####################################
+### SSO settings (OpenID Connect) ###
+#####################################
+
+## Controls whether users can login using an OpenID Connect identity provider
+# SSO_ENABLED=false
+
+## Prevent users from logging in directly without going through SSO
+# SSO_ONLY=false
+
+## On SSO Signup if a user with a matching email already exists make the association
+# SSO_SIGNUPS_MATCH_EMAIL=true
+
+## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` open potential account takeover.
+# SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false
+
+## Base URL of the OIDC server (auto-discovery is used)
+##  - Should not include the `/.well-known/openid-configuration` part and no trailing `/`
+##  - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse
+# SSO_AUTHORITY=https://auth.example.com
+
+## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit).
+# SSO_SCOPES="email profile"
+
+## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth).
+# SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent"
+
+## Activate PKCE for the Auth Code flow.
+# SSO_PKCE=true
+
+## Regex for additional trusted Id token audience (by default only the client_id is trusted).
+# SSO_AUDIENCE_TRUSTED='^$'
+
+## Set your Client ID and Client Key
+# SSO_CLIENT_ID=11111
+# SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA
+
+## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment.
+# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}'
+
+## Use sso only for authentication not the session lifecycle
+# SSO_AUTH_ONLY_NOT_SESSION=false
+
+## Client cache for discovery endpoint. Duration in seconds (0 to disable).
+# SSO_CLIENT_CACHE_EXPIRATION=0
+
+## Log all the tokens, LOG_LEVEL=debug is required
+# SSO_DEBUG_TOKENS=false
+
 ########################
 ### MFA/2FA settings ###
 ########################
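A minimal sketch of enabling the new SSO block against a hypothetical OIDC provider; every value below is a placeholder, and auto-discovery means the authority URL must serve `/.well-known/openid-configuration`:

SSO_ENABLED=true
SSO_AUTHORITY=https://auth.example.com/realms/example
SSO_CLIENT_ID=vaultwarden
SSO_CLIENT_SECRET=change-me
# Optionally force every login through the identity provider:
# SSO_ONLY=true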
@@ -486,7 +554,7 @@
 ## Maximum attempts before an email token is reset and a new email will need to be sent.
 # EMAIL_ATTEMPTS_LIMIT=3
 ##
-## Setup email 2FA regardless of any organization policy
+## Setup email 2FA on registration regardless of any organization policy
 # EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false
 ## Automatically setup email 2FA as fallback provider when needed
 # EMAIL_2FA_AUTO_FALLBACK=false
@@ -503,7 +571,7 @@
 ##
 ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238),
 ## we allow by default the TOTP code which was valid one step back and one in the future.
-## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes.
+## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes.
 ## You can disable this, so that only the current TOTP Code is allowed.
 ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid.
 ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid.
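As a worked example of the window: with the standard 30-second TOTP step, a code checked at 12:00:40 UTC is accepted if it belongs to the step starting at 12:00:00, 12:00:30, or 12:01:00 — exactly the "3 valid codes" mentioned above. In the full template these comments document the time-drift toggle (presumably the `AUTHENTICATOR_DISABLE_TIME_DRIFT` setting, which is not shown in this hunk), e.g.:

# Only accept the code for the current time step:
# AUTHENTICATOR_DISABLE_TIME_DRIFT=true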
.github/CODEOWNERS (vendored, 3 lines changed)

@@ -1,3 +1,6 @@
-/.github @dani-garcia @BlackDex
+/.github/** @dani-garcia @BlackDex
 /.github/CODEOWNERS @dani-garcia @BlackDex
+/.github/ISSUE_TEMPLATE/** @dani-garcia @BlackDex
 /.github/workflows/** @dani-garcia @BlackDex
+/SECURITY.md @dani-garcia @BlackDex
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 25 lines changed)

@@ -8,15 +8,30 @@ body:
       value: |
         Thanks for taking the time to fill out this bug report!
 
-        Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here.
+        Please **do not** submit feature requests or ask for help on how to configure Vaultwarden here!
 
         The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas.
 
+        Our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki/) has topics on how to configure Vaultwarden.
+
         Also, make sure you are running [](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden!
-        And search for existing open or closed issues or discussions regarding your topic before posting.
 
         Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors!
         See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page).
+
+        > [!IMPORTANT]
+        > ## :bangbang: Search for existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) regarding your topic before posting! :bangbang:
   #
+  - type: checkboxes
+    id: checklist
+    attributes:
+      label: Prerequisites
+      description: Please confirm you have completed the following before submitting an issue!
+      options:
+        - label: I have searched the existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=)
+          required: true
+        - label: I have searched and read the [documentation](https://github.com/dani-garcia/vaultwarden/wiki/)
+          required: true
+  #
   - id: support-string
     type: textarea

@@ -36,7 +51,7 @@ body:
     attributes:
       label: Vaultwarden Build Version
       description: What version of Vaultwarden are you running?
-      placeholder: ex. v1.31.0 or v1.32.0-3466a804
+      placeholder: ex. v1.34.0 or v1.34.1-53f58b14
     validations:
       required: true
   #

@@ -67,7 +82,7 @@ body:
     attributes:
       label: Reverse Proxy
       description: Are you using a reverse proxy, if so which and what version?
-      placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0
+      placeholder: ex. nginx 1.29.0, caddy 2.10.0, traefik 3.4.4, haproxy 3.2
     validations:
       required: true
   #

@@ -115,7 +130,7 @@ body:
     attributes:
       label: Client Version
       description: What version(s) of the client(s) are you seeing the problem on?
-      placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0
+      placeholder: ex. CLI v2025.7.0, Firefox 140 - v2025.6.1
   #
   - id: reproduce
     type: textarea
.github/workflows/build.yml (vendored, 86 lines changed)

@@ -1,4 +1,5 @@
 name: Build
+permissions: {}
 
 on:
   push:

@@ -13,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
+      - "docker/DockerSettings.yaml"
 
   pull_request:
     paths:
       - ".github/workflows/build.yml"

@@ -28,13 +30,17 @@ on:
 
 jobs:
   build:
+    name: Build and Test ${{ matrix.channel }}
+    permissions:
+      actions: write
+      contents: read
     # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
     runs-on: ubuntu-22.04
     timeout-minutes: 120
    # Make warnings errors, this is to prevent warnings slipping through.
    # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
     env:
-      RUSTFLAGS: "-D warnings"
+      RUSTFLAGS: "-Dwarnings"
     strategy:
       fail-fast: false
       matrix:
@@ -42,32 +48,33 @@ jobs:
         - "rust-toolchain" # The version defined in rust-toolchain
         - "msrv" # The supported MSRV
 
-    name: Build and Test ${{ matrix.channel }}
-
     steps:
-      # Checkout the repo
-      - name: "Checkout"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo
-
       # Install dependencies
       - name: "Install dependencies Ubuntu"
         run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
       # End Install dependencies
 
+      # Checkout the repo
+      - name: "Checkout"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+      # End Checkout the repo
 
       # Determine rust-toolchain version
       - name: Init Variables
         id: toolchain
         shell: bash
+        env:
+          CHANNEL: ${{ matrix.channel }}
         run: |
-          if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then
+          if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then
             RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
-          elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then
+          elif [[ "${CHANNEL}" == 'msrv' ]]; then
             RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
           else
-            RUST_TOOLCHAIN="${{ matrix.channel }}"
+            RUST_TOOLCHAIN="${CHANNEL}"
           fi
           echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
       # End Determine rust-toolchain version
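The recurring change in these workflow hunks — moving `${{ ... }}` expressions out of `run:` scripts and into step-level `env:` entries — is an injection hardening: an expression is expanded into the script text before the shell ever parses it, while an environment variable is plain quoted data. A minimal sketch of the pattern, using a hypothetical step with an untrusted value:

# Risky: the expression is spliced into the script before bash runs it
- name: Greet (unsafe)
  run: echo "Hello ${{ github.event.pull_request.title }}"

# Safer: pass the value through the environment and expand it as quoted data
- name: Greet (hardened)
  env:
    PR_TITLE: ${{ github.event.pull_request.title }}
  run: echo "Hello ${PR_TITLE}"

A matrix value like `matrix.channel` is not attacker-controlled, but applying the pattern uniformly satisfies static analyzers such as zizmor (added as a workflow later in this changeset) and avoids future footguns.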
@@ -75,7 +82,7 @@ jobs:
 
     # Only install the clippy and rustfmt components on the default rust-toolchain
     - name: "Install rust-toolchain version"
-      uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+      uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
      if: ${{ matrix.channel == 'rust-toolchain' }}
      with:
        toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"

@@ -85,7 +92,7 @@ jobs:
 
    # Install the any other channel to be used for which we do not execute clippy and rustfmt
    - name: "Install MSRV version"
-      uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+      uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
      if: ${{ matrix.channel != 'rust-toolchain' }}
      with:
        toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"

@@ -93,11 +100,13 @@ jobs:
 
    # Set the current matrix toolchain version as default
    - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
+      env:
+        RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
      run: |
        # Remove the rust-toolchain.toml
        rm rust-toolchain.toml
        # Set the default
-        rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
+        rustup default "${RUST_TOOLCHAIN}"
 
    # Show environment
    - name: "Show environment"

@@ -108,7 +117,7 @@ jobs:
 
    # Enable Rust Caching
    - name: Rust Caching
-      uses: Swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2.7.7
+      uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
      with:
        # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
        # Like changing the build host from Ubuntu 20.04 to 22.04 for example.

@@ -161,7 +170,7 @@ jobs:
      id: clippy
      if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
      run: |
-        cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
+        cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
    # End Run cargo clippy

@@ -178,22 +187,31 @@ jobs:
    # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
    - name: "Some checks failed"
      if: ${{ failure() }}
+      env:
+        TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
+        TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
+        TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
+        TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
+        TEST_MYSQL: ${{ steps.test_mysql.outcome }}
+        TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
+        CLIPPY: ${{ steps.clippy.outcome }}
+        FMT: ${{ steps.formatting.outcome }}
      run: |
-        echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY
-        echo "" >> $GITHUB_STEP_SUMMARY
-        echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
-        echo "|---|------|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY
-        echo "" >> $GITHUB_STEP_SUMMARY
-        echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY
-        echo "" >> $GITHUB_STEP_SUMMARY
+        echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
+        echo "" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
+        echo "" >> "${GITHUB_STEP_SUMMARY}"
+        echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
+        echo "" >> "${GITHUB_STEP_SUMMARY}"
        exit 1

@@ -202,5 +220,5 @@ jobs:
    - name: "All checks passed"
      if: ${{ success() }}
      run: |
-        echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY
-        echo "" >> $GITHUB_STEP_SUMMARY
+        echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
+        echo "" >> "${GITHUB_STEP_SUMMARY}"
.github/workflows/check-templates.yml (vendored, new file, 29 lines)

@@ -0,0 +1,29 @@
+name: Check templates
+permissions: {}
+
+on: [ push, pull_request ]
+
+jobs:
+  docker-templates:
+    name: Validate docker templates
+    permissions:
+      contents: read
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
+
+    steps:
+      # Checkout the repo
+      - name: "Checkout"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+      # End Checkout the repo
+
+      - name: Run make to rebuild templates
+        working-directory: docker
+        run: make
+
+      - name: Check for unstaged changes
+        working-directory: docker
+        run: git diff --exit-code
+        continue-on-error: false
.github/workflows/hadolint.yml (vendored, 22 lines changed)

@@ -1,24 +1,20 @@
 name: Hadolint
+permissions: {}
 
-on: [
-  push,
-  pull_request
-]
+on: [ push, pull_request ]
 
 jobs:
   hadolint:
     name: Validate Dockerfile syntax
+    permissions:
+      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
-    steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo
 
+    steps:
       # Start Docker Buildx
       - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
         # https://github.com/moby/buildkit/issues/3969
         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
         with:

@@ -37,6 +33,12 @@ jobs:
         env:
           HADOLINT_VERSION: 2.12.0
       # End Download hadolint
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+      # End Checkout the repo
 
       # Test Dockerfiles with hadolint
       - name: Run hadolint
.github/workflows/release.yml (vendored, 112 lines changed)

@@ -1,4 +1,5 @@
 name: Release
+permissions: {}
 
 on:
   push:

@@ -6,17 +7,23 @@ on:
       - main
 
     tags:
-      - '*'
+      # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
+      - '[1-2].[0-9]+.[0-9]+'
 
 jobs:
   # https://github.com/marketplace/actions/skip-duplicate-actions
   # Some checks to determine if we need to continue with building a new docker.
   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
   skip_check:
-    runs-on: ubuntu-24.04
     # Only run this in the upstream repo and not on forks
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Cancel older jobs when running
+    permissions:
+      actions: write
+    runs-on: ubuntu-24.04
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
 
     steps:
       - name: Skip Duplicates Actions
         id: skip_check

@@ -27,6 +34,9 @@ jobs:
         if: ${{ github.ref_type == 'branch' }}
 
   docker-build:
+    needs: skip_check
+    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
+    name: Build Vaultwarden containers
     permissions:
       packages: write
       contents: read

@@ -34,12 +44,10 @@ jobs:
       id-token: write
     runs-on: ubuntu-24.04
     timeout-minutes: 120
-    needs: skip_check
-    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
     services:
       registry:
-        image: registry:2
+        image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0
         ports:
           - 5000:5000
     env:

@@ -61,37 +69,42 @@ jobs:
         base_image: ["debian","alpine"]
 
     steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          fetch-depth: 0
-
       - name: Initialize QEMU binfmt support
-        uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
         with:
           platforms: "arm64,arm"
 
       # Start Docker Buildx
       - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
         # https://github.com/moby/buildkit/issues/3969
         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
         with:
           cache-binary: false
           buildkitd-config-inline: |
             [worker.oci]
               max-parallelism = 2
           driver-opts: |
             network=host
 
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        # We need fetch-depth of 0 so we also get all the tag metadata
+        with:
+          persist-credentials: false
+          fetch-depth: 0
 
       # Determine Base Tags and Source Version
       - name: Determine Base Tags and Source Version
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
           fi

@@ -107,7 +120,7 @@ jobs:
 
       # Login to Docker Hub
       - name: Login to Docker Hub
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

@@ -116,12 +129,14 @@ jobs:
       - name: Add registry for DockerHub
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
         shell: bash
+        env:
+          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
 
       # Login to GitHub Container Registry
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -131,12 +146,14 @@ jobs:
       - name: Add registry for ghcr.io
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
 
       # Login to Quay.io
       - name: Login to Quay.io
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: quay.io
           username: ${{ secrets.QUAY_USERNAME }}

@@ -146,17 +163,22 @@ jobs:
       - name: Add registry for Quay.io
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
         shell: bash
+        env:
+          QUAY_REPO: ${{ vars.QUAY_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
 
       - name: Configure build cache from/to
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
+          BASE_IMAGE: ${{ matrix.base_image }}
         run: |
           #
           # Check if there is a GitHub Container Registry Login and use it for caching
           if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
-            echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}"
-            echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
           else
             echo "BAKE_CACHE_FROM="
             echo "BAKE_CACHE_TO="

@@ -170,7 +192,7 @@ jobs:
 
       - name: Bake ${{ matrix.base_image }} containers
         id: bake_vw
-        uses: docker/bake-action@5ca506d06f70338a4968df87fd8bfee5cbfb84c7 # v6.0.0
+        uses: docker/bake-action@37816e747588cb137173af99ab33873600c46ea8 # v6.8.0
         env:
           BASE_TAGS: "${{ env.BASE_TAGS }}"
           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -189,14 +211,17 @@ jobs:
 
       - name: Extract digest SHA
         shell: bash
+        env:
+          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
+          BASE_IMAGE: ${{ matrix.base_image }}
         run: |
-          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< '${{ steps.bake_vw.outputs.metadata }}')"
+          GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
 
       # Attest container images
       - name: Attest - docker.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-name: ${{ vars.DOCKERHUB_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
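To see the rewritten jq call in isolation, here is a sketch with hypothetical bake metadata; `--arg` passes the image name into the jq program as a variable, mirroring how the `env:` entries keep untrusted expansions out of the shell script text:

BASE_IMAGE=alpine
BAKE_METADATA='{"alpine-multi":{"containerimage.digest":"sha256:abc123"}}'
jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}"
# prints: sha256:abc123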
@@ -204,7 +229,7 @@ jobs:
 
       - name: Attest - ghcr.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-name: ${{ vars.GHCR_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}

@@ -212,7 +237,7 @@ jobs:
 
       - name: Attest - quay.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-name: ${{ vars.QUAY_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}

@@ -222,16 +247,19 @@ jobs:
       # Extract the Alpine binaries from the containers
       - name: Extract binaries
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
+          BASE_IMAGE: ${{ matrix.base_image }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             EXTRACT_TAG="latest"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             EXTRACT_TAG="testing"
           fi
 
           # Check which base_image was used and append -alpine if needed
-          if [[ "${{ matrix.base_image }}" == "alpine" ]]; then
+          if [[ "${BASE_IMAGE}" == "alpine" ]]; then
             EXTRACT_TAG="${EXTRACT_TAG}-alpine"
           fi

@@ -240,55 +268,55 @@ jobs:
 
           # Extract amd64 binary
           docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp amd64:/vaultwarden vaultwarden-amd64-${{ matrix.base_image }}
+          docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE}
           docker rm --force amd64
           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
 
           # Extract arm64 binary
           docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp arm64:/vaultwarden vaultwarden-arm64-${{ matrix.base_image }}
+          docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE}
           docker rm --force arm64
           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
 
           # Extract armv7 binary
           docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp armv7:/vaultwarden vaultwarden-armv7-${{ matrix.base_image }}
+          docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE}
           docker rm --force armv7
           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
 
           # Extract armv6 binary
           docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
-          docker cp armv6:/vaultwarden vaultwarden-armv6-${{ matrix.base_image }}
+          docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE}
           docker rm --force armv6
           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
 
       # Upload artifacts to Github Actions and Attest the binaries
       - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
           path: vaultwarden-amd64-${{ matrix.base_image }}
 
       - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
           path: vaultwarden-arm64-${{ matrix.base_image }}
 
       - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
           path: vaultwarden-armv7-${{ matrix.base_image }}
 
       - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
           path: vaultwarden-armv6-${{ matrix.base_image }}
 
       - name: "Attest artifacts ${{ matrix.base_image }}"
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
           subject-path: vaultwarden-*
       # End Upload artifacts to Github Actions
.github/workflows/releasecache-cleanup.yml (vendored, 6 lines changed)

@@ -1,3 +1,6 @@
+name: Cleanup
+permissions: {}
+
 on:
   workflow_dispatch:
     inputs:

@@ -9,10 +12,11 @@ on:
   schedule:
     - cron: '0 1 * * FRI'
 
-name: Cleanup
 jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
+    permissions:
+      packages: write
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30
.github/workflows/trivy.yml (vendored, 29 lines changed)

@@ -1,37 +1,42 @@
-name: trivy
+name: Trivy
+permissions: {}
 
 on:
   push:
     branches:
       - main
 
     tags:
       - '*'
 
   pull_request:
-    branches: [ "main" ]
+    branches:
+      - main
 
   schedule:
     - cron: '08 11 * * *'
 
-permissions:
-  contents: read
-
 jobs:
   trivy-scan:
-    # Only run this in the master repo and not on forks
+    # Only run this in the upstream repo and not on forks
     # When all forks run this at the same time, it is causing `Too Many Requests` issues
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
-    name: Check
-    runs-on: ubuntu-24.04
-    timeout-minutes: 30
+    name: Trivy Scan
     permissions:
       contents: read
-      security-events: write
       actions: read
+      security-events: write
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
 
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
 
       - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0
+        uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
         env:
           TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
           TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1

@@ -43,6 +48,6 @@ jobs:
           severity: CRITICAL,HIGH
 
       - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5
+        uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
         with:
           sarif_file: 'trivy-results.sarif'
.github/workflows/zizmor.yml (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
+name: Security Analysis with zizmor
+
+on:
+  push:
+    branches: ["main"]
+  pull_request:
+    branches: ["**"]
+
+permissions: {}
+
+jobs:
+  zizmor:
+    name: Run zizmor
+    runs-on: ubuntu-latest
+    permissions:
+      security-events: write
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: Run zizmor
+        uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1
+        with:
+          # intentionally not scanning the entire repository,
+          # since it contains integration tests.
+          inputs: ./.github/
.pre-commit-config.yaml

@@ -1,7 +1,7 @@
 ---
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: check-yaml
       - id: check-json

@@ -31,7 +31,7 @@ repos:
         language: system
         args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
         types_or: [rust, file]
-        files: (Cargo.toml|Cargo.lock|rust-toolchain|.*\.rs$)
+        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
         pass_filenames: false
       - id: cargo-clippy
         name: cargo clippy

@@ -40,5 +40,13 @@ repos:
         language: system
         args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
         types_or: [rust, file]
-        files: (Cargo.toml|Cargo.lock|rust-toolchain|clippy.toml|.*\.rs$)
+        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
         pass_filenames: false
+      - id: check-docker-templates
+        name: check docker templates
+        description: Check if the Docker templates are updated
+        language: system
+        entry: sh
+        args:
+          - "-c"
+          - "cd docker && make"
Cargo.lock (generated, 3264 lines changed) — file diff suppressed because it is too large.
Cargo.toml (117 lines changed)
@@ -1,11 +1,12 @@
-workspace = { members = ["macros"] }
+[workspace]
+members = ["macros"]
 
 [package]
 name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.83.0"
+rust-version = "1.87.0"
 resolver = "2"
 
 repository = "https://github.com/dani-garcia/vaultwarden"
@@ -31,6 +32,11 @@ enable_mimalloc = ["dep:mimalloc"]
 # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
 # if you want to turn off the logging for a specific run.
 query_logger = ["dep:diesel_logger"]
+s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]
+
+# OIDC specific features
+oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"]
+oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"]
 
 # Enable unstable features, requires nightly
 # Currently only used to enable rusts official ip support
@@ -44,7 +50,7 @@ syslog = "7.0.0"
 macros = { path = "./macros" }
 
 # Logging
-log = "0.4.25"
+log = "0.4.27"
 fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
 tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
@@ -52,12 +58,12 @@
 dotenvy = { version = "0.15.7", default-features = false }
 
 # Lazy initialization
-once_cell = "1.20.2"
+once_cell = "1.21.3"
 
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
-bigdecimal = "0.4.7"
+bigdecimal = "0.4.8"
 
 # Web framework
 rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -71,114 +77,134 @@ dashmap = "6.1.0"
 
 # Async futures
 futures = "0.3.31"
-tokio = { version = "1.43.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
+tokio = { version = "1.47.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
 tokio-util = { version = "0.7.16", features = ["compat"]}
 
 # A generic serialization/deserialization framework
-serde = { version = "1.0.217", features = ["derive"] }
-serde_json = "1.0.138"
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = "1.0.142"
 
 # A safe, extensible ORM and Query builder
-diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] }
+diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] }
 diesel_migrations = "2.2.0"
 diesel_logger = { version = "0.4.0", optional = true }
 
-derive_more = { version = "1.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
+derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
 diesel-derive-newtype = "2.1.2"
 
 # Bundled/Static SQLite
-libsqlite3-sys = { version = "0.31.0", features = ["bundled"], optional = true }
+libsqlite3-sys = { version = "0.35.0", features = ["bundled"], optional = true }
 
 # Crypto-related libraries
-rand = "0.9.0"
-ring = "0.17.8"
+rand = "0.9.2"
+ring = "0.17.14"
 subtle = "2.6.1"
 
 # UUID generation
-uuid = { version = "1.12.1", features = ["v4"] }
+uuid = { version = "1.17.0", features = ["v4"] }
 
 # Date and time libraries
-chrono = { version = "0.4.39", features = ["clock", "serde"], default-features = false }
-chrono-tz = "0.10.1"
-time = "0.3.37"
+chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
+chrono-tz = "0.10.4"
+time = "0.3.41"
 
 # Job scheduler
-job_scheduler_ng = "2.0.5"
+job_scheduler_ng = "2.2.0"
 
 # Data encoding library Hex/Base32/Base64
-data-encoding = "2.7.0"
+data-encoding = "2.9.0"
 
 # JWT library
-jsonwebtoken = "9.3.0"
+jsonwebtoken = "9.3.1"
 
 # TOTP library
 totp-lite = "2.0.1"
 
 # Yubico Library
-yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false }
+yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
 
 # WebAuthn libraries
-webauthn-rs = "0.3.2"
+# danger-allow-state-serialisation is needed to save the state in the db
+# danger-credential-internals is needed to support U2F to Webauthn migration
+# danger-user-presence-only-security-keys is needed to disable UV
+webauthn-rs = { version = "0.5.2", features = ["danger-allow-state-serialisation", "danger-credential-internals", "danger-user-presence-only-security-keys"] }
+webauthn-rs-proto = "0.5.2"
+webauthn-rs-core = "0.5.2"
 
 # Handling of URL's for WebAuthn and favicons
 url = "2.5.4"
 
 # Email libraries
-lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"
 
 # HTML Template library
-handlebars = { version = "6.3.0", features = ["dir_source"] }
+handlebars = { version = "6.3.2", features = ["dir_source"] }
 
 # HTTP client (Used for favicons, version check, DUO and HIBP API)
-reqwest = { version = "0.12.12", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
-hickory-resolver = "0.24.2"
+reqwest = { version = "0.12.22", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
+hickory-resolver = "0.25.2"
 
 # Favicon extraction libraries
 html5gum = "0.7.0"
 regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.9.0"
+bytes = "1.10.1"
+svg-hush = "0.9.5"
 
 # Cache function results (Used for version check and favicon fetching)
-cached = { version = "0.54.0", features = ["async"] }
+cached = { version = "0.56.0", features = ["async"] }
 
 # Used for custom short lived cookie jar during favicon extraction
 cookie = "0.18.1"
 cookie_store = "0.21.1"
 
 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.69"
+openssl = "0.10.73"
 
 # CLI argument parsing
 pico-args = "0.5.0"
 
 # Macro ident concatenation
-paste = "1.0.15"
-governor = "0.8.0"
+pastey = "0.1.0"
+governor = "0.10.1"
 
+# OIDC for SSO
+openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] }
+mini-moka = "0.10.3"
+
 # Check client versions for specific features.
-semver = "1.0.25"
+semver = "1.0.26"
 
 # Allow overriding the default memory allocator
 # Mainly used for the musl builds, since the default musl malloc is very slow
-mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true }
-which = "7.0.1"
+mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true }
+
+which = "8.0.0"
 
 # Argon2 library with support for the PHC format
 argon2 = "0.5.3"
 
 # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
-rpassword = "7.3.1"
+rpassword = "7.4.0"
 
 # Loading a dynamic CSS Stylesheet
 grass_compiler = { version = "0.13.4", default-features = false }
 
-[patch.crates-io]
-# Patch yubico to remove duplicate crates of older versions
-yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" }
+# File are accessed through Apache OpenDAL
+opendal = { version = "0.54.0", features = ["services-fs"], default-features = false }
+
+# For retrieving AWS credentials, including temporary SSO credentials
+anyhow = { version = "1.0.98", optional = true }
+aws-config = { version = "1.8.4", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
+aws-credential-types = { version = "1.2.5", optional = true }
+aws-smithy-runtime-api = { version = "1.8.7", optional = true }
+http = { version = "1.3.1", optional = true }
+reqsign = { version = "0.16.5", optional = true }
 
 # Strip debuginfo from the release builds
-# The symbols are the provide better panic traces
+# The debug symbols are to provide better panic traces
 # Also enable fat LTO and use 1 codegen unit for optimizations
 [profile.release]
 strip = "debuginfo"
@@ -213,7 +239,7 @@ codegen-units = 16
 
 # Linting config
 # https://doc.rust-lang.org/rustc/lints/groups.html
-[lints.rust]
+[workspace.lints.rust]
 # Forbid
 unsafe_code = "forbid"
 non_ascii_idents = "forbid"
@@ -243,11 +269,14 @@ if_let_rescope = "allow"
|
||||
tail_expr_drop_order = "allow"
|
||||
|
||||
# https://rust-lang.github.io/rust-clippy/stable/index.html
|
||||
[lints.clippy]
|
||||
[workspace.lints.clippy]
|
||||
# Warn
|
||||
dbg_macro = "warn"
|
||||
todo = "warn"
|
||||
|
||||
# Ignore/Allow
|
||||
result_large_err = "allow"
|
||||
|
||||
# Deny
|
||||
case_sensitive_file_extension_comparisons = "deny"
|
||||
cast_lossless = "deny"
|
||||
@@ -255,6 +284,7 @@ clone_on_ref_ptr = "deny"
|
||||
equatable_if_let = "deny"
|
||||
filter_map_next = "deny"
|
||||
float_cmp_const = "deny"
|
||||
implicit_clone = "deny"
|
||||
inefficient_to_string = "deny"
|
||||
iter_on_empty_collections = "deny"
|
||||
iter_on_single_items = "deny"
|
||||
@@ -263,14 +293,12 @@ macro_use_imports = "deny"
|
||||
manual_assert = "deny"
|
||||
manual_instant_elapsed = "deny"
|
||||
manual_string_new = "deny"
|
||||
match_on_vec_items = "deny"
|
||||
match_wildcard_for_single_variants = "deny"
|
||||
mem_forget = "deny"
|
||||
needless_continue = "deny"
|
||||
needless_lifetimes = "deny"
|
||||
option_option = "deny"
|
||||
string_add_assign = "deny"
|
||||
string_to_string = "deny"
|
||||
unnecessary_join = "deny"
|
||||
unnecessary_self_imports = "deny"
|
||||
unnested_or_patterns = "deny"
|
||||
@@ -278,3 +306,6 @@ unused_async = "deny"
|
||||
unused_self = "deny"
|
||||
verbose_file_reads = "deny"
|
||||
zero_sized_map_values = "deny"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
24
README.md
@@ -59,19 +59,21 @@ A nearly complete implementation of the Bitwarden Client API is provided, includ
## Usage

> [!IMPORTANT]
> Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.
>
> This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
>
> If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above).
> The web-vault requires the use of a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API).
> That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS).

The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).
See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags.

There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).

Alternatively, you can also [build Vaultwarden](https://github.com/dani-garcia/vaultwarden/wiki/Building-binary) yourself.

While Vaultwarden is based upon the [Rocket web framework](https://rocket.rs), which has built-in support for TLS, our recommendation is that you set up a reverse proxy (see [proxy examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).

> [!TIP]
> **For more detailed examples on how to install, use and configure Vaultwarden, you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).**

The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).

There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).

### Docker/Podman CLI

Pull the container image and mount a volume from the host for persistent storage.<br>
@@ -83,7 +85,7 @@ docker run --detach --name vaultwarden \
  --env DOMAIN="https://vw.domain.tld" \
  --volume /vw-data/:/data/ \
  --restart unless-stopped \
  --publish 80:80 \
  --publish 127.0.0.1:8000:80 \
  vaultwarden/server:latest
```

@@ -104,7 +106,7 @@ services:
    volumes:
      - ./vw-data/:/data/
    ports:
      - 80:80
      - 127.0.0.1:8000:80
```

<br>
7
build.rs
@@ -11,6 +11,8 @@ fn main() {
    println!("cargo:rustc-cfg=postgresql");
    #[cfg(feature = "query_logger")]
    println!("cargo:rustc-cfg=query_logger");
    #[cfg(feature = "s3")]
    println!("cargo:rustc-cfg=s3");

    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!(
@@ -23,6 +25,7 @@ fn main() {
    println!("cargo::rustc-check-cfg=cfg(mysql)");
    println!("cargo::rustc-check-cfg=cfg(postgresql)");
    println!("cargo::rustc-check-cfg=cfg(query_logger)");
    println!("cargo::rustc-check-cfg=cfg(s3)");

    // Rerun when these paths are changed.
    // Someone could have checked-out a tag or specific commit, but no other files changed.
@@ -48,8 +51,8 @@ fn main() {
fn run(args: &[&str]) -> Result<String, std::io::Error> {
    let out = Command::new(args[0]).args(&args[1..]).output()?;
    if !out.status.success() {
        use std::io::{Error, ErrorKind};
        return Err(Error::new(ErrorKind::Other, "Command not successful"));
        use std::io::Error;
        return Err(Error::other("Command not successful"));
    }
    Ok(String::from_utf8(out.stdout).unwrap().trim().to_string())
}
@@ -1,13 +1,13 @@
---
vault_version: "v2025.1.1"
vault_image_digest: "sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918"
vault_version: "v2025.7.0"
vault_image_digest: "sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e"
# Cross Compile Docker Helper Scripts v1.6.1
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.84.1 # Rust version to be used
rust_version: 1.89.0 # Rust version to be used
debian_version: bookworm # Debian release name to be used
alpine_version: "3.21" # Alpine version to be used
alpine_version: "3.22" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
@@ -19,23 +19,23 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#   $ docker pull docker.io/vaultwarden/web-vault:v2025.1.1
#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.1.1
#   [docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918]
#   $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
#   [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918
#   [docker.io/vaultwarden/web-vault:v2025.1.1]
#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
#   [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault

########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.84.1 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.84.1 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.84.1 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.84.1 AS build_armv6
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
@@ -127,7 +127,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.21
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
@@ -19,15 +19,15 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#   $ docker pull docker.io/vaultwarden/web-vault:v2025.1.1
#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.1.1
#   [docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918]
#   $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
#   [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918
#   [docker.io/vaultwarden/web-vault:v2025.1.1]
#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
#   [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault

########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
@@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.84.1-slim-bookworm AS build
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
@@ -89,24 +89,24 @@ RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Debian based builds
ARG ARCH_OPENSSL_LIB_DIR \
    ARCH_OPENSSL_INCLUDE_DIR
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # Some special variables if needed to override some build paths
        if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \
            echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \
            echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \
        fi && \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \
        echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo
@@ -109,24 +109,24 @@ WORKDIR /app

{% if base == "debian" %}
# Environment variables for Cargo on Debian based builds
ARG ARCH_OPENSSL_LIB_DIR \
    ARCH_OPENSSL_INCLUDE_DIR
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # Some special variables if needed to override some build paths
        if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \
            echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \
            echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \
        fi && \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \
        echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo
@@ -133,8 +133,7 @@ target "debian-386" {
  platforms = ["linux/386"]
  tags = generate_tags("", "-386")
  args = {
    ARCH_OPENSSL_LIB_DIR = "/usr/lib/i386-linux-gnu"
    ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/i386-linux-gnu"
    TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig"
  }
}

@@ -142,20 +141,12 @@ target "debian-ppc64le" {
  inherits = ["debian"]
  platforms = ["linux/ppc64le"]
  tags = generate_tags("", "-ppc64le")
  args = {
    ARCH_OPENSSL_LIB_DIR = "/usr/lib/powerpc64le-linux-gnu"
    ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/powerpc64le-linux-gnu"
  }
}

target "debian-s390x" {
  inherits = ["debian"]
  platforms = ["linux/s390x"]
  tags = generate_tags("", "-s390x")
  args = {
    ARCH_OPENSSL_LIB_DIR = "/usr/lib/s390x-linux-gnu"
    ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/s390x-linux-gnu"
  }
}
// ==== End of unsupported Debian architecture targets ===
@@ -9,5 +9,8 @@ path = "src/lib.rs"
proc-macro = true

[dependencies]
quote = "1.0.38"
syn = "2.0.96"
quote = "1.0.40"
syn = "2.0.104"

[lints]
workspace = true
@@ -1,5 +1,3 @@
extern crate proc_macro;

use proc_macro::TokenStream;
use quote::quote;

@@ -12,7 +10,7 @@ pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream {

fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen = quote! {
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();
@@ -27,7 +25,7 @@ fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
            }
        }
    };
    gen.into()
    gen_derive.into()
}

#[proc_macro_derive(IdFromParam)]
@@ -39,7 +37,7 @@ pub fn derive_id_from_param(input: TokenStream) -> TokenStream {

fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen = quote! {
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();
@@ -54,5 +52,5 @@ fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {
            }
        }
    };
    gen.into()
    gen_derive.into()
}
1
migrations/mysql/2023-09-10-133000_add_sso/down.sql
Normal file
@@ -0,0 +1 @@
DROP TABLE sso_nonce;
4
migrations/mysql/2023-09-10-133000_add_sso/up.sql
Normal file
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;
@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;
@@ -0,0 +1,6 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    verifier TEXT,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;
7
migrations/mysql/2024-03-06-170000_add_sso_users/up.sql
Normal file
@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
    user_uuid CHAR(36) NOT NULL PRIMARY KEY,
    identifier VARCHAR(768) NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT now(),

    FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);
@@ -0,0 +1,2 @@
ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`;
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;
1
migrations/postgresql/2023-09-10-133000_add_sso/down.sql
Normal file
@@ -0,0 +1 @@
DROP TABLE sso_nonce;
4
migrations/postgresql/2023-09-10-133000_add_sso/up.sql
Normal file
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;
@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;
@@ -0,0 +1,6 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1,8 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    verifier TEXT,
    redirect_uri TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT now()
);
@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;
@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
    user_uuid CHAR(36) NOT NULL PRIMARY KEY,
    identifier TEXT NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT now(),

    FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);
@@ -0,0 +1,3 @@
ALTER TABLE sso_users
    DROP CONSTRAINT "sso_users_user_uuid_fkey",
    ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;
1
migrations/sqlite/2023-09-10-133000_add_sso/down.sql
Normal file
@@ -0,0 +1 @@
DROP TABLE sso_nonce;
4
migrations/sqlite/2023-09-10-133000_add_sso/up.sql
Normal file
@@ -0,0 +1,4 @@
CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;
@@ -0,0 +1 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;
@@ -0,0 +1,6 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
    nonce CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1,8 @@
DROP TABLE sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1,8 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    redirect_uri TEXT NOT NULL,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_nonce (
    state TEXT NOT NULL PRIMARY KEY,
    nonce TEXT NOT NULL,
    verifier TEXT,
    redirect_uri TEXT NOT NULL,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
@@ -0,0 +1 @@
DROP TABLE IF EXISTS sso_users;
7
migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql
Normal file
@@ -0,0 +1,7 @@
CREATE TABLE sso_users (
    user_uuid CHAR(36) NOT NULL PRIMARY KEY,
    identifier TEXT NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

    FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);
@@ -0,0 +1,9 @@
DROP TABLE IF EXISTS sso_users;

CREATE TABLE sso_users (
    user_uuid CHAR(36) NOT NULL PRIMARY KEY,
    identifier TEXT NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

    FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE
);
64
playwright/.env.template
Normal file
@@ -0,0 +1,64 @@
#################################
### Conf to run dev instances ###
#################################
ENV=dev
DC_ENV_FILE=.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1

################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=${TEST_USER}
TEST_USER_MAIL=${TEST_USER}@yopmail.com

TEST_USER2=test2
TEST_USER2_PASSWORD=${TEST_USER2}
TEST_USER2_MAIL=${TEST_USER2}@yopmail.com

TEST_USER3=test3
TEST_USER3_PASSWORD=${TEST_USER3}
TEST_USER3_MAIL=${TEST_USER3}@yopmail.com

###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8080

# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}

######################
# Vaultwarden Config #
######################
ROCKET_ADDRESS=0.0.0.0
ROCKET_PORT=8000
DOMAIN=http://127.0.0.1:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
I_REALLY_WANT_VOLATILE_STORAGE=true

SSO_ENABLED=true
SSO_ONLY=false
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}

SMTP_HOST=127.0.0.1
SMTP_PORT=1025
SMTP_SECURITY=off
SMTP_TIMEOUT=5
SMTP_FROM=vaultwarden@test
SMTP_FROM_NAME=Vaultwarden

########################################################
# DUMMY values for docker-compose to stop bothering us #
########################################################
MARIADB_PORT=3305
MYSQL_PORT=3307
POSTGRES_PORT=5432
6
playwright/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
logs
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
temp
169
playwright/README.md
Normal file
@@ -0,0 +1,169 @@
# Integration tests

This allows running integration tests using [Playwright](https://playwright.dev/).

It uses its own `test.env` with different ports to not collide with a running dev instance.

## Install

This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers.

### Running Playwright outside docker

It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change.
You will additionally need `nodejs`, then run:

```bash
npm install
npx playwright install-deps
npx playwright install firefox
```

## Usage

To run all the tests:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright
```

To force a rebuild of the Playwright image:
```bash
DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright
```

To access the UI to easily run tests individually and debug if needed (this will not work in docker):

```bash
npx playwright test --ui
```

### DB

Projects are configured to allow running the tests against a specific database.

You can use:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite
```

### SSO

To run the SSO tests:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite
```

### Keep services running

If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests):

```bash
PW_KEEP_SERVICE_RUNNNING=true npx playwright test
```

### Running specific tests

To run a whole file you can:

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login
```

To run only a specific test (it might fail if it has dependencies):

```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation"
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16
```

## Writing scenario

When creating a new scenario, use the recorder to more easily identify elements
(in general, try to rely on visible hints to identify elements and not hidden IDs).
This does not start the server; you will need to start it manually.

```bash
npx playwright codegen "http://127.0.0.1:8000"
```

## Override web-vault

It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit.

```bash
export PW_WV_REPO_URL=https://github.com/Timshel/oidc_web_builds.git
export PW_WV_COMMIT_HASH=8707dc76df3f0cceef2be5bfae37bb29bd17fae6
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Playwright
```

# OpenID Connect test setup

Additionally, this `docker-compose` template allows you to run Vaultwarden,
[Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC.

## Setup

This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`).

## Usage

Then start the stack (the `profile` is required to run `Vaultwarden`):

```bash
> docker compose --profile vaultwarden --env-file .env up
....
keycloakSetup_1  | Logging into http://127.0.0.1:8080 as user admin of realm master
keycloakSetup_1  | Created new realm with id 'test'
keycloakSetup_1  | 74af4933-e386-4e64-ba15-a7b61212c45e
oidc_keycloakSetup_1 exited with code 0
```

Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates the correct setup of the Keycloak realm, client and user
(it is normal for this container to stop once the configuration is done).

Then you can access:

- `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`.
- `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin`
- `Maildev` on http://0.0.0.0:1080

To proceed with an SSO login, after you enter the email, the SSO button should be visible on the screen prompting for the `Master Password`.
To use your computer's external IP (for example when testing with a phone) you will have to configure `KC_HTTP_HOST` and `DOMAIN`, as in the sketch below.
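A minimal sketch of such a `.env` override (the address is illustrative; both variables already exist in `.env.template`):

```bash
# Illustrative only: expose Keycloak and Vaultwarden on a LAN address
# so a phone on the same network can reach them; replace 192.168.1.10.
KC_HTTP_HOST=192.168.1.10
DOMAIN=http://192.168.1.10:8000
```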
## Running only Keycloak

You can run just `Keycloak` with `--profile keycloak`:

```bash
> docker compose --profile keycloak --env-file .env up
```
When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases).
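For example, a locally built Vaultwarden can be pointed at this Keycloak; a minimal sketch, assuming a debug build and an extracted web-vault build in `./web-vault` (the path is an assumption, the `SSO_*` values mirror `.env.template`):

```bash
# Minimal sketch: run a locally built Vaultwarden against the Keycloak above.
# WEB_VAULT_FOLDER points at an extracted bw_web_builds release (assumed path).
export SSO_ENABLED=true
export SSO_CLIENT_ID=warden
export SSO_CLIENT_SECRET=warden
export SSO_AUTHORITY=http://127.0.0.1:8080/realms/test
export WEB_VAULT_FOLDER=./web-vault
cargo run --features sqlite
```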
## Rebuilding the Vaultwarden image

To force rebuilding the Vaultwarden image, you can run:

```bash
docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden
```

## Configuration

All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template).
The content of the file will be loaded as environment variables in all containers.

- `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)).
- All `Vaultwarden` configuration can be set (e.g. `SMTP_*`); see the sketch below.
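For example, a `.env` along these lines (all values illustrative) is picked up by both the Keycloak and Vaultwarden containers:

```bash
# Illustrative overrides; any KC_* or Vaultwarden setting can go here.
KEYCLOAK_ADMIN_PASSWORD=change-me
KC_HTTP_PORT=8081
SMTP_FROM=vaultwarden@example.test
SSO_DEBUG_TOKENS=true
```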
## Cleanup

Use `docker compose --profile vaultwarden down`.
40
playwright/compose/keycloak/Dockerfile
Normal file
@@ -0,0 +1,40 @@
FROM docker.io/library/debian:bookworm-slim as build

ENV DEBIAN_FRONTEND=noninteractive
ARG KEYCLOAK_VERSION

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN apt-get update \
    && apt-get install -y ca-certificates curl wget \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /

RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz

FROM docker.io/library/debian:bookworm-slim

ENV DEBIAN_FRONTEND=noninteractive
ARG KEYCLOAK_VERSION

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN apt-get update \
    && apt-get install -y ca-certificates curl wget \
    && rm -rf /var/lib/apt/lists/*

ARG JAVA_URL
ARG JAVA_VERSION

ENV JAVA_VERSION=${JAVA_VERSION}

RUN mkdir -p /opt/openjdk && cd /opt/openjdk \
    && wget -c "${JAVA_URL}" -O - | tar -xz

WORKDIR /

COPY setup.sh /setup.sh
COPY --from=build /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin

CMD "/setup.sh"
36
playwright/compose/keycloak/setup.sh
Executable file
@@ -0,0 +1,36 @@
#!/bin/bash

export PATH=/opt/keycloak/bin:/opt/openjdk/jdk-${JAVA_VERSION}/bin:$PATH
export JAVA_HOME=/opt/openjdk/jdk-${JAVA_VERSION}

STATUS_CODE=0
while [[ "$STATUS_CODE" != "404" ]] ; do
    echo "Will retry in 2 seconds"
    sleep 2

    STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$DUMMY_AUTHORITY")

    if [[ "$STATUS_CODE" = "200" ]]; then
        echo "Setup should already be done. Will not run."
        exit 0
    fi
done

set -e

kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli

kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600"
kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i

TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n

TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n

TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n

# Dummy realm to mark end of setup
kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600"
40
playwright/compose/playwright/Dockerfile
Normal file
@@ -0,0 +1,40 @@
FROM docker.io/library/debian:bookworm-slim

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update \
    && apt-get install -y ca-certificates curl \
    && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \
    && chmod a+r /etc/apt/keyrings/docker.asc \
    && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" | tee /etc/apt/sources.list.d/docker.list \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
        containerd.io \
        docker-buildx-plugin \
        docker-ce \
        docker-ce-cli \
        docker-compose-plugin \
        git \
        libmariadb-dev-compat \
        libpq5 \
        nodejs \
        npm \
        openssl \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /playwright
WORKDIR /playwright

COPY package.json .
RUN npm install && npx playwright install-deps && npx playwright install firefox

COPY docker-compose.yml test.env ./
COPY compose ./compose

COPY *.ts test.env ./
COPY tests ./tests

ENTRYPOINT ["/usr/bin/npx", "playwright"]
CMD ["test"]
40
playwright/compose/warden/Dockerfile
Normal file
@@ -0,0 +1,40 @@
FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt

FROM node:22-bookworm AS build

ARG REPO_URL
ARG COMMIT_HASH

ENV REPO_URL=$REPO_URL
ENV COMMIT_HASH=$COMMIT_HASH

COPY --from=prebuilt /web-vault /web-vault

COPY build.sh /build.sh
RUN /build.sh

######################## RUNTIME IMAGE ########################
FROM docker.io/library/debian:bookworm-slim

ENV DEBIAN_FRONTEND=noninteractive

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb-dev-compat \
        libpq5 \
        openssl && \
    rm -rf /var/lib/apt/lists/*

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY --from=prebuilt /start.sh .
COPY --from=prebuilt /vaultwarden .
COPY --from=build /web-vault ./web-vault

ENTRYPOINT ["/start.sh"]
24
playwright/compose/warden/build.sh
Executable file
@@ -0,0 +1,24 @@
#!/bin/bash

echo $REPO_URL
echo $COMMIT_HASH

if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then
    rm -rf /web-vault

    mkdir bw_web_builds;
    cd bw_web_builds;

    git -c init.defaultBranch=main init
    git remote add origin "$REPO_URL"
    git fetch --depth 1 origin "$COMMIT_HASH"
    git -c advice.detachedHead=false checkout FETCH_HEAD

    export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2)
    ./scripts/checkout_web_vault.sh
    ./scripts/patch_web_vault.sh
    ./scripts/build_web_vault.sh
    printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json

    mv ./web-vault/apps/web/build /web-vault
fi
124
playwright/docker-compose.yml
Normal file
@@ -0,0 +1,124 @@
services:
  VaultwardenPrebuild:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden_prebuilt
    image: playwright_oidc_vaultwarden_prebuilt
    build:
      context: ..
      dockerfile: Dockerfile
    entrypoint: /bin/bash
    restart: "no"

  Vaultwarden:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden-${ENV:-dev}
    image: playwright_oidc_vaultwarden-${ENV:-dev}
    network_mode: "host"
    build:
      context: compose/warden
      dockerfile: Dockerfile
      args:
        REPO_URL: ${PW_WV_REPO_URL:-}
        COMMIT_HASH: ${PW_WV_COMMIT_HASH:-}
    env_file: ${DC_ENV_FILE:-.env}
    environment:
      - DATABASE_URL
      - I_REALLY_WANT_VOLATILE_STORAGE
      - LOG_LEVEL
      - LOGIN_RATELIMIT_MAX_BURST
      - SMTP_HOST
      - SMTP_FROM
      - SMTP_DEBUG
      - SSO_DEBUG_TOKENS
      - SSO_FRONTEND
      - SSO_ENABLED
      - SSO_ONLY
    restart: "no"
    depends_on:
      - VaultwardenPrebuild

  Playwright:
    profiles: ["playwright"]
    container_name: playwright_oidc_playwright
    image: playwright_oidc_playwright
    network_mode: "host"
    build:
      context: .
      dockerfile: compose/playwright/Dockerfile
    environment:
      - PW_WV_REPO_URL
      - PW_WV_COMMIT_HASH
    restart: "no"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ..:/project

  Mariadb:
    profiles: ["playwright"]
    container_name: playwright_mariadb
    image: mariadb:11.2.4
    env_file: test.env
    healthcheck:
      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MARIADB_PORT}:3306

  Mysql:
    profiles: ["playwright"]
    container_name: playwright_mysql
    image: mysql:8.4.1
    env_file: test.env
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MYSQL_PORT}:3306

  Postgres:
    profiles: ["playwright"]
    container_name: playwright_postgres
    image: postgres:16.3
    env_file: test.env
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
      start_period: 20s
      interval: 30s
    ports:
      - ${POSTGRES_PORT}:5432

  Maildev:
    profiles: ["vaultwarden", "maildev"]
    container_name: maildev
    image: timshel/maildev:3.0.4
    ports:
      - ${SMTP_PORT}:1025
      - 1080:1080

  Keycloak:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloak-${ENV:-dev}
    image: quay.io/keycloak/keycloak:25.0.4
    network_mode: "host"
    command:
      - start-dev
    env_file: ${DC_ENV_FILE:-.env}

  KeycloakSetup:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloakSetup-${ENV:-dev}
    image: keycloak_setup-${ENV:-dev}
    build:
      context: compose/keycloak
      dockerfile: Dockerfile
      args:
        KEYCLOAK_VERSION: 25.0.4
        JAVA_URL: https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz
        JAVA_VERSION: 21.0.2
    network_mode: "host"
    depends_on:
      - Keycloak
    restart: "no"
    env_file: ${DC_ENV_FILE:-.env}
22
playwright/global-setup.ts
Normal file
@@ -0,0 +1,22 @@
import { firefox, type FullConfig } from '@playwright/test';
import { execSync } from 'node:child_process';
import fs from 'fs';

const utils = require('./global-utils');

utils.loadEnv();

async function globalSetup(config: FullConfig) {
    // Are we running in docker and the project is mounted ?
    const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : ".");
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, {
        env: { ...process.env },
        stdio: "inherit"
    });
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, {
        env: { ...process.env },
        stdio: "inherit"
    });
}

export default globalSetup;
246
playwright/global-utils.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { expect, type Browser, type TestInfo } from '@playwright/test';
|
||||
import { EventEmitter } from "events";
|
||||
import { type Mail, MailServer } from 'maildev';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
import dotenv from 'dotenv';
|
||||
import dotenvExpand from 'dotenv-expand';
|
||||
|
||||
const fs = require("fs");
|
||||
const { spawn } = require('node:child_process');
|
||||
|
||||
export function loadEnv(){
|
||||
var myEnv = dotenv.config({ path: 'test.env' });
|
||||
dotenvExpand.expand(myEnv);
|
||||
|
||||
return {
|
||||
user1: {
|
||||
email: process.env.TEST_USER_MAIL,
|
||||
name: process.env.TEST_USER,
|
||||
password: process.env.TEST_USER_PASSWORD,
|
||||
},
|
||||
user2: {
|
||||
email: process.env.TEST_USER2_MAIL,
|
||||
name: process.env.TEST_USER2,
|
||||
password: process.env.TEST_USER2_PASSWORD,
|
||||
},
|
||||
user3: {
|
||||
email: process.env.TEST_USER3_MAIL,
|
||||
name: process.env.TEST_USER3,
|
||||
password: process.env.TEST_USER3_PASSWORD,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export async function waitFor(url: String, browser: Browser) {
|
||||
var ready = false;
|
||||
var context;
|
||||
|
||||
do {
|
||||
try {
|
||||
context = await browser.newContext();
|
||||
const page = await context.newPage();
|
||||
await page.waitForTimeout(500);
|
||||
const result = await page.goto(url);
|
||||
ready = result.status() === 200;
|
||||
} catch(e) {
|
||||
if( !e.message.includes("CONNECTION_REFUSED") ){
|
||||
throw e;
|
||||
}
|
||||
} finally {
|
||||
await context.close();
|
||||
}
|
||||
} while(!ready);
|
||||
}
|
||||
|
||||
export function startComposeService(serviceName: String){
|
||||
console.log(`Starting ${serviceName}`);
|
||||
execSync(`docker compose --profile playwright --env-file test.env up -d ${serviceName}`);
|
||||
}
|
||||
|
||||
export function stopComposeService(serviceName: String){
|
||||
console.log(`Stopping ${serviceName}`);
|
||||
execSync(`docker compose --profile playwright --env-file test.env stop ${serviceName}`);
|
||||
}
|
||||
|
||||
function wipeSqlite(){
|
||||
console.log(`Delete Vaultwarden container to wipe sqlite`);
|
||||
execSync(`docker compose --env-file test.env stop Vaultwarden`);
|
||||
execSync(`docker compose --env-file test.env rm -f Vaultwarden`);
|
||||
}
|
||||
|
||||
async function wipeMariaDB(){
|
||||
var mysql = require('mysql2/promise');
|
||||
var ready = false;
|
||||
var connection;
|
||||
|
||||
do {
|
||||
try {
|
||||
connection = await mysql.createConnection({
|
||||
user: process.env.MARIADB_USER,
|
||||
host: "127.0.0.1",
|
||||
database: process.env.MARIADB_DATABASE,
|
||||
password: process.env.MARIADB_PASSWORD,
|
||||
port: process.env.MARIADB_PORT,
|
||||
});
|
||||
|
||||
await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`);
|
||||
await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`);
|
||||
console.log('Successfully wiped mariadb');
|
||||
ready = true;
|
||||
} catch (err) {
|
||||
console.log(`Error when wiping mariadb: ${err}`);
|
||||
} finally {
|
||||
if( connection ){
|
||||
connection.end();
|
||||
}
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
} while(!ready);
|
||||
}
|
||||
|
||||
async function wipeMysqlDB(){
|
||||
var mysql = require('mysql2/promise');
|
||||
var ready = false;
|
||||
var connection;
|
||||
|
||||
do{
|
||||
try {
|
||||
connection = await mysql.createConnection({
|
||||
user: process.env.MYSQL_USER,
|
||||
host: "127.0.0.1",
|
||||
database: process.env.MYSQL_DATABASE,
|
||||
password: process.env.MYSQL_PASSWORD,
|
||||
port: process.env.MYSQL_PORT,
|
||||
});
|
||||
|
||||
await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`);
|
||||
await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`);
|
||||
console.log('Successfully wiped mysql');
|
||||
ready = true;
|
||||
} catch (err) {
|
||||
console.log(`Error when wiping mysql: ${err}`);
|
||||
} finally {
|
||||
if( connection ){
|
||||
connection.end();
|
||||
}
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
    } while(!ready);
}

async function wipePostgres(){
    const { Client } = require('pg');

    const client = new Client({
        user: process.env.POSTGRES_USER,
        host: "127.0.0.1",
        database: "postgres",
        password: process.env.POSTGRES_PASSWORD,
        port: process.env.POSTGRES_PORT,
    });

    try {
        await client.connect();
        await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`);
        await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`);
        console.log('Successfully wiped postgres');
    } catch (err) {
        console.log(`Error when wiping postgres: ${err}`);
    } finally {
        client.end();
    }
}

function dbConfig(testInfo: TestInfo){
    switch(testInfo.project.name) {
        case "postgres":
        case "sso-postgres":
            return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` };
        case "mariadb":
        case "sso-mariadb":
            return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` };
        case "mysql":
        case "sso-mysql":
            return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}` };
        case "sqlite":
        case "sso-sqlite":
            return { I_REALLY_WANT_VOLATILE_STORAGE: true };
        default:
            throw new Error(`Unknown database name: ${testInfo.project.name}`);
    }
}

/**
 * All parameters passed in `env` need to be added to the docker-compose.yml
 **/
export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: boolean = true) {
    if( resetDB ){
        switch(testInfo.project.name) {
            case "postgres":
            case "sso-postgres":
                await wipePostgres();
                break;
            case "mariadb":
            case "sso-mariadb":
                await wipeMariaDB();
                break;
            case "mysql":
            case "sso-mysql":
                await wipeMysqlDB();
                break;
            case "sqlite":
            case "sso-sqlite":
                wipeSqlite();
                break;
            default:
                throw new Error(`Unknown database name: ${testInfo.project.name}`);
        }
    }

    console.log(`Starting Vaultwarden`);
    execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, {
        env: { ...env, ...dbConfig(testInfo) },
    });
    await waitFor("/", browser);
    console.log(`Vaultwarden running on: ${process.env.DOMAIN}`);
}

export async function stopVault(force: boolean = false) {
    if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) {
        console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`);
    } else {
        console.log(`Vaultwarden stopping`);
        execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`);
    }
}

export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: boolean = true) {
    await stopVault(true);
    return startVault(page.context().browser(), testInfo, env, resetDB);
}

export async function checkNotification(page: Page, hasText: string) {
    await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible();
    await page.locator('bit-toast').filter({ hasText }).getByRole('button').click();
    await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0);
}

export async function cleanLanding(page: Page) {
    await page.goto('/', { waitUntil: 'domcontentloaded' });
    await expect(page.getByRole('button').nth(0)).toBeVisible();

    const logged = await page.getByRole('button', { name: 'Log out' }).count();
    if( logged > 0 ){
        await page.getByRole('button', { name: 'Log out' }).click();
        await page.getByRole('button', { name: 'Log out' }).click();
    }
}

export async function logout(test: Test, page: Page, user: { name: string }) {
    await test.step('logout', async () => {
        await page.getByRole('button', { name: user.name, exact: true }).click();
        await page.getByRole('menuitem', { name: 'Log out' }).click();
        await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible();
    });
}
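A minimal sketch of the lifecycle these helpers are written for (it mirrors the spec files later in this diff; remember that any key passed through `env`, such as the SMTP_* values here, must already be declared in docker-compose.yml):

import { test, type TestInfo } from '@playwright/test';
import * as utils from './global-utils';

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    // `env` entries override the container environment for this run.
    await utils.startVault(browser, testInfo, { SMTP_HOST: process.env.MAILDEV_HOST });
});

test.afterAll('Teardown', async ({}) => {
    // Honours PW_KEEP_SERVICE_RUNNNING unless `force` is passed.
    await utils.stopVault();
});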
2547
playwright/package-lock.json
generated
Normal file
File diff suppressed because it is too large
21
playwright/package.json
Normal file
@@ -0,0 +1,21 @@
{
  "name": "scenarios",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {},
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@playwright/test": "^1.53.0",
    "dotenv": "^16.5.0",
    "dotenv-expand": "^12.0.2",
    "maildev": "npm:@timshel_npm/maildev@^3.1.2"
  },
  "dependencies": {
    "mysql2": "^3.14.1",
    "otpauth": "^9.4.0",
    "pg": "^8.16.0"
  }
}
143
playwright/playwright.config.ts
Normal file
@@ -0,0 +1,143 @@
import { defineConfig, devices } from '@playwright/test';
import { exec } from 'node:child_process';

const utils = require('./global-utils');

utils.loadEnv();

/**
 * See https://playwright.dev/docs/test-configuration.
 */
export default defineConfig({
    testDir: './.',
    /* Run tests in files in parallel */
    fullyParallel: false,

    /* Fail the build on CI if you accidentally left test.only in the source code. */
    forbidOnly: !!process.env.CI,

    retries: 0,
    workers: 1,

    /* Reporter to use. See https://playwright.dev/docs/test-reporters */
    reporter: 'html',

    /* Long global timeout for complex tests
     * But short action/nav/expect timeouts to fail on specific step (raise locally if not enough).
     */
    timeout: 120 * 1000,
    actionTimeout: 10 * 1000,
    navigationTimeout: 10 * 1000,
    expect: { timeout: 10 * 1000 },

    /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
    use: {
        /* Base URL to use in actions like `await page.goto('/')`. */
        baseURL: process.env.DOMAIN,
        browserName: 'firefox',
        locale: 'en-GB',
        timezoneId: 'Europe/London',

        /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */
        trace: 'on',
        viewport: {
            width: 1080,
            height: 720,
        },
        video: "on",
    },

    /* Configure projects for major browsers */
    projects: [
        {
            name: 'mariadb-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mariadb" },
            teardown: 'mariadb-teardown',
        },
        {
            name: 'mysql-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mysql" },
            teardown: 'mysql-teardown',
        },
        {
            name: 'postgres-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Postgres" },
            teardown: 'postgres-teardown',
        },
        {
            name: 'sso-setup',
            testMatch: 'tests/setups/sso-setup.ts',
            teardown: 'sso-teardown',
        },

        {
            name: 'mariadb',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mariadb-setup'],
        },
        {
            name: 'mysql',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mysql-setup'],
        },
        {
            name: 'postgres',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['postgres-setup'],
        },
        {
            name: 'sqlite',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
        },

        {
            name: 'sso-mariadb',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mariadb-setup'],
        },
        {
            name: 'sso-mysql',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mysql-setup'],
        },
        {
            name: 'sso-postgres',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'postgres-setup'],
        },
        {
            name: 'sso-sqlite',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup'],
        },

        {
            name: 'mariadb-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mariadb" },
        },
        {
            name: 'mysql-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mysql" },
        },
        {
            name: 'postgres-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Postgres" },
        },
        {
            name: 'sso-teardown',
            testMatch: 'tests/setups/sso-teardown.ts',
        },
    ],

    globalSetup: require.resolve('./global-setup'),
});
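Each database backend is wired as a setup → tests → teardown triple of projects: Playwright runs the `dependencies` project first and the `teardown` project after the run. A new backend would slot in the same way; the sketch below is purely illustrative (a `redis` backend is not part of this change):

// Hypothetical example only: shows the setup/teardown wiring pattern.
{
    name: 'redis-setup',
    testMatch: 'tests/setups/db-setup.ts',
    use: { serviceName: "Redis" },
    teardown: 'redis-teardown',
},
{
    name: 'redis',
    testMatch: 'tests/*.spec.ts',
    testIgnore: 'tests/sso_*.spec.ts',
    dependencies: ['redis-setup'],
},
{
    name: 'redis-teardown',
    testMatch: 'tests/setups/db-teardown.ts',
    use: { serviceName: "Redis" },
},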
93
playwright/test.env
Normal file
@@ -0,0 +1,93 @@
##################################################################
### Shared Playwright conf test file Vaultwarden and Databases ###
##################################################################

ENV=test
DC_ENV_FILE=test.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1

#####################
# Playwright Config #
#####################
PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false}
PW_SMTP_FROM=vaultwarden@playwright.test

#####################
#  Maildev Config   #
#####################
MAILDEV_HTTP_PORT=1081
MAILDEV_SMTP_PORT=1026
MAILDEV_HOST=127.0.0.1

################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=Master Password
TEST_USER_MAIL=${TEST_USER}@example.com

TEST_USER2=test2
TEST_USER2_PASSWORD=Master Password
TEST_USER2_MAIL=${TEST_USER2}@example.com

TEST_USER3=test3
TEST_USER3_PASSWORD=Master Password
TEST_USER3_MAIL=${TEST_USER3}@example.com

###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8081

# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}

######################
# Vaultwarden Config #
######################
ROCKET_PORT=8003
DOMAIN=http://127.0.0.1:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
LOGIN_RATELIMIT_MAX_BURST=100

SMTP_SECURITY=off
SMTP_PORT=${MAILDEV_SMTP_PORT}
SMTP_FROM_NAME=Vaultwarden
SMTP_TIMEOUT=5

SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SSO_DEBUG_TOKENS=true

############################
# Docker MariaDb container #
############################
MARIADB_PORT=3307
MARIADB_ROOT_PASSWORD=warden
MARIADB_USER=warden
MARIADB_PASSWORD=warden
MARIADB_DATABASE=warden

##########################
# Docker Mysql container #
##########################
MYSQL_PORT=3309
MYSQL_ROOT_PASSWORD=warden
MYSQL_USER=warden
MYSQL_PASSWORD=warden
MYSQL_DATABASE=warden

#############################
# Docker Postgres container #
#############################
POSTGRES_PORT=5433
POSTGRES_USER=warden
POSTGRES_PASSWORD=warden
POSTGRES_DB=warden
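The `${VAR:-default}` references above rely on shell-style default expansion, which plain dotenv does not perform; the `dotenv-expand` dev dependency in package.json handles it. A minimal sketch of how such a file can be loaded (the real `loadEnv()` lives in global-utils.ts and may differ):

import * as dotenv from 'dotenv';
import * as dotenvExpand from 'dotenv-expand';

// Parse test.env, then expand ${VAR} and ${VAR:-default} references.
const parsed = dotenv.config({ path: 'test.env' });
dotenvExpand.expand(parsed);

// PW_KEEP_SERVICE_RUNNNING now resolves to "false" unless it was
// already set in the calling shell.
console.log(process.env.PW_KEEP_SERVICE_RUNNNING);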
37
playwright/tests/collection.spec.ts
Normal file
@@ -0,0 +1,37 @@
import { test, expect, type TestInfo } from '@playwright/test';

import * as utils from "../global-utils";
import { createAccount } from './setups/user';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Create', async ({ page }) => {
    await createAccount(test, page, users.user1);

    await test.step('Create Org', async () => {
        await page.getByRole('link', { name: 'New organisation' }).click();
        await page.getByLabel('Organisation name (required)').fill('Test');
        await page.getByRole('button', { name: 'Submit' }).click();
        await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();

        await utils.checkNotification(page, 'Organisation created');
    });

    await test.step('Create Collection', async () => {
        await page.getByRole('link', { name: 'Collections' }).click();
        await page.getByRole('button', { name: 'New' }).click();
        await page.getByRole('menuitem', { name: 'Collection' }).click();
        await page.getByLabel('Name (required)').fill('RandomCollec');
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'Created collection RandomCollec');
        await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible();
    });
});
100
playwright/tests/login.smtp.spec.ts
Normal file
@@ -0,0 +1,100 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

const utils = require('../global-utils');
import { createAccount, logUser } from './setups/user';
import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa';

let users = utils.loadEnv();

let mailserver;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailserver = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })

    await mailserver.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    if( mailserver ){
        await mailserver.close();
    }
});

test('Account creation', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await createAccount(test, page, users.user1, mailBuffer);

    mailBuffer.close();
});

test('Login', async ({ context, page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1, mailBuffer);

    await test.step('verify email', async () => {
        await page.getByText('Verify your account\'s email').click();
        await expect(page.getByText('Verify your account\'s email')).toBeVisible();
        await page.getByRole('button', { name: 'Send email' }).click();

        await utils.checkNotification(page, 'Check your email inbox for a verification link');

        const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email");
        expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM);

        const page2 = await context.newPage();
        await page2.setContent(verify.html);
        const link = await page2.getByTestId("verify").getAttribute("href");
        await page2.close();

        await page.goto(link);
        await utils.checkNotification(page, 'Account email verified');
    });

    mailBuffer.close();
});

test('Activate 2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1);

    await activateEmail(test, page, users.user1, emails);

    emails.close();
});

test('2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);

    await test.step('login', async () => {
        await page.goto('/');

        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        const code = await retrieveEmailCode(test, page, emails);
        await page.getByLabel(/Verification code/).fill(code);
        await page.getByRole('button', { name: 'Continue' }).click();

        await expect(page).toHaveTitle(/Vaults/);
    })

    await disableEmail(test, page, users.user1);

    emails.close();
});
51
playwright/tests/login.spec.ts
Normal file
@@ -0,0 +1,51 @@
import { test, expect, type Page, type TestInfo } from '@playwright/test';
import * as OTPAuth from "otpauth";

import * as utils from "../global-utils";
import { createAccount, logUser } from './setups/user';
import { activateTOTP, disableTOTP } from './setups/2fa';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {});
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Account creation', async ({ page }) => {
    await createAccount(test, page, users.user1);
});

test('Master password login', async ({ page }) => {
    await logUser(test, page, users.user1);
});

test('Authenticator 2fa', async ({ page }) => {
    await logUser(test, page, users.user1);

    let totp = await activateTOTP(test, page, users.user1);

    await utils.logout(test, page, users.user1);

    await test.step('login', async () => {
        let timestamp = Date.now(); // Needed to use the next token
        timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;

        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        await page.getByLabel(/Verification code/).fill(totp.generate({timestamp}));
        await page.getByRole('button', { name: 'Continue' }).click();

        await expect(page).toHaveTitle(/Vaultwarden Web/);
    });

    await disableTOTP(test, page, users.user1);
});
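The timestamp adjustment above deserves a worked example: the code generated during activation has just been consumed, so the test fast-forwards to the next 30-second TOTP window before generating a fresh one. With illustrative values:

// Suppose Date.now() = 1_700_000_012_000 ms and totp.period = 30 s.
// seconds into the current period: floor(1_700_000_012_000 / 1000) % 30 = 2
// seconds left in this period:     30 - 2 = 28
// +1 s of margin, converted to ms: (28 + 1) * 1000 = 29_000
// adjusted timestamp:              1_700_000_012_000 + 29_000 = 1_700_000_041_000,
// which falls inside the *next* window, so totp.generate({ timestamp })
// yields a token the server has not seen yet.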
115
playwright/tests/organization.smtp.spec.ts
Normal file
@@ -0,0 +1,115 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from '../global-utils';
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';

let users = utils.loadEnv();

let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailServer = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })

    await mailServer.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });

    mail1Buffer = mailServer.buffer(users.user1.email);
    mail2Buffer = mailServer.buffer(users.user2.email);
    mail3Buffer = mailServer.buffer(users.user3.email);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});

test('Create user3', async ({ page }) => {
    await createAccount(test, page, users.user3, mail3Buffer);
});

test('Invite users', async ({ page }) => {
    await createAccount(test, page, users.user1, mail1Buffer);

    await orgs.create(test, page, 'Test');
    await orgs.members(test, page, 'Test');
    await orgs.invite(test, page, 'Test', users.user2.email);
    await orgs.invite(test, page, 'Test', users.user3.email, {
        navigate: false,
    });
});

test('invited with new account', async ({ page }) => {
    const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test');

    await test.step('Create account', async () => {
        await page.setContent(invited.html);
        const link = await page.getByTestId('invite').getAttribute('href');
        await page.goto(link);
        await expect(page).toHaveTitle(/Create account | Vaultwarden Web/);

        //await page.getByLabel('Name').fill(users.user2.name);
        await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password);
        await page.getByLabel('Confirm new master password (').fill(users.user2.password);
        await page.getByRole('button', { name: 'Create account' }).click();
        await utils.checkNotification(page, 'Your new account has been created');

        // Redirected to the vault
        await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
        await utils.checkNotification(page, 'You have been logged in!');
        await utils.checkNotification(page, 'Invitation accepted');
    });

    await test.step('Check mails', async () => {
        await mail2Buffer.expect((m) => m.subject === 'Welcome');
        await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
        await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
    });
});

test('invited with existing account', async ({ page }) => {
    const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test');

    await page.setContent(invited.html);
    const link = await page.getByTestId('invite').getAttribute('href');

    await page.goto(link);

    // We should be on login page with email prefilled
    await expect(page).toHaveTitle(/Vaultwarden Web/);
    await page.getByRole('button', { name: 'Continue' }).click();

    // Unlock page
    await page.getByLabel('Master password').fill(users.user3.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();

    // We are now in the default vault page
    await expect(page).toHaveTitle(/Vaultwarden Web/);
    await utils.checkNotification(page, 'Invitation accepted');

    await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
    await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
});

test('Confirm invited user', async ({ page }) => {
    await logUser(test, page, users.user1, mail1Buffer);

    await orgs.members(test, page, 'Test');
    await orgs.confirm(test, page, 'Test', users.user2.email);

    await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed'));
});

test('Organization is visible', async ({ page }) => {
    await logUser(test, page, users.user2, mail2Buffer);
    await page.getByRole('button', { name: 'vault: Test', exact: true }).click();
    await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});
54
playwright/tests/organization.spec.ts
Normal file
@@ -0,0 +1,54 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Invite', async ({ page }) => {
    await createAccount(test, page, users.user3);
    await createAccount(test, page, users.user1);

    await orgs.create(test, page, 'New organisation');
    await orgs.members(test, page, 'New organisation');

    await test.step('missing user2', async () => {
        await orgs.invite(test, page, 'New organisation', users.user2.email);
        await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/);
    });

    await test.step('existing user3', async () => {
        await orgs.invite(test, page, 'New organisation', users.user3.email);
        await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/);
        await orgs.confirm(test, page, 'New organisation', users.user3.email);
    });

    await test.step('confirm user2', async () => {
        await createAccount(test, page, users.user2);
        await logUser(test, page, users.user1);
        await orgs.members(test, page, 'New organisation');
        await orgs.confirm(test, page, 'New organisation', users.user2.email);
    });

    await test.step('Org visible user2', async () => {
        await logUser(test, page, users.user2);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });

    await test.step('Org visible user3', async () => {
        await logUser(test, page, users.user3);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });
});
92
playwright/tests/setups/2fa.ts
Normal file
@@ -0,0 +1,92 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer } from 'maildev';
import * as OTPAuth from "otpauth";

import * as utils from '../../global-utils';

export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> {
    return await test.step('Activate TOTP 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();

        const secret = await page.getByLabel('Key').innerText();
        let totp = new OTPAuth.TOTP({ secret, period: 30 });

        await page.getByLabel(/Verification code/).fill(totp.generate());
        await page.getByRole('button', { name: 'Turn on' }).click();
        await page.getByRole('heading', { name: 'Turned on', exact: true });
        await page.getByLabel('Close').click();

        return totp;
    })
}

export async function disableTOTP(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable TOTP 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();
        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}

export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) {
    await test.step('Activate Email 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Send email' }).click();
    });

    let code = await retrieveEmailCode(test, page, mailBuffer);

    await test.step('input code', async () => {
        await page.getByLabel('2. Enter the resulting 6').fill(code);
        await page.getByRole('button', { name: 'Turn on' }).click();
        await page.getByRole('heading', { name: 'Turned on', exact: true });
    });
}

export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> {
    return await test.step('retrieve code', async () => {
        const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code"));
        const page2 = await page.context().newPage();
        await page2.setContent(codeMail.html);
        const code = await page2.getByTestId("2fa").innerText();
        await page2.close();
        return code;
    });
}

export async function disableEmail(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable Email 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();

        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}
7
playwright/tests/setups/db-setup.ts
Normal file
@@ -0,0 +1,7 @@
import { test } from './db-test';

const utils = require('../../global-utils');

test('DB start', async ({ serviceName }) => {
    utils.startComposeService(serviceName);
});
11
playwright/tests/setups/db-teardown.ts
Normal file
@@ -0,0 +1,11 @@
import { test } from './db-test';

const utils = require('../../global-utils');

utils.loadEnv();

test('DB teardown ?', async ({ serviceName }) => {
    if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) {
        utils.stopComposeService(serviceName);
    }
});
9
playwright/tests/setups/db-test.ts
Normal file
@@ -0,0 +1,9 @@
import { test as base } from '@playwright/test';

export type TestOptions = {
    serviceName: string;
};

export const test = base.extend<TestOptions>({
    serviceName: ['', { option: true }],
});
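`serviceName` is a project-scoped option: each `*-setup`/`*-teardown` project in playwright.config.ts injects its value through `use`, and the extended `test` exposes it to setup tests as a fixture. A condensed sketch of the flow, using names taken from the files above:

// playwright.config.ts — the project supplies the option...
// { name: 'mariadb-setup', testMatch: 'tests/setups/db-setup.ts', use: { serviceName: "Mariadb" } }

// db-setup.ts — ...and the extended `test` receives it as a fixture:
import { test } from './db-test';

test('DB start', async ({ serviceName }) => {
    console.log(`starting compose service ${serviceName}`); // "Mariadb"
});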
77
playwright/tests/setups/orgs.ts
Normal file
@@ -0,0 +1,77 @@
import { expect, type Browser, Page } from '@playwright/test';

import * as utils from '../../global-utils';

export async function create(test, page: Page, name: string) {
    await test.step('Create Org', async () => {
        await page.locator('a').filter({ hasText: 'Password Manager' }).first().click();
        await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        await page.getByRole('link', { name: 'New organisation' }).click();
        await page.getByLabel('Organisation name (required)').fill(name);
        await page.getByRole('button', { name: 'Submit' }).click();

        await utils.checkNotification(page, 'Organisation created');
    });
}

export async function policies(test, page: Page, name: string) {
    await test.step(`Navigate to ${name} policies`, async () => {
        await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
        await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
        await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
        await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
        await page.getByRole('button', { name: 'Toggle collapse Settings' }).click();
        await page.getByRole('link', { name: 'Policies' }).click();
        await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible();
    });
}

export async function members(test, page: Page, name: string) {
    await test.step(`Navigate to ${name} members`, async () => {
        await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
        await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
        await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
        await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
        await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await expect(page.getByRole('cell', { name: 'All' })).toBeVisible();
    });
}

// Some call sites pass a `navigate` option (see organization.smtp.spec.ts);
// it is accepted here for compatibility but currently unused, since the
// Members page is expected to already be open.
export async function invite(test, page: Page, name: string, email: string, _options: { navigate?: boolean } = {}) {
    await test.step(`Invite ${email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('button', { name: 'Invite member' }).click();
        await page.getByLabel('Email (required)').fill(email);
        await page.getByRole('tab', { name: 'Collections' }).click();
        await page.getByRole('combobox', { name: 'Permission' }).click();
        await page.getByText('Edit items', { exact: true }).click();
        await page.getByLabel('Select collections').click();
        await page.getByText('Default collection').click();
        await page.getByRole('cell', { name: 'Collection', exact: true }).click();
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'User(s) invited');
    });
}

export async function confirm(test, page: Page, name: string, user_email: string) {
    await test.step(`Confirm ${user_email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click();
        await page.getByRole('menuitem', { name: 'Confirm' }).click();
        await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible();
        await page.getByRole('button', { name: 'Confirm' }).click();
        await utils.checkNotification(page, 'confirmed');
    });
}

export async function revoke(test, page: Page, name: string, user_email: string) {
    await test.step(`Revoke ${user_email}`, async () => {
        await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
        await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click();
        await page.getByRole('menuitem', { name: 'Revoke access' }).click();
        await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible();
        await page.getByRole('button', { name: 'Revoke access' }).click();
        await utils.checkNotification(page, 'Revoked organisation access');
    });
}
18
playwright/tests/setups/sso-setup.ts
Normal file
@@ -0,0 +1,18 @@
import { test, expect, type TestInfo } from '@playwright/test';

const { exec } = require('node:child_process');
const utils = require('../../global-utils');

utils.loadEnv();

test.beforeAll('Setup', async () => {
    console.log("Starting Keycloak");
    exec(`docker compose --profile keycloak --env-file test.env up`);
});

test('Keycloak is up', async ({ page }) => {
    await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser());
    // Dummy authority is created at the end of the setup
    await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser());
    console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`);
});
15
playwright/tests/setups/sso-teardown.ts
Normal file
@@ -0,0 +1,15 @@
import { test, type FullConfig } from '@playwright/test';

const { execSync } = require('node:child_process');
const utils = require('../../global-utils');

utils.loadEnv();

test('Keycloak teardown', async () => {
    if( process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) {
        console.log("Keep Keycloak running");
    } else {
        console.log("Keycloak stopping");
        execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`);
    }
});
138
playwright/tests/setups/sso.ts
Normal file
@@ -0,0 +1,138 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer, MailServer } from 'maildev';
import * as OTPAuth from "otpauth";

import * as utils from '../../global-utils';
import { retrieveEmailCode } from './2fa';

/**
 * If a MailBuffer is passed it will be used and consume the expected emails
 */
export async function logNewUser(
    test: Test,
    page: Page,
    user: { email: string, name: string, password: string },
    options: { mailBuffer?: MailBuffer, override?: boolean } = {}
) {
    await test.step(`Create user ${user.name}`, async () => {
        await page.context().clearCookies();

        await test.step('Landing page', async () => {
            await utils.cleanLanding(page);

            if( options.override ) {
                await page.getByRole('button', { name: 'Continue' }).click();
            } else {
                await page.getByLabel(/Email address/).fill(user.email);
                await page.getByRole('button', { name: /Use single sign-on/ }).click();
            }
        });

        await test.step('Keycloak login', async () => {
            await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
            await page.getByLabel(/Username/).fill(user.name);
            await page.getByLabel('Password', { exact: true }).fill(user.password);
            await page.getByRole('button', { name: 'Sign In' }).click();
        });

        await test.step('Create Vault account', async () => {
            await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
            await page.getByLabel('New master password (required)', { exact: true }).fill(user.password);
            await page.getByLabel('Confirm new master password (').fill(user.password);
            await page.getByRole('button', { name: 'Create account' }).click();
        });

        await test.step('Default vault page', async () => {
            await expect(page).toHaveTitle(/Vaultwarden Web/);
            await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        });

        await utils.checkNotification(page, 'Account successfully created!');
        await utils.checkNotification(page, 'Invitation accepted');

        if( options.mailBuffer ){
            let mailBuffer = options.mailBuffer;
            await test.step('Check emails', async () => {
                await mailBuffer.expect((m) => m.subject === "Welcome");
                await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
            });
        }
    });
}

/**
 * If a MailBuffer is passed it will be used and consume the expected emails
 */
export async function logUser(
    test: Test,
    page: Page,
    user: { email: string, name: string, password: string },
    options: {
        mailBuffer?: MailBuffer,
        override?: boolean,
        totp?: OTPAuth.TOTP,
        mail2fa?: boolean,
    } = {}
) {
    let mailBuffer = options.mailBuffer;

    await test.step(`Log user ${user.email}`, async () => {
        await page.context().clearCookies();

        await test.step('Landing page', async () => {
            await utils.cleanLanding(page);

            if( options.override ) {
                await page.getByRole('button', { name: 'Continue' }).click();
            } else {
                await page.getByLabel(/Email address/).fill(user.email);
                await page.getByRole('button', { name: /Use single sign-on/ }).click();
            }
        });

        await test.step('Keycloak login', async () => {
            await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
            await page.getByLabel(/Username/).fill(user.name);
            await page.getByLabel('Password', { exact: true }).fill(user.password);
            await page.getByRole('button', { name: 'Sign In' }).click();
        });

        if( options.totp || options.mail2fa ){
            let code;

            await test.step('2FA check', async () => {
                await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();

                if( options.totp ) {
                    const totp = options.totp;
                    let timestamp = Date.now(); // Needed to use the next token
                    timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;
                    code = totp.generate({timestamp});
                } else if( options.mail2fa ){
                    code = await retrieveEmailCode(test, page, mailBuffer);
                }

                await page.getByLabel(/Verification code/).fill(code);
                await page.getByRole('button', { name: 'Continue' }).click();
            });
        }

        await test.step('Unlock vault', async () => {
            await expect(page).toHaveTitle('Vaultwarden Web');
            await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible();
            await page.getByLabel('Master password').fill(user.password);
            await page.getByRole('button', { name: 'Unlock' }).click();
        });

        await test.step('Default vault page', async () => {
            await expect(page).toHaveTitle(/Vaultwarden Web/);
            await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
        });

        if( mailBuffer ){
            await test.step('Check email', async () => {
                await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
            });
        }
    });
}
55
playwright/tests/setups/user.ts
Normal file
@@ -0,0 +1,55 @@
import { expect, type Browser, Page } from '@playwright/test';

import { type MailBuffer } from 'maildev';

import * as utils from '../../global-utils';

export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) {
    await test.step(`Create user ${user.name}`, async () => {
        await utils.cleanLanding(page);

        await page.getByRole('link', { name: 'Create account' }).click();

        // Back to Vault create account
        await expect(page).toHaveTitle(/Create account | Vaultwarden Web/);
        await page.getByLabel(/Email address/).fill(user.email);
        await page.getByLabel('Name').fill(user.name);
        await page.getByRole('button', { name: 'Continue' }).click();

        // Vault finish Creation
        await page.getByLabel('New master password (required)', { exact: true }).fill(user.password);
        await page.getByLabel('Confirm new master password (').fill(user.password);
        await page.getByRole('button', { name: 'Create account' }).click();

        await utils.checkNotification(page, 'Your new account has been created');

        // We are now in the default vault page
        await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
        await utils.checkNotification(page, 'You have been logged in!');

        if( mailBuffer ){
            await mailBuffer.expect((m) => m.subject === "Welcome");
            await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
        }
    });
}

export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) {
    await test.step(`Log user ${user.email}`, async () => {
        await utils.cleanLanding(page);

        await page.getByLabel(/Email address/).fill(user.email);
        await page.getByRole('button', { name: 'Continue' }).click();

        // Unlock page
        await page.getByLabel('Master password').fill(user.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();

        // We are now in the default vault page
        await expect(page).toHaveTitle(/Vaultwarden Web/);

        if( mailBuffer ){
            await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
        }
    });
}
53
playwright/tests/sso_login.smtp.spec.ts
Normal file
@@ -0,0 +1,53 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import { logNewUser, logUser } from './setups/sso';
import { activateEmail, disableEmail } from './setups/2fa';
import * as utils from "../global-utils";

let users = utils.loadEnv();

let mailserver;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailserver = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })

    await mailserver.listen();

    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: false,
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    if( mailserver ){
        await mailserver.close();
    }
});

test('Create and activate 2FA', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logNewUser(test, page, users.user1, {mailBuffer: mailBuffer});

    await activateEmail(test, page, users.user1, mailBuffer);

    mailBuffer.close();
});

test('Log and disable', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);

    await logUser(test, page, users.user1, {mailBuffer: mailBuffer, mail2fa: true});

    await disableEmail(test, page, users.user1);

    mailBuffer.close();
});
94
playwright/tests/sso_login.spec.ts
Normal file
@@ -0,0 +1,94 @@
import { test, expect, type TestInfo } from '@playwright/test';

import { logNewUser, logUser } from './setups/sso';
import { activateTOTP, disableTOTP } from './setups/2fa';
import * as utils from "../global-utils";

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: false
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Account creation using SSO', async ({ page }) => {
    // Landing page
    await logNewUser(test, page, users.user1);
});

test('SSO login', async ({ page }) => {
    await logUser(test, page, users.user1);
});

test('Non SSO login', async ({ page }) => {
    // Landing page
    await page.goto('/');
    await page.getByLabel(/Email address/).fill(users.user1.email);
    await page.getByRole('button', { name: 'Continue' }).click();

    // Unlock page
    await page.getByLabel('Master password').fill(users.user1.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();

    // We are now in the default vault page
    await expect(page).toHaveTitle(/Vaultwarden Web/);
});

test('SSO login with TOTP 2fa', async ({ page }) => {
    await logUser(test, page, users.user1);

    let totp = await activateTOTP(test, page, users.user1);

    await logUser(test, page, users.user1, { totp });

    await disableTOTP(test, page, users.user1);
});

test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => {
    await utils.restartVault(page, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: true
    }, false);

    // Landing page
    await page.goto('/');
    await page.getByLabel(/Email address/).fill(users.user1.email);

    // Check that SSO login is available
    await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1);

    await page.getByLabel(/Email address/).fill(users.user1.email);
    await page.getByRole('button', { name: 'Continue' }).click();

    // Unlock page
    await page.getByLabel('Master password').fill(users.user1.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();

    // An error should appear
    await expect(page.getByLabel('SSO sign-in is required')).toBeVisible();
});


test('No SSO login', async ({ page }, testInfo: TestInfo) => {
    await utils.restartVault(page, testInfo, {
        SSO_ENABLED: false
    }, false);

    // Landing page
    await page.goto('/');
    await page.getByLabel(/Email address/).fill(users.user1.email);

    // No SSO button (rely on a correct selector checked in previous test)
    await page.getByLabel('Master password');
    await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0);

    // Can continue to Master password
    await page.getByRole('button', { name: 'Continue' }).click();
    await expect(page.getByRole('button', { name: /Log in with master password/ })).toHaveCount(1);
});
121
playwright/tests/sso_organization.smtp.spec.ts
Normal file
@@ -0,0 +1,121 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';

let users = utils.loadEnv();

let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailServer = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })

    await mailServer.listen();

    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
        SSO_ENABLED: true,
        SSO_ONLY: true,
    });

    mail1Buffer = mailServer.buffer(users.user1.email);
    mail2Buffer = mailServer.buffer(users.user2.email);
    mail3Buffer = mailServer.buffer(users.user3.email);
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});

test('Create user3', async ({ page }) => {
    await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer });
});

test('Invite users', async ({ page }) => {
    await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer });

    await orgs.create(test, page, '/Test');
    await orgs.members(test, page, '/Test');
    await orgs.invite(test, page, '/Test', users.user2.email);
    await orgs.invite(test, page, '/Test', users.user3.email);
});

test('invited with new account', async ({ page }) => {
    const link = await test.step('Extract email link', async () => {
        const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test");
        await page.setContent(invited.html);
        return await page.getByTestId("invite").getAttribute("href");
    });

    await test.step('Redirect to Keycloak', async () => {
        await page.goto(link);
    });

    await test.step('Keycloak login', async () => {
        await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
        await page.getByLabel(/Username/).fill(users.user2.name);
        await page.getByLabel('Password', { exact: true }).fill(users.user2.password);
        await page.getByRole('button', { name: 'Sign In' }).click();
    });

    await test.step('Create Vault account', async () => {
        await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
        await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password);
        await page.getByLabel('Confirm new master password (').fill(users.user2.password);
        await page.getByRole('button', { name: 'Create account' }).click();
    });

    await test.step('Default vault page', async () => {
        await expect(page).toHaveTitle(/Vaultwarden Web/);

        await utils.checkNotification(page, 'Account successfully created!');
        await utils.checkNotification(page, 'Invitation accepted');
    });

    await test.step('Check mails', async () => {
        await mail2Buffer.expect((m) => m.subject.includes("New Device Logged"));
        await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
    });
});

test('invited with existing account', async ({ page }) => {
    const link = await test.step('Extract email link', async () => {
        const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test");
        await page.setContent(invited.html);
        return await page.getByTestId("invite").getAttribute("href");
    });

    await test.step('Redirect to Keycloak', async () => {
        await page.goto(link);
    });

    await test.step('Keycloak login', async () => {
        await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
        await page.getByLabel(/Username/).fill(users.user3.name);
        await page.getByLabel('Password', { exact: true }).fill(users.user3.password);
        await page.getByRole('button', { name: 'Sign In' }).click();
    });

    await test.step('Unlock vault', async () => {
        await expect(page).toHaveTitle('Vaultwarden Web');
        await page.getByLabel('Master password').fill(users.user3.password);
        await page.getByRole('button', { name: 'Unlock' }).click();
    });

    await test.step('Default vault page', async () => {
        await expect(page).toHaveTitle(/Vaultwarden Web/);
        await utils.checkNotification(page, 'Invitation accepted');
    });

    await test.step('Check mails', async () => {
        await mail3Buffer.expect((m) => m.subject.includes("New Device Logged"));
        await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
    });
});
76
playwright/tests/sso_organization.spec.ts
Normal file
@@ -0,0 +1,76 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';

import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';

let users = utils.loadEnv();

test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {
        SSO_ENABLED: true,
        SSO_ONLY: true,
    });
});

test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});

test('Create user3', async ({ page }) => {
    await logNewUser(test, page, users.user3);
});

test('Invite users', async ({ page }) => {
    await logNewUser(test, page, users.user1);

    await orgs.create(test, page, '/Test');
    await orgs.members(test, page, '/Test');
    await orgs.invite(test, page, '/Test', users.user2.email);
    await orgs.invite(test, page, '/Test', users.user3.email);
    await orgs.confirm(test, page, '/Test', users.user3.email);
});

test('Create invited account', async ({ page }) => {
    await logNewUser(test, page, users.user2);
});

test('Confirm invited user', async ({ page }) => {
    await logUser(test, page, users.user1);
    await orgs.members(test, page, '/Test');
    await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/);
    await orgs.confirm(test, page, '/Test', users.user2.email);
});

test('Organization is visible', async ({ page }) => {
    await logUser(test, page, users.user2);
    await page.getByLabel('vault: /Test').click();
    await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});

test('Enforce password policy', async ({ page }) => {
    await logUser(test, page, users.user1);
    await orgs.policies(test, page, '/Test');

    await test.step(`Set master password policy`, async () => {
        await page.getByRole('button', { name: 'Master password requirements' }).click();
        await page.getByRole('checkbox', { name: 'Turn on' }).check();
        await page.getByRole('checkbox', { name: 'Require existing members to' }).check();
        await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42');
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'Edited policy Master password requirements.');
    });

    await utils.logout(test, page, users.user1);

    await test.step(`Unlock trigger policy`, async () => {
        await page.getByRole('textbox', { name: 'Email address (required)' }).fill(users.user1.email);
        await page.getByRole('button', { name: 'Use single sign-on' }).click();

        await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password);
        await page.getByRole('button', { name: 'Unlock' }).click();

        await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible();
    });
});
rust-toolchain.toml
@@ -1,4 +1,4 @@
[toolchain]
channel = "1.84.1"
channel = "1.89.0"
components = [ "rustfmt", "clippy" ]
profile = "minimal"
src/api/admin.rs (131 changed lines)
@@ -46,6 +46,7 @@ pub fn routes() -> Vec<Route> {
        invite_user,
        logout,
        delete_user,
        delete_sso_user,
        deauth_user,
        disable_user,
        enable_user,
@@ -102,7 +103,7 @@ const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000";
pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000";

fn admin_path() -> String {
    format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
    format!("{}{ADMIN_PATH}", CONFIG.domain_path())
}

#[derive(Debug)]
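The `admin_path()` change above is the inlined-format-args style from Rust 2021: plain identifiers in scope, including constants such as ADMIN_PATH, can be captured directly inside the format string, while expressions like CONFIG.domain_path() still need a positional slot. A minimal sketch of the pattern (names here are illustrative, not from the repo):

const ADMIN_PATH: &str = "/admin";

fn admin_url(domain_path: &str) -> String {
    // Before: format!("{}{}", domain_path, ADMIN_PATH)
    // After: the identifiers are captured directly in the format string.
    format!("{domain_path}{ADMIN_PATH}")
}

fn main() {
    assert_eq!(admin_url("https://vault.example.com"), "https://vault.example.com/admin");
}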
@@ -206,7 +207,7 @@ fn post_admin_login(

    cookies.add(cookie);
    if let Some(redirect) = redirect {
        Ok(Redirect::to(format!("{}{}", admin_path(), redirect)))
        Ok(Redirect::to(format!("{}{redirect}", admin_path())))
    } else {
        Err(AdminResponse::Ok(render_admin_page()))
    }
@@ -239,6 +240,7 @@ struct AdminTemplateData {
    page_data: Option<Value>,
    logged_in: bool,
    urlpath: String,
    sso_enabled: bool,
}

impl AdminTemplateData {
@@ -248,6 +250,7 @@ impl AdminTemplateData {
            page_data: Some(page_data),
            logged_in: true,
            urlpath: CONFIG.domain_path(),
            sso_enabled: CONFIG.sso_enabled(),
        }
    }

@@ -296,7 +299,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon
        err_code!("User already exists", Status::Conflict.code)
    }

    let mut user = User::new(data.email);
    let mut user = User::new(data.email, None);

async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
    if CONFIG.mail_enabled() {
@@ -336,7 +339,7 @@ fn logout(cookies: &CookieJar<'_>) -> Redirect {
async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
    let users = User::get_all(&mut conn).await;
    let mut users_json = Vec::with_capacity(users.len());
    for u in users {
    for (u, _) in users {
        let mut usr = u.to_json(&mut conn).await;
        usr["userEnabled"] = json!(u.enabled);
        usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
@@ -354,7 +357,7 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
    let users = User::get_all(&mut conn).await;
    let mut users_json = Vec::with_capacity(users.len());
    for u in users {
    for (u, sso_u) in users {
        let mut usr = u.to_json(&mut conn).await;
        usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await);
        usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await);
@@ -365,6 +368,9 @@ async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<
            Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
            None => json!("Never"),
        };

        usr["sso_identifier"] = json!(sso_u.map(|u| u.identifier.to_string()).unwrap_or(String::new()));

        users_json.push(usr);
    }

@@ -417,17 +423,38 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em
    res
}

#[delete("/users/<user_id>/sso", format = "application/json")]
async fn delete_sso_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult {
    let memberships = Membership::find_any_state_by_user(&user_id, &mut conn).await;
    let res = SsoUser::delete(&user_id, &mut conn).await;

    for membership in memberships {
        log_event(
            EventType::OrganizationUserUnlinkedSso as i32,
            &membership.uuid,
            &membership.org_uuid,
            &ACTING_ADMIN_USER.into(),
            14, // Use UnknownBrowser type
            &token.ip.ip,
            &mut conn,
        )
        .await;
    }

    res
}

#[post("/users/<user_id>/deauth", format = "application/json")]
async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
    let mut user = get_user_or_404(&user_id, &mut conn).await?;

    nt.send_logout(&user, None).await;
    nt.send_logout(&user, None, &mut conn).await;

    if CONFIG.push_enabled() {
        for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
            match unregister_push_device(device.push_uuid).await {
            match unregister_push_device(&device.push_uuid).await {
                Ok(r) => r,
                Err(e) => error!("Unable to unregister devices from Bitwarden server: {}", e),
                Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"),
            };
        }
    }
@@ -447,7 +474,7 @@ async fn disable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt:

    let save_result = user.save(&mut conn).await;

    nt.send_logout(&user, None).await;
    nt.send_logout(&user, None, &mut conn).await;

    save_result
}
@@ -591,20 +618,14 @@ struct GitCommit {
    sha: String,
}

#[derive(Deserialize)]
struct TimeApi {
    year: u16,
    month: u8,
    day: u8,
    hour: u8,
    minute: u8,
    seconds: u8,
}

async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
    Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.json::<T>().await?)
}

async fn get_text_api(url: &str) -> Result<String, Error> {
    Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.text().await?)
}

async fn has_http_access() -> bool {
    let Ok(req) = make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") else {
        return false;
@@ -616,10 +637,12 @@ async fn has_http_access() -> bool {
}

use cached::proc_macro::cached;
/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already.
/// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit.
#[cached(time = 300, sync_writes = true)]
async fn get_release_info(has_http_access: bool, running_within_container: bool) -> (String, String, String) {
/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already
/// It will cache this function for 600 seconds (10 minutes) which should prevent the exhaustion of the rate limit
/// Any cache will be lost if Vaultwarden is restarted
use std::time::Duration; // Needed for cached
#[cached(time = 600, sync_writes = "default")]
async fn get_release_info(has_http_access: bool) -> (String, String, String) {
    // If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway.
    if has_http_access {
        (
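Two things change on the `#[cached]` attribute above: the TTL doubles to 600 seconds, and `sync_writes = true` becomes `sync_writes = "default"`, matching newer releases of the `cached` crate where this option takes string values. A hedged sketch of memoizing an async function this way (function name and values are illustrative, not from the repo; assumes the crate's proc-macro and async support are enabled):

use std::time::Duration; // referenced by the macro's generated code
use cached::proc_macro::cached;

// The result is cached for 60 seconds. With sync_writes = "default",
// concurrent callers that miss the cache wait for a single writer instead
// of all hitting the upstream API at once.
#[cached(time = 60, sync_writes = "default")]
async fn fetch_latest_tag(repo: String) -> String {
    // The real code would perform an HTTP call here; stubbed for the sketch.
    format!("v1.0.0-{repo}")
}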
@@ -636,19 +659,13 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool)
            }
            _ => "-".to_string(),
        },
        // Do not fetch the web-vault version when running within a container.
        // Do not fetch the web-vault version when running within a container
        // The web-vault version is embedded within the container itself, and should not be updated manually
        if running_within_container {
            "-".to_string()
        } else {
            match get_json_api::<GitRelease>(
                "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
            )
        match get_json_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest")
            .await
            {
                Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
                _ => "-".to_string(),
            }
        {
            Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
            _ => "-".to_string(),
        },
    )
} else {
@@ -658,17 +675,18 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool)

async fn get_ntp_time(has_http_access: bool) -> String {
    if has_http_access {
        if let Ok(ntp_time) = get_json_api::<TimeApi>("https://www.timeapi.io/api/Time/current/zone?timeZone=UTC").await
        {
            return format!(
                "{year}-{month:02}-{day:02} {hour:02}:{minute:02}:{seconds:02} UTC",
                year = ntp_time.year,
                month = ntp_time.month,
                day = ntp_time.day,
                hour = ntp_time.hour,
                minute = ntp_time.minute,
                seconds = ntp_time.seconds
            );
        if let Ok(cf_trace) = get_text_api("https://cloudflare.com/cdn-cgi/trace").await {
            for line in cf_trace.lines() {
                if let Some((key, value)) = line.split_once('=') {
                    if key == "ts" {
                        let ts = value.split_once('.').map_or(value, |(s, _)| s);
                        if let Ok(dt) = chrono::DateTime::parse_from_str(ts, "%s") {
                            return dt.format("%Y-%m-%d %H:%M:%S UTC").to_string();
                        }
                        break;
                    }
                }
            }
        }
    }
    String::from("Unable to fetch NTP time.")
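The time check above drops the timeapi.io JSON endpoint in favor of Cloudflare's plain-text /cdn-cgi/trace, whose `ts=` line carries a fractional Unix timestamp; the fraction is cut off and the seconds are parsed with chrono's `%s` specifier. A self-contained sketch of the same parsing, runnable against a captured trace body (the sample values are made up):

use chrono::DateTime;

fn utc_from_trace(cf_trace: &str) -> Option<String> {
    for line in cf_trace.lines() {
        if let Some((key, value)) = line.split_once('=') {
            if key == "ts" {
                // "1757400000.123" -> "1757400000"
                let secs = value.split_once('.').map_or(value, |(s, _)| s);
                let dt = DateTime::parse_from_str(secs, "%s").ok()?;
                return Some(dt.format("%Y-%m-%d %H:%M:%S UTC").to_string());
            }
        }
    }
    None
}

fn main() {
    let sample = "fl=123abc\nts=1757400000.123\ncolo=AMS";
    assert_eq!(utc_from_trace(sample).as_deref(), Some("2025-09-09 06:40:00 UTC"));
}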
@@ -693,14 +711,23 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
        _ => "Unable to resolve domain name.".to_string(),
    };

    let (latest_release, latest_commit, latest_web_build) =
        get_release_info(has_http_access, running_within_container).await;
    let (latest_release, latest_commit, latest_web_build) = get_release_info(has_http_access).await;

    let ip_header_name = &ip_header.0.unwrap_or_default();

    // Get current running versions
    let web_vault_version = get_web_vault_version();

    // Check if the running version is newer than the latest stable released version
    let web_vault_pre_release = if let Ok(web_ver_match) = semver::VersionReq::parse(&format!(">{latest_web_build}")) {
        web_ver_match.matches(
            &semver::Version::parse(&web_vault_version).unwrap_or_else(|_| semver::Version::parse("2025.1.1").unwrap()),
        )
    } else {
        error!("Unable to parse latest_web_build: '{latest_web_build}'");
        false
    };

    let diagnostics_json = json!({
        "dns_resolved": dns_resolved,
        "current_release": VERSION,
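The new `web_vault_pre_release` flag is a semver comparison: the latest published build becomes a ">latest" requirement, and the running web-vault version is tested against it. A small sketch of the same check (values are illustrative; the "2025.1.1" fallback mirrors the unwrap_or_else above):

use semver::{Version, VersionReq};

fn is_pre_release(running: &str, latest: &str) -> bool {
    match VersionReq::parse(&format!(">{latest}")) {
        Ok(req) => {
            req.matches(&Version::parse(running).unwrap_or_else(|_| Version::parse("2025.1.1").unwrap()))
        }
        // An unparsable latest build is treated as "not a pre-release".
        Err(_) => false,
    }
}

fn main() {
    assert!(is_pre_release("2025.3.0", "2025.2.1"));
    assert!(!is_pre_release("2025.2.1", "2025.2.1"));
}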
@@ -709,6 +736,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
        "web_vault_enabled": &CONFIG.web_vault_enabled(),
        "web_vault_version": web_vault_version,
        "latest_web_build": latest_web_build,
        "web_vault_pre_release": web_vault_pre_release,
        "running_within_container": running_within_container,
        "container_base_image": if running_within_container { container_base_image() } else { "Not applicable" },
        "has_http_access": has_http_access,
@@ -724,6 +752,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
        "overrides": &CONFIG.get_overrides().join(", "),
        "host_arch": env::consts::ARCH,
        "host_os": env::consts::OS,
        "tz_env": env::var("TZ").unwrap_or_default(),
        "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
        "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference
        "ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference
@@ -745,17 +774,17 @@ fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult {
}

#[post("/config", format = "application/json", data = "<data>")]
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
async fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
    let data: ConfigBuilder = data.into_inner();
    if let Err(e) = CONFIG.update_config(data, true) {
    if let Err(e) = CONFIG.update_config(data, true).await {
        err!(format!("Unable to save config: {e:?}"))
    }
    Ok(())
}

#[post("/config/delete", format = "application/json")]
fn delete_config(_token: AdminToken) -> EmptyResult {
    if let Err(e) = CONFIG.delete_user_config() {
async fn delete_config(_token: AdminToken) -> EmptyResult {
    if let Err(e) = CONFIG.delete_user_config().await {
        err!(format!("Unable to delete config: {e:?}"))
    }
    Ok(())
src/api/core/accounts.rs
@@ -7,9 +7,9 @@ use serde_json::Value;

use crate::{
    api::{
        core::{log_user_event, two_factor::email},
        register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify,
        PasswordOrOtpData, UpdateType,
        core::{accept_org_invite, log_user_event, two_factor::email},
        master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult,
        JsonResult, Notify, PasswordOrOtpData, UpdateType,
    },
    auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers},
    crypto,
@@ -34,6 +34,7 @@ pub fn routes() -> Vec<rocket::Route> {
        get_public_keys,
        post_keys,
        post_password,
        post_set_password,
        post_kdf,
        post_rotatekey,
        post_sstamp,
@@ -68,20 +69,53 @@ pub fn routes() -> Vec<rocket::Route> {

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterData {
    email: String,
    kdf: Option<i32>,
    kdf_iterations: Option<i32>,
pub struct KDFData {
    kdf: i32,
    kdf_iterations: i32,
    kdf_memory: Option<i32>,
    kdf_parallelism: Option<i32>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterData {
    email: String,

    #[serde(flatten)]
    kdf: KDFData,

    #[serde(alias = "userSymmetricKey")]
    key: String,
    #[serde(alias = "userAsymmetricKeys")]
    keys: Option<KeysData>,

    master_password_hash: String,
    master_password_hint: Option<String>,

    name: Option<String>,

    #[allow(dead_code)]
    organization_user_id: Option<MembershipId>,

    // Used only from the register/finish endpoint
    email_verification_token: Option<String>,
    accept_emergency_access_id: Option<EmergencyAccessId>,
    accept_emergency_access_invite_token: Option<String>,
    #[serde(alias = "token")]
    org_invite_token: Option<String>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SetPasswordData {
    #[serde(flatten)]
    kdf: KDFData,

    key: String,
    keys: Option<KeysData>,
    master_password_hash: String,
    master_password_hint: Option<String>,
    name: Option<String>,
    token: Option<String>,
    #[allow(dead_code)]
    organization_user_id: Option<MembershipId>,
    org_identifier: Option<String>,
}

#[derive(Debug, Deserialize)]
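The refactor above factors the four repeated KDF fields into `KDFData` and embeds it with `#[serde(flatten)]`, so the JSON wire format stays flat and unchanged while the Rust structs stop duplicating fields. A minimal sketch of the mechanism (reduced field set, illustrative names):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct KdfData {
    kdf: i32,
    kdf_iterations: i32,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RegisterPayload {
    email: String,
    // Flattened: the kdf fields still appear at the top level of the JSON object.
    #[serde(flatten)]
    kdf: KdfData,
}

fn main() {
    // The same flat JSON a client sent before the refactor still parses.
    let payload: RegisterPayload =
        serde_json::from_str(r#"{"email":"a@example.com","kdf":0,"kdfIterations":600000}"#).unwrap();
    assert_eq!(payload.kdf.kdf_iterations, 600_000);
}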
@@ -115,22 +149,86 @@ async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &mut DbCon
    if CONFIG.email_2fa_enforce_on_verified_invite() {
        return true;
    }
    if member_id.is_some() {
        return OrgPolicy::is_enabled_for_member(&member_id.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn)
            .await;
    if let Some(member_id) = member_id {
        return OrgPolicy::is_enabled_for_member(&member_id, OrgPolicyType::TwoFactorAuthentication, conn).await;
    }
    false
}

#[post("/accounts/register", data = "<data>")]
async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
    _register(data, conn).await
    _register(data, false, conn).await
}

pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult {
    let data: RegisterData = data.into_inner();
pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut conn: DbConn) -> JsonResult {
    let mut data: RegisterData = data.into_inner();
    let email = data.email.to_lowercase();

    let mut email_verified = false;

    let mut pending_emergency_access = None;

    // First, validate the provided verification tokens
    if email_verification {
        match (
            &data.email_verification_token,
            &data.accept_emergency_access_id,
            &data.accept_emergency_access_invite_token,
            &data.organization_user_id,
            &data.org_invite_token,
        ) {
            // Normal user registration, when email verification is required
            (Some(email_verification_token), None, None, None, None) => {
                let claims = crate::auth::decode_register_verify(email_verification_token)?;
                if claims.sub != data.email {
                    err!("Email verification token does not match email");
                }

                // During this call we don't get the name, so extract it from the claims
                if claims.name.is_some() {
                    data.name = claims.name;
                }
                email_verified = claims.verified;
            }
            // Emergency access registration
            (None, Some(accept_emergency_access_id), Some(accept_emergency_access_invite_token), None, None) => {
                if !CONFIG.emergency_access_allowed() {
                    err!("Emergency access is not enabled.")
                }

                let claims = crate::auth::decode_emergency_access_invite(accept_emergency_access_invite_token)?;

                if claims.email != data.email {
                    err!("Claim email does not match email")
                }
                if &claims.emer_id != accept_emergency_access_id {
                    err!("Claim emer_id does not match accept_emergency_access_id")
                }

                pending_emergency_access = Some((accept_emergency_access_id, claims));
                email_verified = true;
            }
            // Org invite
            (None, None, None, Some(organization_user_id), Some(org_invite_token)) => {
                let claims = decode_invite(org_invite_token)?;

                if claims.email != data.email {
                    err!("Claim email does not match email")
                }

                if &claims.member_id != organization_user_id {
                    err!("Claim org_user_id does not match organization_user_id")
                }

                email_verified = true;
            }

            _ => {
                err!("Registration is missing required parameters")
            }
        }
    }

    // Check if the length of the username exceeds 50 characters (same as upstream Bitwarden)
    // This also prevents issues with very long usernames causing too-large JWTs. See #2419
    if let Some(ref name) = data.name {
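The tuple match above makes the three registration flows mutually exclusive: exactly one flow's tokens may be present, and every other combination falls through to a single error arm. A reduced sketch of the dispatch shape (flow names are illustrative, not from the repo):

enum Flow {
    EmailVerify,
    EmergencyAccess,
    OrgInvite,
}

fn classify(
    email_token: Option<&str>,
    emer_id: Option<&str>,
    emer_token: Option<&str>,
    member_id: Option<&str>,
    org_token: Option<&str>,
) -> Result<Flow, &'static str> {
    // Each arm names exactly the tokens its flow requires; everything else is rejected.
    match (email_token, emer_id, emer_token, member_id, org_token) {
        (Some(_), None, None, None, None) => Ok(Flow::EmailVerify),
        (None, Some(_), Some(_), None, None) => Ok(Flow::EmergencyAccess),
        (None, None, None, Some(_), Some(_)) => Ok(Flow::OrgInvite),
        _ => Err("Registration is missing required parameters"),
    }
}

fn main() {
    assert!(matches!(classify(Some("jwt"), None, None, None, None), Ok(Flow::EmailVerify)));
    assert!(classify(Some("jwt"), None, None, Some("m1"), None).is_err());
}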
@@ -144,29 +242,23 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
    let password_hint = clean_password_hint(&data.master_password_hint);
    enforce_password_hint_setting(&password_hint)?;

    let mut verified_by_invite = false;

    let mut user = match User::find_by_mail(&email, &mut conn).await {
        Some(mut user) => {
        Some(user) => {
            if !user.password_hash.is_empty() {
                err!("Registration not allowed or user already exists")
            }

            if let Some(token) = data.token {
            if let Some(token) = data.org_invite_token {
                let claims = decode_invite(&token)?;
                if claims.email == email {
                    // Verify the email address when signing up via a valid invite token
                    verified_by_invite = true;
                    user.verified_at = Some(Utc::now().naive_utc());
                    email_verified = true;
                    user
                } else {
                    err!("Registration email does not match invite email")
                }
            } else if Invitation::take(&email, &mut conn).await {
                for membership in Membership::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() {
                    membership.status = MembershipStatus::Accepted as i32;
                    membership.save(&mut conn).await?;
                }
                Membership::accept_user_invitations(&user.uuid, &mut conn).await?;
                user
            } else if CONFIG.is_signup_allowed(&email)
                || (CONFIG.emergency_access_allowed()
@@ -181,8 +273,11 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
            // Order is important here; the invitation check must come first
            // because the vaultwarden admin can invite anyone, regardless
            // of other signup restrictions.
            if Invitation::take(&email, &mut conn).await || CONFIG.is_signup_allowed(&email) {
                User::new(email.clone())
            if Invitation::take(&email, &mut conn).await
                || CONFIG.is_signup_allowed(&email)
                || pending_emergency_access.is_some()
            {
                User::new(email.clone(), None)
            } else {
                err!("Registration not allowed or user already exists")
            }
@@ -192,16 +287,7 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
    // Make sure we don't leave a lingering invitation.
    Invitation::take(&email, &mut conn).await;

    if let Some(client_kdf_type) = data.kdf {
        user.client_kdf_type = client_kdf_type;
    }

    if let Some(client_kdf_iter) = data.kdf_iterations {
        user.client_kdf_iter = client_kdf_iter;
    }

    user.client_kdf_memory = data.kdf_memory;
    user.client_kdf_parallelism = data.kdf_parallelism;
    set_kdf_data(&mut user, data.kdf)?;

    user.set_password(&data.master_password_hash, Some(data.key), true, None);
    user.password_hint = password_hint;
@@ -216,17 +302,21 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
        user.public_key = Some(keys.public_key);
    }

    if email_verified {
        user.verified_at = Some(Utc::now().naive_utc());
    }

    if CONFIG.mail_enabled() {
        if CONFIG.signups_verify() && !verified_by_invite {
        if CONFIG.signups_verify() && !email_verified {
            if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await {
                error!("Error sending welcome email: {:#?}", e);
                error!("Error sending welcome email: {e:#?}");
            }
            user.last_verifying_at = Some(user.created_at);
        } else if let Err(e) = mail::send_welcome(&user.email).await {
            error!("Error sending welcome email: {:#?}", e);
            error!("Error sending welcome email: {e:#?}");
        }

        if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await {
        if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
            email::activate_email_2fa(&user, &mut conn).await.ok();
        }
    }
@@ -246,6 +336,68 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
    })))
}

#[post("/accounts/set-password", data = "<data>")]
async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult {
    let data: SetPasswordData = data.into_inner();
    let mut user = headers.user;

    if user.private_key.is_some() {
        err!("Account already initialized, cannot set password")
    }

    // Check against the password hint setting here so if it fails,
    // the user can retry without losing their invitation below.
    let password_hint = clean_password_hint(&data.master_password_hint);
    enforce_password_hint_setting(&password_hint)?;

    set_kdf_data(&mut user, data.kdf)?;

    user.set_password(
        &data.master_password_hash,
        Some(data.key),
        false,
        Some(vec![String::from("revision_date")]), // We need to allow revision-date to use the old security_timestamp
    );
    user.password_hint = password_hint;

    if let Some(keys) = data.keys {
        user.private_key = Some(keys.encrypted_private_key);
        user.public_key = Some(keys.public_key);
    }

    if let Some(identifier) = data.org_identifier {
        if identifier != crate::sso::FAKE_IDENTIFIER {
            let org = match Organization::find_by_name(&identifier, &mut conn).await {
                None => err!("Failed to retrieve the associated organization"),
                Some(org) => org,
            };

            let membership = match Membership::find_by_user_and_org(&user.uuid, &org.uuid, &mut conn).await {
                None => err!("Failed to retrieve the invitation"),
                Some(org) => org,
            };

            accept_org_invite(&user, membership, None, &mut conn).await?;
        }
    }

    if CONFIG.mail_enabled() {
        mail::send_welcome(&user.email.to_lowercase()).await?;
    } else {
        Membership::accept_user_invitations(&user.uuid, &mut conn).await?;
    }

    log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn)
        .await;

    user.save(&mut conn).await?;

    Ok(Json(json!({
        "Object": "set-password",
        "CaptchaBypassToken": "",
    })))
}

#[get("/accounts/profile")]
async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
    Json(headers.user.to_json(&mut conn).await)
@@ -255,7 +407,6 @@ async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
#[serde(rename_all = "camelCase")]
struct ProfileData {
    // culture: String, // Ignored, always use en-US
    // masterPasswordHint: Option<String>, // Ignored, has been moved to ChangePassData
    name: String,
}

@@ -381,7 +532,7 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D
    // Prevent logging out the client where the user requested this endpoint from.
    // If you do logout the user it will cause issues at the client side.
    // Adding the device uuid will prevent this.
    nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
    nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;

    save_result
}
@@ -389,25 +540,15 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
    kdf: i32,
    kdf_iterations: i32,
    kdf_memory: Option<i32>,
    kdf_parallelism: Option<i32>,
    #[serde(flatten)]
    kdf: KDFData,

    master_password_hash: String,
    new_master_password_hash: String,
    key: String,
}

#[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
    let data: ChangeKdfData = data.into_inner();
    let mut user = headers.user;

    if !user.check_valid_password(&data.master_password_hash) {
        err!("Invalid password")
    }

fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult {
    if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
        err!("PBKDF2 KDF iterations must be at least 100000.")
    }
@@ -438,10 +579,25 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn,
    }
    user.client_kdf_iter = data.kdf_iterations;
    user.client_kdf_type = data.kdf;

    Ok(())
}

#[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
    let data: ChangeKdfData = data.into_inner();
    let mut user = headers.user;

    if !user.check_valid_password(&data.master_password_hash) {
        err!("Invalid password")
    }

    set_kdf_data(&mut user, data.kdf)?;

    user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
    let save_result = user.save(&mut conn).await;

    nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
    nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;

    save_result
}
@@ -476,14 +632,45 @@ use super::sends::{update_send_from_data, SendData};
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct KeyData {
    account_unlock_data: RotateAccountUnlockData,
    account_keys: RotateAccountKeys,
    account_data: RotateAccountData,
    old_master_key_authentication_hash: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RotateAccountUnlockData {
    emergency_access_unlock_data: Vec<UpdateEmergencyAccessData>,
    master_password_unlock_data: MasterPasswordUnlockData,
    organization_account_recovery_unlock_data: Vec<UpdateResetPasswordData>,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct MasterPasswordUnlockData {
    kdf_type: i32,
    kdf_iterations: i32,
    kdf_parallelism: Option<i32>,
    kdf_memory: Option<i32>,
    email: String,
    master_key_authentication_hash: String,
    master_key_encrypted_user_key: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RotateAccountKeys {
    user_key_encrypted_account_private_key: String,
    account_public_key: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RotateAccountData {
    ciphers: Vec<CipherData>,
    folders: Vec<UpdateFolderData>,
    sends: Vec<SendData>,
    emergency_access_keys: Vec<UpdateEmergencyAccessData>,
    reset_password_keys: Vec<UpdateResetPasswordData>,
    key: String,
    master_password_hash: String,
    private_key: String,
}

fn validate_keydata(
@@ -493,10 +680,24 @@ fn validate_keydata(
    existing_emergency_access: &[EmergencyAccess],
    existing_memberships: &[Membership],
    existing_sends: &[Send],
    user: &User,
) -> EmptyResult {
    if user.client_kdf_type != data.account_unlock_data.master_password_unlock_data.kdf_type
        || user.client_kdf_iter != data.account_unlock_data.master_password_unlock_data.kdf_iterations
        || user.client_kdf_memory != data.account_unlock_data.master_password_unlock_data.kdf_memory
        || user.client_kdf_parallelism != data.account_unlock_data.master_password_unlock_data.kdf_parallelism
        || user.email != data.account_unlock_data.master_password_unlock_data.email
    {
        err!("Changing the kdf variant or email is not supported during key rotation");
    }
    if user.public_key.as_ref() != Some(&data.account_keys.account_public_key) {
        err!("Changing the asymmetric keypair is not possible during key rotation")
    }

    // Check that we're correctly rotating all the user's ciphers
    let existing_cipher_ids = existing_ciphers.iter().map(|c| &c.uuid).collect::<HashSet<&CipherId>>();
    let provided_cipher_ids = data
        .account_data
        .ciphers
        .iter()
        .filter(|c| c.organization_id.is_none())
@@ -508,7 +709,8 @@ fn validate_keydata(

    // Check that we're correctly rotating all the user's folders
    let existing_folder_ids = existing_folders.iter().map(|f| &f.uuid).collect::<HashSet<&FolderId>>();
    let provided_folder_ids = data.folders.iter().filter_map(|f| f.id.as_ref()).collect::<HashSet<&FolderId>>();
    let provided_folder_ids =
        data.account_data.folders.iter().filter_map(|f| f.id.as_ref()).collect::<HashSet<&FolderId>>();
    if !provided_folder_ids.is_superset(&existing_folder_ids) {
        err!("All existing folders must be included in the rotation")
    }
@@ -516,8 +718,12 @@ fn validate_keydata(
    // Check that we're correctly rotating all the user's emergency access keys
    let existing_emergency_access_ids =
        existing_emergency_access.iter().map(|ea| &ea.uuid).collect::<HashSet<&EmergencyAccessId>>();
    let provided_emergency_access_ids =
        data.emergency_access_keys.iter().map(|ea| &ea.id).collect::<HashSet<&EmergencyAccessId>>();
    let provided_emergency_access_ids = data
        .account_unlock_data
        .emergency_access_unlock_data
        .iter()
        .map(|ea| &ea.id)
        .collect::<HashSet<&EmergencyAccessId>>();
    if !provided_emergency_access_ids.is_superset(&existing_emergency_access_ids) {
        err!("All existing emergency access keys must be included in the rotation")
    }
@@ -525,15 +731,19 @@ fn validate_keydata(
    // Check that we're correctly rotating all the user's reset password keys
    let existing_reset_password_ids =
        existing_memberships.iter().map(|m| &m.org_uuid).collect::<HashSet<&OrganizationId>>();
    let provided_reset_password_ids =
        data.reset_password_keys.iter().map(|rp| &rp.organization_id).collect::<HashSet<&OrganizationId>>();
    let provided_reset_password_ids = data
        .account_unlock_data
        .organization_account_recovery_unlock_data
        .iter()
        .map(|rp| &rp.organization_id)
        .collect::<HashSet<&OrganizationId>>();
    if !provided_reset_password_ids.is_superset(&existing_reset_password_ids) {
        err!("All existing reset password keys must be included in the rotation")
    }

    // Check that we're correctly rotating all the user's sends
    let existing_send_ids = existing_sends.iter().map(|s| &s.uuid).collect::<HashSet<&SendId>>();
    let provided_send_ids = data.sends.iter().filter_map(|s| s.id.as_ref()).collect::<HashSet<&SendId>>();
    let provided_send_ids = data.account_data.sends.iter().filter_map(|s| s.id.as_ref()).collect::<HashSet<&SendId>>();
    if !provided_send_ids.is_superset(&existing_send_ids) {
        err!("All existing sends must be included in the rotation")
    }
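All of the rotation checks above follow the same superset pattern: collect the ids the user currently owns and the ids in the rotated payload into HashSets, then require the provided set to cover the existing one, so no cipher, folder, emergency-access key, reset-password key, or send can silently be left un-rotated. A minimal sketch of the check:

use std::collections::HashSet;

// Extra provided ids (e.g. newly created items) are fine; missing ones are not.
fn all_rotated(existing: &[&str], provided: &[&str]) -> bool {
    let existing: HashSet<&str> = existing.iter().copied().collect();
    let provided: HashSet<&str> = provided.iter().copied().collect();
    provided.is_superset(&existing)
}

fn main() {
    assert!(all_rotated(&["f1", "f2"], &["f1", "f2", "f3"]));
    assert!(!all_rotated(&["f1", "f2"], &["f1"])); // f2 would be lost
}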
@@ -541,12 +751,12 @@ fn validate_keydata(
    Ok(())
}

#[post("/accounts/key", data = "<data>")]
#[post("/accounts/key-management/rotate-user-account-keys", data = "<data>")]
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
    // TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
    let data: KeyData = data.into_inner();

    if !headers.user.check_valid_password(&data.master_password_hash) {
    if !headers.user.check_valid_password(&data.old_master_key_authentication_hash) {
        err!("Invalid password")
    }

@@ -554,7 +764,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    // Bitwarden does not process the import if there is one item invalid.
    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
    Cipher::validate_cipher_data(&data.ciphers)?;
    Cipher::validate_cipher_data(&data.account_data.ciphers)?;

    let user_id = &headers.user.uuid;

@@ -575,10 +785,11 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
        &existing_emergency_access,
        &existing_memberships,
        &existing_sends,
        &headers.user,
    )?;

    // Update folder data
    for folder_data in data.folders {
    for folder_data in data.account_data.folders {
        // Skip `null` folder id entries.
        // See: https://github.com/bitwarden/clients/issues/8453
        if let Some(folder_id) = folder_data.id {
@@ -592,7 +803,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    }

    // Update emergency access data
    for emergency_access_data in data.emergency_access_keys {
    for emergency_access_data in data.account_unlock_data.emergency_access_unlock_data {
        let Some(saved_emergency_access) =
            existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id)
        else {
@@ -604,7 +815,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    }

    // Update reset password data
    for reset_password_data in data.reset_password_keys {
    for reset_password_data in data.account_unlock_data.organization_account_recovery_unlock_data {
        let Some(membership) =
            existing_memberships.iter_mut().find(|m| m.org_uuid == reset_password_data.organization_id)
        else {
@@ -616,7 +827,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    }

    // Update send data
    for send_data in data.sends {
    for send_data in data.account_data.sends {
        let Some(send) = existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) else {
            err!("Send doesn't exist")
        };
@@ -627,7 +838,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    // Update cipher data
    use super::ciphers::update_cipher_from_data;

    for cipher_data in data.ciphers {
    for cipher_data in data.account_data.ciphers {
        if cipher_data.organization_id.is_none() {
            let Some(saved_cipher) = existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap())
            else {
@@ -644,16 +855,20 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
    // Update user data
    let mut user = headers.user;

    user.akey = data.key;
    user.private_key = Some(data.private_key);
    user.reset_security_stamp();
    user.private_key = Some(data.account_keys.user_key_encrypted_account_private_key);
    user.set_password(
        &data.account_unlock_data.master_password_unlock_data.master_key_authentication_hash,
        Some(data.account_unlock_data.master_password_unlock_data.master_key_encrypted_user_key),
        true,
        None,
    );

    let save_result = user.save(&mut conn).await;

    // Prevent logging out the client where the user requested this endpoint from.
    // If you do logout the user it will cause issues at the client side.
    // Adding the device uuid will prevent this.
    nt.send_logout(&user, Some(headers.device.uuid.clone())).await;
    nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;

    save_result
}
@@ -669,7 +884,7 @@ async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn:
    user.reset_security_stamp();
    let save_result = user.save(&mut conn).await;

    nt.send_logout(&user, None).await;
    nt.send_logout(&user, None, &mut conn).await;

    save_result
}
@@ -695,6 +910,11 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn
    }

    if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
        if CONFIG.mail_enabled() {
            if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
                error!("Error sending change-email-existing email: {e:#?}");
            }
        }
        err!("Email already in use");
    }

@@ -706,10 +926,10 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn

    if CONFIG.mail_enabled() {
        if let Err(e) = mail::send_change_email(&data.new_email, &token).await {
            error!("Error sending change-email email: {:#?}", e);
            error!("Error sending change-email email: {e:#?}");
        }
    } else {
        debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token);
        debug!("Email change request for user ({}) to email ({}) with token ({token})", user.uuid, data.new_email);
    }

    user.email_new = Some(data.new_email);
@@ -777,7 +997,7 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbC

    let save_result = user.save(&mut conn).await;

    nt.send_logout(&user, None).await;
    nt.send_logout(&user, None, &mut conn).await;

    save_result
}
@@ -791,7 +1011,7 @@ async fn post_verify_email(headers: Headers) -> EmptyResult {
    }

    if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await {
        error!("Error sending verify_email email: {:#?}", e);
        error!("Error sending verify_email email: {e:#?}");
    }

    Ok(())
@@ -822,7 +1042,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbC
    user.last_verifying_at = None;
    user.login_verify_count = 0;
    if let Err(e) = user.save(&mut conn).await {
        error!("Error saving email verification: {:#?}", e);
        error!("Error saving email verification: {e:#?}");
    }

    Ok(())
@@ -841,7 +1061,7 @@ async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) ->
    if CONFIG.mail_enabled() {
        if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
            if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
                error!("Error sending delete account email: {:#?}", e);
                error!("Error sending delete account email: {e:#?}");
            }
        }
        Ok(())
@@ -975,23 +1195,38 @@ pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value
    }))
}

// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Auth/Models/Request/Accounts/SecretVerificationRequestModel.cs
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SecretVerificationRequest {
    master_password_hash: String,
}

// Change the KDF Iterations if necessary
pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) -> ApiResult<()> {
    if user.password_iterations < CONFIG.password_iterations() {
        user.password_iterations = CONFIG.password_iterations();
        user.set_password(pwd_hash, None, false, None);

        if let Err(e) = user.save(conn).await {
            error!("Error updating user: {e:#?}");
        }
    }
    Ok(())
}

#[post("/accounts/verify-password", data = "<data>")]
fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers) -> EmptyResult {
async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, mut conn: DbConn) -> JsonResult {
    let data: SecretVerificationRequest = data.into_inner();
    let user = headers.user;
    let mut user = headers.user;

    if !user.check_valid_password(&data.master_password_hash) {
        err!("Invalid password")
    }

    Ok(())
    kdf_upgrade(&mut user, &data.master_password_hash, &mut conn).await?;

    Ok(Json(master_password_policy(&user, &conn).await))
}

async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult {
@@ -1116,19 +1351,14 @@ async fn put_device_token(
        err!(format!("Error: device {device_id} should be present before a token can be assigned"))
    };

    // if the device already has been registered
    if device.is_registered() {
        // check if the new token is the same as the registered token
        if device.push_token.is_some() && device.push_token.unwrap() == token.clone() {
            debug!("Device {} is already registered and token is the same", device_id);
            return Ok(());
        } else {
            // Try to unregister already registered device
            unregister_push_device(device.push_uuid).await.ok();
        }
        // clear the push_uuid
        device.push_uuid = None;
    // Check if the new token is the same as the registered token
    // Although upstream seems to always register a device on login, we do not.
    // Unless this causes issues, let's keep it this way, else we might need to also register on every login.
    if device.push_token.as_ref() == Some(&token) {
        debug!("Device {device_id} for user {} is already registered and token is identical", headers.user.uuid);
        return Ok(());
    }

    device.push_token = Some(token);
    if let Err(e) = device.save(&mut conn).await {
        err!(format!("An error occurred while trying to save the device push token: {e}"));
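The rewritten check above replaces is_some() plus unwrap() plus clone() with a single by-reference comparison of the stored Option against the incoming token. A tiny sketch of the same idea (using as_deref, a close relative of the as_ref call in the diff):

fn token_unchanged(stored: &Option<String>, incoming: &str) -> bool {
    // No clone, no unwrap: a None simply compares unequal.
    stored.as_deref() == Some(incoming)
}

fn main() {
    let stored = Some(String::from("push-token-1"));
    assert!(token_unchanged(&stored, "push-token-1"));
    assert!(!token_unchanged(&None, "push-token-1"));
}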
@@ -1142,16 +1372,19 @@ async fn put_device_token(
#[put("/devices/identifier/<device_id>/clear-token")]
async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult {
    // This only clears push token
    // https://github.com/bitwarden/core/blob/master/src/Api/Controllers/DevicesController.cs#L109
    // https://github.com/bitwarden/core/blob/master/src/Core/Services/Implementations/DeviceService.cs#L37
    // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Controllers/DevicesController.cs#L215
    // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Services/Implementations/DeviceService.cs#L37
    // This is somehow not implemented in any app, added it in case it is required
    // 2025: Also, it looks like it only clears the first found device upstream, which is probably faulty.
    // This is because currently multiple accounts could be on the same device/app and that would cause issues.
    // Vaultwarden removes the push-token for all devices, but this probably means we should also unregister all these devices.
    if !CONFIG.push_enabled() {
        return Ok(());
    }

    if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await {
        Device::clear_push_token_by_uuid(&device_id, &mut conn).await?;
        unregister_push_device(device.push_uuid).await?;
        unregister_push_device(&device.push_uuid).await?;
    }

    Ok(())
@@ -1189,10 +1422,10 @@ async fn post_auth_request(
    };

    // Validate device uuid and type
    match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
        Some(device) if device.atype == client_headers.device_type => {}
    let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
        Some(device) if device.atype == client_headers.device_type => device,
        _ => err!("AuthRequest doesn't exist", "Device verification failed"),
    }
    };

    let mut auth_request = AuthRequest::new(
        user.uuid.clone(),
@@ -1204,7 +1437,7 @@
    );
    auth_request.save(&mut conn).await?;

    nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
    nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &mut conn).await;

    log_user_event(
        EventType::UserRequestedDeviceApproval as i32,
@@ -1279,6 +1512,10 @@ async fn put_auth_request(
        err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
    };

    if headers.device.uuid != data.device_identifier {
        err!("AuthRequest doesn't exist", "Device verification failed")
    }

    if auth_request.approved.is_some() {
        err!("An authentication request with the same device already exists")
    }
@@ -1295,7 +1532,7 @@
    auth_request.save(&mut conn).await?;

    ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
    nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
    nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &mut conn).await;

    log_user_event(
        EventType::OrganizationUserApprovedAuthRequest as i32,
src/api/core/ciphers.rs
@@ -11,10 +11,11 @@ use rocket::{
use serde_json::Value;

use crate::auth::ClientVersion;
use crate::util::NumberOrString;
use crate::util::{save_temp_file, NumberOrString};
use crate::{
    api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType},
    auth::Headers,
    config::PathType,
    crypto,
    db::{models::*, DbConn, DbPool},
    CONFIG,
@@ -77,6 +78,7 @@ pub fn routes() -> Vec<Route> {
        restore_cipher_put,
        restore_cipher_put_admin,
        restore_cipher_selected,
        restore_cipher_selected_admin,
        delete_all,
        move_cipher_selected,
        move_cipher_selected_put,
@@ -105,12 +107,7 @@ struct SyncData {
}

#[get("/sync?<data..>")]
async fn sync(
    data: SyncData,
    headers: Headers,
    client_version: Option<ClientVersion>,
    mut conn: DbConn,
) -> Json<Value> {
async fn sync(data: SyncData, headers: Headers, client_version: Option<ClientVersion>, mut conn: DbConn) -> JsonResult {
    let user_json = headers.user.to_json(&mut conn).await;

    // Get all ciphers which are visible by the user
@@ -134,7 +131,7 @@ async fn sync(
    for c in ciphers {
        ciphers_json.push(
            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn)
                .await,
                .await?,
        );
    }

@@ -159,7 +156,7 @@ async fn sync(
        api::core::_get_eq_domains(headers, true).into_inner()
    };

    Json(json!({
    Ok(Json(json!({
        "profile": user_json,
        "folders": folders_json,
        "collections": collections_json,
@@ -168,11 +165,11 @@ async fn sync(
        "domains": domains_json,
        "sends": sends_json,
        "object": "sync"
    }))
    })))
}

#[get("/ciphers")]
async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
async fn get_ciphers(headers: Headers, mut conn: DbConn) -> JsonResult {
    let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
    let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, CipherSyncType::User, &mut conn).await;

@@ -180,15 +177,15 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
    for c in ciphers {
        ciphers_json.push(
            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn)
                .await,
                .await?,
        );
    }

    Json(json!({
    Ok(Json(json!({
        "data": ciphers_json,
        "object": "list",
        "continuationToken": null
    }))
    })))
}

#[get("/ciphers/<cipher_id>")]
@@ -201,7 +198,7 @@ async fn get_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn) ->
        err!("Cipher is not owned by user")
    }

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[get("/ciphers/<cipher_id>/admin")]
@@ -322,7 +319,7 @@ async fn post_ciphers_create(
    // or otherwise), we can just ignore this field entirely.
    data.cipher.last_known_revision_date = None;

    share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await
    share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt, None).await
}

/// Called when creating a new user-owned cipher.
@@ -339,7 +336,7 @@ async fn post_ciphers(data: Json<CipherData>, headers: Headers, mut conn: DbConn
    let mut cipher = Cipher::new(data.r#type, data.name.clone());
    update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?;

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

/// Enforces the personal ownership policy on user-owned ciphers, if applicable.
@@ -381,7 +378,7 @@ pub async fn update_cipher_from_data(
    if let Some(dt) = data.last_known_revision_date {
        match NaiveDateTime::parse_from_str(&dt, "%+") {
            // ISO 8601 format
            Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err),
            Err(err) => warn!("Error parsing LastKnownRevisionDate '{dt}': {err}"),
            Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => {
                err!("The client copy of this cipher is out of date. Resync the client and try again.")
            }
@@ -535,7 +532,7 @@ pub async fn update_cipher_from_data(
            ut,
            cipher,
            &cipher.update_users_revision(conn).await,
            &headers.device.uuid,
            &headers.device,
            shared_to_collections,
            conn,
        )
@@ -612,7 +609,7 @@ async fn post_ciphers_import(

    let mut user = headers.user;
    user.update_revision(&mut conn).await?;
    nt.send_user_update(UpdateType::SyncVault, &user).await;
    nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;

    Ok(())
}
@@ -676,7 +673,7 @@ async fn put_cipher(

    update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherUpdate).await?;

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[post("/ciphers/<cipher_id>/partial", data = "<data>")]
@@ -714,7 +711,7 @@ async fn put_cipher_partial(
    // Update favorite
    cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?;

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[derive(Deserialize)]
@@ -808,7 +805,7 @@ async fn post_collections_update(
        UpdateType::SyncCipherUpdate,
        &cipher,
        &cipher.update_users_revision(&mut conn).await,
        &headers.device.uuid,
        &headers.device,
        Some(Vec::from_iter(posted_collections)),
        &mut conn,
    )
@@ -825,7 +822,7 @@ async fn post_collections_update(
    )
    .await;

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[put("/ciphers/<cipher_id>/collections-admin", data = "<data>")]
@@ -885,7 +882,7 @@ async fn post_collections_admin(
        UpdateType::SyncCipherUpdate,
        &cipher,
        &cipher.update_users_revision(&mut conn).await,
        &headers.device.uuid,
        &headers.device,
        Some(Vec::from_iter(posted_collections)),
        &mut conn,
    )
@@ -924,7 +921,7 @@ async fn post_cipher_share(
) -> JsonResult {
    let data: ShareCipherData = data.into_inner();

    share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await
    share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await
}

#[put("/ciphers/<cipher_id>/share", data = "<data>")]
@@ -937,7 +934,7 @@ async fn put_cipher_share(
) -> JsonResult {
    let data: ShareCipherData = data.into_inner();

    share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await
    share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await
}

#[derive(Deserialize)]
@@ -977,11 +974,16 @@ async fn put_cipher_share_selected(
    };

    match shared_cipher_data.cipher.id.take() {
        Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?,
        Some(id) => {
            share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt, Some(UpdateType::None)).await?
        }
        None => err!("Request missing ids field"),
    };
    }

    // Multi share actions do not send out a push for each cipher, we need to send a general sync here
    nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await;

    Ok(())
}
@@ -991,6 +993,7 @@ async fn share_cipher_by_uuid(
    headers: &Headers,
    conn: &mut DbConn,
    nt: &Notify<'_>,
    override_ut: Option<UpdateType>,
) -> JsonResult {
    let mut cipher = match Cipher::find_by_uuid(cipher_id, conn).await {
        Some(cipher) => {
||||
};
|
||||
|
||||
// When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate.
|
||||
let ut = if data.cipher.last_known_revision_date.is_some() {
|
||||
// If there is an override, like when handling multiple items, we want to prevent a push notification for every single item
|
||||
let ut = if let Some(ut) = override_ut {
|
||||
ut
|
||||
} else if data.cipher.last_known_revision_date.is_some() {
|
||||
UpdateType::SyncCipherUpdate
|
||||
} else {
|
||||
UpdateType::SyncCipherCreate
|
||||
@@ -1030,7 +1036,7 @@ async fn share_cipher_by_uuid(
|
||||
|
||||
update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?;
|
||||
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
|
||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?))
|
||||
}
|
||||
|
||||
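Note: `share_cipher_by_uuid` now takes an `override_ut: Option<UpdateType>` so bulk callers such as `put_cipher_share_selected` can pass `Some(UpdateType::None)` to suppress the per-item push and send one general `SyncCiphers` notification afterwards. A minimal sketch of this "explicit override, else infer" pattern (illustrative names, not the real API):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum UpdateKind {
        None,
        Create,
        Update,
    }

    // Pick the notification type: an explicit override (used by bulk operations)
    // wins; otherwise infer create-vs-update from whether the client already
    // knew a revision date for this item.
    fn effective_update(override_kind: Option<UpdateKind>, known_revision: bool) -> UpdateKind {
        if let Some(kind) = override_kind {
            kind
        } else if known_revision {
            UpdateKind::Update
        } else {
            UpdateKind::Create
        }
    }

    fn main() {
        // Single-item call sites pass None and get the inferred type.
        assert_eq!(effective_update(None, true), UpdateKind::Update);
        assert_eq!(effective_update(None, false), UpdateKind::Create);
        // Bulk call sites silence per-item pushes and sync once afterwards.
        assert_eq!(effective_update(Some(UpdateKind::None), true), UpdateKind::None);
    }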
 /// v2 API for downloading an attachment. This just redirects the client to
@@ -1055,7 +1061,7 @@ async fn get_attachment(
     }

     match Attachment::find_by_id(&attachment_id, &mut conn).await {
-        Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))),
+        Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host).await?)),
         Some(_) => err!("Attachment doesn't belong to cipher"),
         None => err!("Attachment doesn't exist"),
     }
@@ -1105,7 +1111,7 @@ async fn post_attachment_v2(
         Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key));
     attachment.save(&mut conn).await.expect("Error saving attachment");

-    let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
+    let url = format!("/ciphers/{}/attachment/{attachment_id}", cipher.uuid);
     let response_key = match data.admin_request {
         Some(b) if b => "cipherMiniResponse",
         _ => "cipherResponse",
@@ -1116,7 +1122,7 @@ async fn post_attachment_v2(
         "attachmentId": attachment_id,
         "url": url,
         "fileUploadType": FileUploadType::Direct as i32,
-        response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await,
+        response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?,
     })))
 }

@@ -1142,7 +1148,7 @@ async fn save_attachment(
     mut conn: DbConn,
     nt: Notify<'_>,
 ) -> Result<(Cipher, DbConn), crate::error::Error> {
-    let mut data = data.into_inner();
+    let data = data.into_inner();

     let Some(size) = data.data.len().to_i64() else {
         err!("Attachment data size overflow");
@@ -1269,19 +1275,13 @@ async fn save_attachment(
         attachment.save(&mut conn).await.expect("Error saving attachment");
     }

-    let folder_path = tokio::fs::canonicalize(&CONFIG.attachments_folder()).await?.join(cipher_id.as_ref());
-    let file_path = folder_path.join(file_id.as_ref());
-    tokio::fs::create_dir_all(&folder_path).await?;
-
-    if let Err(_err) = data.data.persist_to(&file_path).await {
-        data.data.move_copy_to(file_path).await?
-    }
+    save_temp_file(PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;

     nt.send_cipher_update(
         UpdateType::SyncCipherUpdate,
         &cipher,
         &cipher.update_users_revision(&mut conn).await,
-        &headers.device.uuid,
+        &headers.device,
         None,
         &mut conn,
     )
@@ -1342,7 +1342,7 @@ async fn post_attachment(

     let (cipher, mut conn) = save_attachment(attachment, cipher_id, data, &headers, conn, nt).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
 }

 #[post("/ciphers/<cipher_id>/attachment-admin", format = "multipart/form-data", data = "<data>")]
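Note: attachment writes no longer touch `tokio::fs` directly; they go through a `save_temp_file(PathType::Attachments, ...)` helper, which centralizes path resolution and lets the storage layer decide where the bytes land (a local folder or, presumably, a remote backend). The following is only a local-disk sketch of such a helper's contract, with assumed names and paths, not vaultwarden's actual implementation:

    use std::path::PathBuf;

    // Illustrative stand-ins; the real PathType/save_temp_file live in the
    // storage layer and also understand non-local backends.
    enum PathType {
        Attachments,
        Sends,
    }

    fn base_dir(pt: &PathType) -> PathBuf {
        match pt {
            PathType::Attachments => PathBuf::from("data/attachments"),
            PathType::Sends => PathBuf::from("data/sends"),
        }
    }

    // Resolve the destination from the logical path type plus a relative name,
    // create parent dirs, then move the temp file into place (falling back to
    // copy+remove when rename fails, e.g. across filesystems).
    fn save_temp_file(pt: PathType, rel: &str, tmp: PathBuf) -> std::io::Result<PathBuf> {
        let dest = base_dir(&pt).join(rel);
        if let Some(parent) = dest.parent() {
            std::fs::create_dir_all(parent)?;
        }
        if std::fs::rename(&tmp, &dest).is_err() {
            std::fs::copy(&tmp, &dest)?;
            std::fs::remove_file(&tmp)?;
        }
        Ok(dest)
    }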
#[post("/ciphers/<cipher_id>/attachment-admin", format = "multipart/form-data", data = "<data>")]
|
||||
@@ -1376,7 +1376,7 @@ async fn delete_attachment_post_admin(
|
||||
headers: Headers,
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
) -> JsonResult {
|
||||
delete_attachment(cipher_id, attachment_id, headers, conn, nt).await
|
||||
}
|
||||
|
||||
@@ -1387,7 +1387,7 @@ async fn delete_attachment_post(
|
||||
headers: Headers,
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
) -> JsonResult {
|
||||
delete_attachment(cipher_id, attachment_id, headers, conn, nt).await
|
||||
}
|
||||
|
||||
@@ -1398,7 +1398,7 @@ async fn delete_attachment(
|
||||
headers: Headers,
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
) -> JsonResult {
|
||||
_delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await
|
||||
}
|
||||
|
||||
@@ -1409,13 +1409,13 @@ async fn delete_attachment_admin(
|
||||
headers: Headers,
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
) -> JsonResult {
|
||||
_delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await
|
||||
}
|
||||
|
||||
#[post("/ciphers/<cipher_id>/delete")]
|
||||
async fn delete_cipher_post(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
@@ -1426,13 +1426,13 @@ async fn delete_cipher_post_admin(
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[put("/ciphers/<cipher_id>/delete")]
|
||||
async fn delete_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await
|
||||
// soft delete
|
||||
}
|
||||
|
||||
@@ -1443,18 +1443,19 @@ async fn delete_cipher_put_admin(
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await
|
||||
// soft delete
|
||||
}
|
||||
|
||||
#[delete("/ciphers/<cipher_id>")]
|
||||
async fn delete_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[delete("/ciphers/<cipher_id>/admin")]
|
||||
async fn delete_cipher_admin(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await
|
||||
_delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
@@ -1465,7 +1466,8 @@ async fn delete_cipher_selected(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[post("/ciphers/delete", data = "<data>")]
|
||||
@@ -1475,7 +1477,8 @@ async fn delete_cipher_selected_post(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[put("/ciphers/delete", data = "<data>")]
|
||||
@@ -1485,7 +1488,8 @@ async fn delete_cipher_selected_put(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await
|
||||
// soft delete
|
||||
}
|
||||
|
||||
#[delete("/ciphers/admin", data = "<data>")]
|
||||
@@ -1495,7 +1499,8 @@ async fn delete_cipher_selected_admin(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[post("/ciphers/delete-admin", data = "<data>")]
|
||||
@@ -1505,7 +1510,8 @@ async fn delete_cipher_selected_post_admin(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await
|
||||
// permanent delete
|
||||
}
|
||||
|
||||
#[put("/ciphers/delete-admin", data = "<data>")]
|
||||
@@ -1515,12 +1521,13 @@ async fn delete_cipher_selected_put_admin(
|
||||
conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
_delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete
|
||||
_delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await
|
||||
// soft delete
|
||||
}
|
||||
|
||||
#[put("/ciphers/<cipher_id>/restore")]
|
||||
async fn restore_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult {
|
||||
_restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await
|
||||
_restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await
|
||||
}
|
||||
|
||||
#[put("/ciphers/<cipher_id>/restore-admin")]
|
||||
@@ -1530,7 +1537,17 @@ async fn restore_cipher_put_admin(
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> JsonResult {
|
||||
_restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await
|
||||
_restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await
|
||||
}
|
||||
|
||||
#[put("/ciphers/restore-admin", data = "<data>")]
|
||||
async fn restore_cipher_selected_admin(
|
||||
data: Json<CipherIdsData>,
|
||||
headers: Headers,
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> JsonResult {
|
||||
_restore_multiple_ciphers(data, &headers, &mut conn, &nt).await
|
||||
}
|
||||
|
||||
#[put("/ciphers/restore", data = "<data>")]
|
||||
@@ -1558,35 +1575,47 @@ async fn move_cipher_selected(
|
||||
nt: Notify<'_>,
|
||||
) -> EmptyResult {
|
||||
let data = data.into_inner();
|
||||
let user_id = headers.user.uuid;
|
||||
let user_id = &headers.user.uuid;
|
||||
|
||||
if let Some(ref folder_id) = data.folder_id {
|
||||
if Folder::find_by_uuid_and_user(folder_id, &user_id, &mut conn).await.is_none() {
|
||||
if Folder::find_by_uuid_and_user(folder_id, user_id, &mut conn).await.is_none() {
|
||||
err!("Invalid folder", "Folder does not exist or belongs to another user");
|
||||
}
|
||||
}
|
||||
|
||||
for cipher_id in data.ids {
|
||||
let Some(cipher) = Cipher::find_by_uuid(&cipher_id, &mut conn).await else {
|
||||
err!("Cipher doesn't exist")
|
||||
};
|
||||
let cipher_count = data.ids.len();
|
||||
let mut single_cipher: Option<Cipher> = None;
|
||||
|
||||
if !cipher.is_accessible_to_user(&user_id, &mut conn).await {
|
||||
err!("Cipher is not accessible by user")
|
||||
// TODO: Convert this to use a single query (or at least less) to update all items
|
||||
// Find all ciphers a user has access to, all others will be ignored
|
||||
let accessible_ciphers = Cipher::find_by_user_and_ciphers(user_id, &data.ids, &mut conn).await;
|
||||
let accessible_ciphers_count = accessible_ciphers.len();
|
||||
for cipher in accessible_ciphers {
|
||||
cipher.move_to_folder(data.folder_id.clone(), user_id, &mut conn).await?;
|
||||
if cipher_count == 1 {
|
||||
single_cipher = Some(cipher);
|
||||
}
|
||||
}
|
||||
|
||||
// Move cipher
|
||||
cipher.move_to_folder(data.folder_id.clone(), &user_id, &mut conn).await?;
|
||||
|
||||
if let Some(cipher) = single_cipher {
|
||||
nt.send_cipher_update(
|
||||
UpdateType::SyncCipherUpdate,
|
||||
&cipher,
|
||||
&[user_id.clone()],
|
||||
&headers.device.uuid,
|
||||
std::slice::from_ref(user_id),
|
||||
&headers.device,
|
||||
None,
|
||||
&mut conn,
|
||||
)
|
||||
.await;
|
||||
} else {
|
||||
// Multi move actions do not send out a push for each cipher, we need to send a general sync here
|
||||
nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await;
|
||||
}
|
||||
|
||||
if cipher_count != accessible_ciphers_count {
|
||||
err!(format!(
|
||||
"Not all ciphers are moved! {accessible_ciphers_count} of the selected {cipher_count} were moved."
|
||||
))
|
||||
}
|
||||
|
||||
Ok(())
|
||||
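Note: `move_cipher_selected` now fetches every requested cipher the user can access in one query (`find_by_user_and_ciphers`) instead of looking each id up individually, pushes a single general sync for bulk moves, and reports a partial-success error when some ids were inaccessible. A rough, self-contained sketch of that shape (plain integers stand in for the real models):

    // Illustrative sketch: filter to accessible items first, act on the batch,
    // then notify once (per-item only when exactly one item was requested).
    fn move_items(requested: &[u32], accessible: &[u32]) -> Result<(), String> {
        let requested_count = requested.len();
        let moved: Vec<u32> = requested.iter().copied().filter(|id| accessible.contains(id)).collect();

        if moved.len() == 1 && requested_count == 1 {
            println!("push per-item update for {}", moved[0]);
        } else {
            println!("push one general sync for {} items", moved.len());
        }

        if moved.len() != requested_count {
            return Err(format!("Not all items moved! {} of the selected {requested_count} were moved.", moved.len()));
        }
        Ok(())
    }

    fn main() {
        assert!(move_items(&[1], &[1, 2]).is_ok());
        assert!(move_items(&[1, 3], &[1, 2]).is_err()); // 3 is not accessible
    }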
@@ -1629,7 +1658,7 @@ async fn delete_all(
             Some(member) => {
                 if member.atype == MembershipType::Owner {
                     Cipher::delete_all_by_organization(&org_data.org_id, &mut conn).await?;
-                    nt.send_user_update(UpdateType::SyncVault, &user).await;
+                    nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;

                     log_event(
                         EventType::OrganizationPurgedVault as i32,
@@ -1662,18 +1691,26 @@ async fn delete_all(
             }

             user.update_revision(&mut conn).await?;
-            nt.send_user_update(UpdateType::SyncVault, &user).await;
+            nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await;

             Ok(())
         }
     }
 }

+#[derive(PartialEq)]
+pub enum CipherDeleteOptions {
+    SoftSingle,
+    SoftMulti,
+    HardSingle,
+    HardMulti,
+}
+
 async fn _delete_cipher_by_uuid(
     cipher_id: &CipherId,
     headers: &Headers,
     conn: &mut DbConn,
-    soft_delete: bool,
+    delete_options: &CipherDeleteOptions,
     nt: &Notify<'_>,
 ) -> EmptyResult {
     let Some(mut cipher) = Cipher::find_by_uuid(cipher_id, conn).await else {
@@ -1684,35 +1721,42 @@ async fn _delete_cipher_by_uuid(
         err!("Cipher can't be deleted by user")
     }

-    if soft_delete {
+    if *delete_options == CipherDeleteOptions::SoftSingle || *delete_options == CipherDeleteOptions::SoftMulti {
         cipher.deleted_at = Some(Utc::now().naive_utc());
         cipher.save(conn).await?;
-        nt.send_cipher_update(
-            UpdateType::SyncCipherUpdate,
-            &cipher,
-            &cipher.update_users_revision(conn).await,
-            &headers.device.uuid,
-            None,
-            conn,
-        )
-        .await;
+        if *delete_options == CipherDeleteOptions::SoftSingle {
+            nt.send_cipher_update(
+                UpdateType::SyncCipherUpdate,
+                &cipher,
+                &cipher.update_users_revision(conn).await,
+                &headers.device,
+                None,
+                conn,
+            )
+            .await;
+        }
     } else {
         cipher.delete(conn).await?;
-        nt.send_cipher_update(
-            UpdateType::SyncCipherDelete,
-            &cipher,
-            &cipher.update_users_revision(conn).await,
-            &headers.device.uuid,
-            None,
-            conn,
-        )
-        .await;
+        if *delete_options == CipherDeleteOptions::HardSingle {
+            nt.send_cipher_update(
+                UpdateType::SyncLoginDelete,
+                &cipher,
+                &cipher.update_users_revision(conn).await,
+                &headers.device,
+                None,
+                conn,
+            )
+            .await;
+        }
     }

     if let Some(org_id) = cipher.organization_uuid {
-        let event_type = match soft_delete {
-            true => EventType::CipherSoftDeleted as i32,
-            false => EventType::CipherDeleted as i32,
+        let event_type = if *delete_options == CipherDeleteOptions::SoftSingle
+            || *delete_options == CipherDeleteOptions::SoftMulti
+        {
+            EventType::CipherSoftDeleted as i32
+        } else {
+            EventType::CipherDeleted as i32
         };

         log_event(event_type, &cipher.uuid, &org_id, &headers.user.uuid, headers.device.atype, &headers.ip.ip, conn)
@@ -1732,23 +1776,27 @@ async fn _delete_multiple_ciphers(
     data: Json<CipherIdsData>,
     headers: Headers,
     mut conn: DbConn,
-    soft_delete: bool,
+    delete_options: CipherDeleteOptions,
     nt: Notify<'_>,
 ) -> EmptyResult {
     let data = data.into_inner();

     for cipher_id in data.ids {
-        if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, soft_delete, &nt).await {
+        if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &delete_options, &nt).await {
             return error;
         };
     }

+    // Multi delete actions do not send out a push for each cipher, we need to send a general sync here
+    nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await;
+
     Ok(())
 }

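Note: `CipherDeleteOptions` folds the old `soft_delete: bool` and the implicit single-vs-bulk context into one enum, so `_delete_cipher_by_uuid` can both pick the right event type and skip per-item pushes during bulk deletes. A compact sketch of the same idea using helper predicates (hypothetical names, not the real API):

    #[derive(PartialEq)]
    pub enum DeleteOptions {
        SoftSingle,
        SoftMulti,
        HardSingle,
        HardMulti,
    }

    impl DeleteOptions {
        // Soft deletes move the item to the trash; hard deletes are permanent.
        fn is_soft(&self) -> bool {
            matches!(self, DeleteOptions::SoftSingle | DeleteOptions::SoftMulti)
        }
        // Only single-item deletes push an immediate per-item notification;
        // bulk deletes send one general sync afterwards instead.
        fn notify_per_item(&self) -> bool {
            matches!(self, DeleteOptions::SoftSingle | DeleteOptions::HardSingle)
        }
    }

    fn main() {
        let opts = DeleteOptions::HardMulti;
        assert!(!opts.is_soft());
        assert!(!opts.notify_per_item());
    }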
 async fn _restore_cipher_by_uuid(
     cipher_id: &CipherId,
     headers: &Headers,
+    multi_restore: bool,
     conn: &mut DbConn,
     nt: &Notify<'_>,
 ) -> JsonResult {
@@ -1763,15 +1811,17 @@ async fn _restore_cipher_by_uuid(
     cipher.deleted_at = None;
     cipher.save(conn).await?;

-    nt.send_cipher_update(
-        UpdateType::SyncCipherUpdate,
-        &cipher,
-        &cipher.update_users_revision(conn).await,
-        &headers.device.uuid,
-        None,
-        conn,
-    )
-    .await;
+    if !multi_restore {
+        nt.send_cipher_update(
+            UpdateType::SyncCipherUpdate,
+            &cipher,
+            &cipher.update_users_revision(conn).await,
+            &headers.device,
+            None,
+            conn,
+        )
+        .await;
+    }

     if let Some(org_id) = &cipher.organization_uuid {
         log_event(
@@ -1786,7 +1836,7 @@ async fn _restore_cipher_by_uuid(
         .await;
     }

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?))
 }

 async fn _restore_multiple_ciphers(
@@ -1799,12 +1849,15 @@ async fn _restore_multiple_ciphers(

     let mut ciphers: Vec<Value> = Vec::new();
     for cipher_id in data.ids {
-        match _restore_cipher_by_uuid(&cipher_id, headers, conn, nt).await {
+        match _restore_cipher_by_uuid(&cipher_id, headers, true, conn, nt).await {
             Ok(json) => ciphers.push(json.into_inner()),
             err => return err,
         }
     }

+    // Multi move actions do not send out a push for each cipher, we need to send a general sync here
+    nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, conn).await;
+
     Ok(Json(json!({
       "data": ciphers,
       "object": "list",
@@ -1818,7 +1871,7 @@ async fn _delete_cipher_attachment_by_id(
     headers: &Headers,
     conn: &mut DbConn,
     nt: &Notify<'_>,
-) -> EmptyResult {
+) -> JsonResult {
     let Some(attachment) = Attachment::find_by_id(attachment_id, conn).await else {
         err!("Attachment doesn't exist")
     };
@@ -1841,17 +1894,17 @@ async fn _delete_cipher_attachment_by_id(
         UpdateType::SyncCipherUpdate,
         &cipher,
         &cipher.update_users_revision(conn).await,
-        &headers.device.uuid,
+        &headers.device,
         None,
         conn,
     )
     .await;

-    if let Some(org_id) = cipher.organization_uuid {
+    if let Some(ref org_id) = cipher.organization_uuid {
         log_event(
             EventType::CipherAttachmentDeleted as i32,
             &cipher.uuid,
-            &org_id,
+            org_id,
             &headers.user.uuid,
             headers.device.atype,
             &headers.ip.ip,
@@ -1859,7 +1912,8 @@ async fn _delete_cipher_attachment_by_id(
         )
         .await;
     }
-    Ok(())
+    let cipher_json = cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?;
+    Ok(Json(json!({"cipher":cipher_json})))
 }

 /// This will hold all the necessary data to improve a full sync of all the ciphers
@@ -1933,11 +1987,21 @@ impl CipherSyncData {

         // Generate a HashMap with the collections_uuid as key and the CollectionGroup record
         let user_collections_groups: HashMap<CollectionId, CollectionGroup> = if CONFIG.org_groups_enabled() {
-            CollectionGroup::find_by_user(user_id, conn)
-                .await
-                .into_iter()
-                .map(|collection_group| (collection_group.collections_uuid.clone(), collection_group))
-                .collect()
+            CollectionGroup::find_by_user(user_id, conn).await.into_iter().fold(
+                HashMap::new(),
+                |mut combined_permissions, cg| {
+                    combined_permissions
+                        .entry(cg.collections_uuid.clone())
+                        .and_modify(|existing| {
+                            // Combine permissions: take the most permissive settings.
+                            existing.read_only &= cg.read_only; // false if ANY group allows write
+                            existing.hide_passwords &= cg.hide_passwords; // false if ANY group allows password view
+                            existing.manage |= cg.manage; // true if ANY group allows manage
+                        })
+                        .or_insert(cg);
+                    combined_permissions
+                },
+            )
         } else {
             HashMap::new()
         };
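Note: the `fold` above de-duplicates group rows per collection and merges their permissions so the most permissive combination wins: the restrictive flags (`read_only`, `hide_passwords`) stay set only if every group sets them, while `manage` becomes true if any group grants it. A standalone sketch of that merge rule:

    use std::collections::HashMap;

    // Minimal stand-in for a per-collection permission record.
    #[derive(Debug, PartialEq)]
    struct Perm {
        read_only: bool,
        hide_passwords: bool,
        manage: bool,
    }

    // Merge duplicate rows per key, keeping the most permissive combination:
    // restrictive flags AND together, the grant flag ORs together.
    fn combine(rows: Vec<(&'static str, Perm)>) -> HashMap<&'static str, Perm> {
        rows.into_iter().fold(HashMap::new(), |mut acc, (key, p)| {
            acc.entry(key)
                .and_modify(|e| {
                    e.read_only &= p.read_only;
                    e.hide_passwords &= p.hide_passwords;
                    e.manage |= p.manage;
                })
                .or_insert(p);
            acc
        })
    }

    fn main() {
        let merged = combine(vec![
            ("col1", Perm { read_only: true, hide_passwords: true, manage: false }),
            ("col1", Perm { read_only: false, hide_passwords: true, manage: true }),
        ]);
        // One group allows writing and managing, so the merged row does too.
        assert_eq!(merged["col1"], Perm { read_only: false, hide_passwords: true, manage: true });
    }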
src/api/core/emergency_access.rs
@@ -227,7 +227,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
     let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
         None => {
             if !CONFIG.invitations_allowed() {
-                err!(format!("Grantee user does not exist: {}", &email))
+                err!(format!("Grantee user does not exist: {email}"))
             }

             if !CONFIG.is_email_domain_allowed(&email) {
@@ -239,7 +239,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
                 invitation.save(&mut conn).await?;
             }

-            let mut user = User::new(email.clone());
+            let mut user = User::new(email.clone(), None);
             user.save(&mut conn).await?;
             (user, true)
         }
@@ -582,7 +582,7 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut
                 CipherSyncType::User,
                 &mut conn,
             )
-            .await,
+            .await?,
         );
     }

src/api/core/events.rs
@@ -29,7 +29,7 @@ struct EventRange {
     continuation_token: Option<String>,
 }

-// Upstream: https://github.com/bitwarden/server/blob/9ecf69d9cabce732cf2c57976dd9afa5728578fb/src/Api/Controllers/EventsController.cs#LL84C35-L84C41
+// Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/EventsController.cs#L87
 #[get("/organizations/<org_id>/events?<data..>")]
 async fn get_org_events(
     org_id: OrganizationId,
@@ -169,8 +169,8 @@ struct EventCollection {
 }

 // Upstream:
-// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs
-// https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs
+// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Events/Controllers/CollectController.cs
+// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs
 #[post("/collect", format = "application/json", data = "<data>")]
 async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult {
     if !CONFIG.org_events_enabled() {
src/api/core/folders.rs
@@ -45,7 +45,7 @@ async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn
     let mut folder = Folder::new(headers.user.uuid, data.name);

     folder.save(&mut conn).await?;
-    nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await;
+    nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device, &mut conn).await;

     Ok(Json(folder.to_json()))
 }
@@ -78,7 +78,7 @@ async fn put_folder(
     folder.name = data.name;

     folder.save(&mut conn).await?;
-    nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await;
+    nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device, &mut conn).await;

     Ok(Json(folder.to_json()))
 }
@@ -97,6 +97,6 @@ async fn delete_folder(folder_id: FolderId, headers: Headers, mut conn: DbConn,
     // Delete the actual folder entry
     folder.delete(&mut conn).await?;

-    nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device.uuid, &mut conn).await;
+    nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device, &mut conn).await;
     Ok(())
 }
src/api/core/mod.rs
@@ -50,11 +50,12 @@ pub fn events_routes() -> Vec<Route> {
 use rocket::{serde::json::Json, serde::json::Value, Catcher, Route};

 use crate::{
-    api::{JsonResult, Notify, UpdateType},
+    api::{EmptyResult, JsonResult, Notify, UpdateType},
     auth::Headers,
-    db::DbConn,
+    db::{models::*, DbConn},
+    error::Error,
     http_client::make_http_request,
     mail,
     util::parse_experimental_client_feature_flags,
 };

@@ -124,7 +125,7 @@ async fn post_eq_domains(

     user.save(&mut conn).await?;

-    nt.send_user_update(UpdateType::SyncSettings, &user).await;
+    nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &mut conn).await;

     Ok(Json(json!({})))
 }
@@ -199,11 +200,18 @@ fn get_api_webauthn(_headers: Headers) -> Json<Value> {
 #[get("/config")]
 fn config() -> Json<Value> {
     let domain = crate::CONFIG.domain();
     // Official available feature flags can be found here:
+    // Server (v2025.6.2): https://github.com/bitwarden/server/blob/d094be3267f2030bd0dc62106bc6871cf82682f5/src/Core/Constants.cs#L103
+    // Client (web-v2025.6.1): https://github.com/bitwarden/clients/blob/747c2fd6a1c348a57a76e4a7de8128466ffd3c01/libs/common/src/enums/feature-flag.enum.ts#L12
+    // Android (v2025.6.0): https://github.com/bitwarden/android/blob/b5b022caaad33390c31b3021b2c1205925b0e1a2/app/src/main/kotlin/com/x8bit/bitwarden/data/platform/manager/model/FlagKey.kt#L22
+    // iOS (v2025.6.0): https://github.com/bitwarden/ios/blob/ff06d9c6cc8da89f78f37f376495800201d7261a/BitwardenShared/Core/Platform/Models/Enum/FeatureFlag.swift#L7
     let mut feature_states =
         parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags());
     // Force the new key rotation feature
     feature_states.insert("key-rotation-improvements".to_string(), true);
     feature_states.insert("flexible-collections-v-1".to_string(), false);
+    feature_states.insert("duo-redirect".to_string(), true);
+    feature_states.insert("email-verification".to_string(), true);
+    feature_states.insert("unauth-ui-refresh".to_string(), true);
+    feature_states.insert("enable-pm-flight-recorder".to_string(), true);
+    feature_states.insert("mobile-error-reporting".to_string(), true);

     Json(json!({
         // Note: The clients use this version to handle backwards compatibility concerns
@@ -211,14 +219,14 @@ fn config() -> Json<Value> {
         // We should make sure that we keep this updated when we support the new server features
         // Version history:
         // - Individual cipher key encryption: 2024.2.0
-        "version": "2025.1.0",
+        "version": "2025.6.0",
         "gitHash": option_env!("GIT_REV"),
         "server": {
             "name": "Vaultwarden",
             "url": "https://github.com/dani-garcia/vaultwarden"
         },
         "settings": {
-            "disableUserRegistration": !crate::CONFIG.signups_allowed() && crate::CONFIG.signups_domains_whitelist().is_empty(),
+            "disableUserRegistration": crate::CONFIG.is_signup_disabled()
         },
         "environment": {
             "vault": domain,
@@ -226,6 +234,12 @@ fn config() -> Json<Value> {
             "identity": format!("{domain}/identity"),
             "notifications": format!("{domain}/notifications"),
             "sso": "",
+            "cloudRegion": null,
         },
+        // Bitwarden uses this for the self-hosted servers to indicate the default push technology
+        "push": {
+            "pushTechnology": 0,
+            "vapidPublicKey": null
+        },
         "featureStates": feature_states,
         "object": "config",
@@ -246,3 +260,49 @@ fn api_not_found() -> Json<Value> {
         }
     }))
 }
+
+async fn accept_org_invite(
+    user: &User,
+    mut member: Membership,
+    reset_password_key: Option<String>,
+    conn: &mut DbConn,
+) -> EmptyResult {
+    if member.status != MembershipStatus::Invited as i32 {
+        err!("User already accepted the invitation");
+    }
+
+    // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
+    // It returns different error messages per function.
+    if member.atype < MembershipType::Admin {
+        match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await {
+            Ok(_) => {}
+            Err(OrgPolicyErr::TwoFactorMissing) => {
+                if crate::CONFIG.email_2fa_auto_fallback() {
+                    two_factor::email::activate_email_2fa(user, conn).await?;
+                } else {
+                    err!("You cannot join this organization until you enable two-step login on your user account");
+                }
+            }
+            Err(OrgPolicyErr::SingleOrgEnforced) => {
+                err!("You cannot join this organization because you are a member of an organization which forbids it");
+            }
+        }
+    }
+
+    member.status = MembershipStatus::Accepted as i32;
+    member.reset_password_key = reset_password_key;
+
+    member.save(conn).await?;
+
+    if crate::CONFIG.mail_enabled() {
+        let org = match Organization::find_by_uuid(&member.org_uuid, conn).await {
+            Some(org) => org,
+            None => err!("Organization not found."),
+        };
+        // User was invited to an organization, so they must be confirmed manually after acceptance
+        mail::send_invite_accepted(&user.email, &member.invited_by_email.unwrap_or(org.billing_email), &org.name)
+            .await?;
+    }
+
+    Ok(())
+}
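Note: `feature_states` starts from the comma-separated `EXPERIMENTAL_CLIENT_FEATURE_FLAGS` setting and the server then force-enables a few flags it always supports. A rough sketch of how such a comma-separated flag list could be parsed into a map (an illustrative stand-in, not vaultwarden's actual parser, and the flag names are examples):

    use std::collections::HashMap;

    // Illustrative parser: each listed flag name becomes an enabled feature.
    fn parse_feature_flags(raw: &str) -> HashMap<String, bool> {
        raw.split(',')
            .map(str::trim)
            .filter(|f| !f.is_empty())
            .map(|f| (f.to_string(), true))
            .collect()
    }

    fn main() {
        let mut feature_states = parse_feature_flags("example-flag-a, example-flag-b");
        // Server-side overrides, mirroring the inserts in config() above.
        feature_states.insert("duo-redirect".to_string(), true);
        assert_eq!(feature_states.get("example-flag-a"), Some(&true));
        assert_eq!(feature_states.len(), 3);
    }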
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff