Compare commits


1 Commit

Author SHA1 Message Date
Daniel García f312e00dfa Test dylint 2024-11-09 17:31:22 +01:00
192 changed files with 8073 additions and 19446 deletions


@@ -5,7 +5,6 @@
!.git !.git
!docker/healthcheck.sh !docker/healthcheck.sh
!docker/start.sh !docker/start.sh
!macros
!migrations !migrations
!src !src


@@ -15,14 +15,6 @@
#################### ####################
## Main data folder ## Main data folder
## This can be a path to local folder or a path to an external location
## depending on features enabled at build time. Possible external locations:
##
## - AWS S3 Bucket (via `s3` feature): s3://bucket-name/path/to/folder
##
## When using an external location, make sure to set TMP_FOLDER,
## TEMPLATES_FOLDER, and DATABASE_URL to local paths and/or a remote database
## location.
# DATA_FOLDER=data # DATA_FOLDER=data
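As a sketch of the rule described above (assuming a build with the `s3` feature; the bucket name is hypothetical), an external data folder is paired with local paths for the temporary folder and the database:

DATA_FOLDER=s3://my-vaultwarden-bucket/vaultwarden
TMP_FOLDER=data/tmp
DATABASE_URL=data/db.sqlite3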
## Individual folders, these override %DATA_FOLDER% ## Individual folders, these override %DATA_FOLDER%
@@ -30,13 +22,10 @@
# ICON_CACHE_FOLDER=data/icon_cache # ICON_CACHE_FOLDER=data/icon_cache
# ATTACHMENTS_FOLDER=data/attachments # ATTACHMENTS_FOLDER=data/attachments
# SENDS_FOLDER=data/sends # SENDS_FOLDER=data/sends
## Temporary folder used for storing temporary file uploads
## Must be a local path.
# TMP_FOLDER=data/tmp # TMP_FOLDER=data/tmp
## HTML template overrides data folder ## Templates data folder, by default uses embedded templates
## Must be a local path. ## Check source code to see the format
# TEMPLATES_FOLDER=data/templates # TEMPLATES_FOLDER=data/templates
## Automatically reload the templates for every request, slow, use only for development ## Automatically reload the templates for every request, slow, use only for development
# RELOAD_TEMPLATES=false # RELOAD_TEMPLATES=false
@@ -50,9 +39,7 @@
######################### #########################
## Database URL ## Database URL
## When using SQLite, this is the path to the DB file, and it defaults to ## When using SQLite, this is the path to the DB file, default to %DATA_FOLDER%/db.sqlite3
## %DATA_FOLDER%/db.sqlite3. If DATA_FOLDER is set to an external location, this
## must be set to a local sqlite3 file path.
# DATABASE_URL=data/db.sqlite3 # DATABASE_URL=data/db.sqlite3
## When using MySQL, specify an appropriate connection URI. ## When using MySQL, specify an appropriate connection URI.
## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html ## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html
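For example, a MySQL connection URI following the Diesel format linked above might look like this (user, password, host, and database name are placeholders):

DATABASE_URL=mysql://vaultwarden:secret@localhost:3306/vaultwarden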
@@ -130,7 +117,7 @@
## and are always in terms of UTC time (regardless of your local time zone settings). ## and are always in terms of UTC time (regardless of your local time zone settings).
## ##
## The schedule format is a bit different from crontab as crontab does not contain seconds. ## The schedule format is a bit different from crontab as crontab does not contain seconds.
## You can test the format here: https://crontab.guru, but remove the first digit! ## You can test the the format here: https://crontab.guru, but remove the first digit!
## SEC MIN HOUR DAY OF MONTH MONTH DAY OF WEEK ## SEC MIN HOUR DAY OF MONTH MONTH DAY OF WEEK
## "0 30 9,12,15 1,15 May-Aug Mon,Wed,Fri" ## "0 30 9,12,15 1,15 May-Aug Mon,Wed,Fri"
## "0 30 * * * * " ## "0 30 * * * * "
@@ -174,10 +161,6 @@
## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
## Defaults to every minute. Set blank to disable this job. ## Defaults to every minute. Set blank to disable this job.
# DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
#
## Cron schedule of the job that cleans SSO nonces from incomplete login flows
## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *"
######################## ########################
### General settings ### ### General settings ###
@@ -246,8 +229,7 @@
# SIGNUPS_ALLOWED=true # SIGNUPS_ALLOWED=true
## Controls if new users need to verify their email address upon registration ## Controls if new users need to verify their email address upon registration
## On new client versions, this will require the user to verify their email at signup time. ## Note that setting this option to true prevents logins until the email address has been verified!
## On older clients, it will require the user to verify their email before they can log in.
## The welcome email will include a verification link, and login attempts will periodically ## The welcome email will include a verification link, and login attempts will periodically
## trigger another verification email to be sent. ## trigger another verification email to be sent.
# SIGNUPS_VERIFY=false # SIGNUPS_VERIFY=false
@@ -277,7 +259,7 @@
## A comma-separated list means only those users can create orgs: ## A comma-separated list means only those users can create orgs:
# ORG_CREATION_USERS=admin1@example.com,admin2@example.com # ORG_CREATION_USERS=admin1@example.com,admin2@example.com
## Allows org admins to invite users, even when signups are disabled ## Invitations org admins to invite users, even when signups are disabled
# INVITATIONS_ALLOWED=true # INVITATIONS_ALLOWED=true
## Name shown in the invitation emails that don't come from a specific organization ## Name shown in the invitation emails that don't come from a specific organization
# INVITATION_ORG_NAME=Vaultwarden # INVITATION_ORG_NAME=Vaultwarden
@@ -298,13 +280,12 @@
## The default for new users. If changed, it will be updated during login for existing users. ## The default for new users. If changed, it will be updated during login for existing users.
# PASSWORD_ITERATIONS=600000 # PASSWORD_ITERATIONS=600000
## Controls whether users can set or show password hints. This setting applies globally to all users. ## Controls whether users can set password hints. This setting applies globally to all users.
# PASSWORD_HINTS_ALLOWED=true # PASSWORD_HINTS_ALLOWED=true
## Controls whether a password hint should be shown directly in the web page if ## Controls whether a password hint should be shown directly in the web page if
## SMTP service is not configured and password hints are allowed. ## SMTP service is not configured. Not recommended for publicly-accessible instances
## Not recommended for publicly-accessible instances because this provides ## as this provides unauthenticated access to potentially sensitive data.
## unauthenticated access to potentially sensitive data.
# SHOW_PASSWORD_HINT=false # SHOW_PASSWORD_HINT=false
######################### #########################
@@ -345,33 +326,29 @@
## Icon download timeout ## Icon download timeout
## Configure the timeout value when downloading the favicons. ## Configure the timeout value when downloading the favicons.
## The default is 10 seconds, but this could be too low on slower network connections ## The default is 10 seconds, but this could be to low on slower network connections
# ICON_DOWNLOAD_TIMEOUT=10 # ICON_DOWNLOAD_TIMEOUT=10
## Block HTTP domains/IPs by Regex ## Block HTTP domains/IPs by Regex
## Any domains or IPs that match this regex won't be fetched by the internal HTTP client. ## Any domains or IPs that match this regex won't be fetched by the internal HTTP client.
## Useful to hide other servers in the local network. Check the WIKI for more details ## Useful to hide other servers in the local network. Check the WIKI for more details
## NOTE: Always enclose this regex within single quotes! ## NOTE: Always enclose this regex withing single quotes!
# HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$' # HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$'
## Enabling this will cause the internal HTTP client to refuse to connect to any non-global IP address. ## Enabling this will cause the internal HTTP client to refuse to connect to any non global IP address.
## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block ## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
# HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true # HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true
## Client Settings ## Client Settings
## Enable experimental feature flags for clients. ## Enable experimental feature flags for clients.
## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3". ## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3".
## Note that clients cache the /api/config endpoint for about 1 hour and it could take some time before they are enabled or disabled!
## ##
## The following flags are available: ## The following flags are available:
## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension. ## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials.
## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension. ## - "autofill-v2": Use the new autofill implementation.
## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0) ## - "browser-fileless-import": Directly import credentials from other providers without a file.
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0) ## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension)
## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0) ## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor.
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0)
# EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
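For illustration, enabling two of the flags listed above would look like the following; whether a flag has any effect depends on the client version noted next to it:

EXPERIMENTAL_CLIENT_FEATURE_FLAGS=ssh-agent,ssh-key-vault-item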
## Require new device emails. When a user logs in, an email is required to be sent. ## Require new device emails. When a user logs in, an email is required to be sent.
@@ -430,14 +407,6 @@
## Multiple values must be separated with a whitespace. ## Multiple values must be separated with a whitespace.
# ALLOWED_IFRAME_ANCESTORS= # ALLOWED_IFRAME_ANCESTORS=
## Allowed connect-src (Know the risks!)
## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/connect-src
## Allows additional domains in URLs which can be loaded using script interfaces, like the Forwarded email alias feature
## This adds the configured value to the 'Content-Security-Policy' headers 'connect-src' value.
## Multiple values must be separated with a whitespace. And only HTTPS values are allowed.
## Example: "https://my-addy-io.domain.tld https://my-simplelogin.domain.tld"
# ALLOWED_CONNECT_SRC=""
## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in. ## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in.
# LOGIN_RATELIMIT_SECONDS=60 # LOGIN_RATELIMIT_SECONDS=60
## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`. ## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`.
@@ -463,55 +432,6 @@
## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
# ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false
#####################################
### SSO settings (OpenID Connect) ###
#####################################
## Controls whether users can log in using an OpenID Connect identity provider
# SSO_ENABLED=false
## Prevent users from logging in directly without going through SSO
# SSO_ONLY=false
## On SSO signup, if a user with a matching email already exists, make the association
# SSO_SIGNUPS_MATCH_EMAIL=true
## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` opens the door to potential account takeover.
# SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false
## Base URL of the OIDC server (auto-discovery is used)
## - Should not include the `/.well-known/openid-configuration` part and no trailing `/`
## - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse
# SSO_AUTHORITY=https://auth.example.com
## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit).
# SSO_SCOPES="email profile"
## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth).
# SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent"
## Activate PKCE for the Auth Code flow.
# SSO_PKCE=true
## Regex for additional trusted ID token audience (by default only the client_id is trusted).
# SSO_AUDIENCE_TRUSTED='^$'
## Set your Client ID and Client Key
# SSO_CLIENT_ID=11111
# SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA
## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment.
# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}'
## Use SSO only for authentication, not the session lifecycle
# SSO_AUTH_ONLY_NOT_SESSION=false
## Client cache for discovery endpoint. Duration in seconds (0 to disable).
# SSO_CLIENT_CACHE_EXPIRATION=0
## Log all the tokens, LOG_LEVEL=debug is required
# SSO_DEBUG_TOKENS=false
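Taken together, a minimal sketch that enables OIDC login with the variables above might be (authority, client ID, and secret are placeholders; the remaining endpoints are resolved via auto-discovery):

SSO_ENABLED=true
SSO_AUTHORITY=https://auth.example.com
SSO_CLIENT_ID=vaultwarden
SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA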
######################## ########################
### MFA/2FA settings ### ### MFA/2FA settings ###
######################## ########################
@@ -554,7 +474,7 @@
## Maximum attempts before an email token is reset and a new email will need to be sent. ## Maximum attempts before an email token is reset and a new email will need to be sent.
# EMAIL_ATTEMPTS_LIMIT=3 # EMAIL_ATTEMPTS_LIMIT=3
## ##
## Setup email 2FA on registration regardless of any organization policy ## Setup email 2FA regardless of any organization policy
# EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false # EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false
## Automatically setup email 2FA as fallback provider when needed ## Automatically setup email 2FA as fallback provider when needed
# EMAIL_2FA_AUTO_FALLBACK=false # EMAIL_2FA_AUTO_FALLBACK=false
@@ -571,7 +491,7 @@
## ##
## According to RFC 6238 (https://tools.ietf.org/html/rfc6238), ## According to RFC 6238 (https://tools.ietf.org/html/rfc6238),
## we allow by default the TOTP code which was valid one step back and one in the future. ## we allow by default the TOTP code which was valid one step back and one in the future.
## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes. ## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes.
## You can disable this, so that only the current TOTP Code is allowed. ## You can disable this, so that only the current TOTP Code is allowed.
## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid. ## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid.
## In any case, if a code has been used it cannot be used again; also, codes which predate it will be invalid. ## In any case, if a code has been used it cannot be used again; also, codes which predate it will be invalid.

.github/CODEOWNERS vendored (3 changes)

@@ -1,6 +1,3 @@
/.github @dani-garcia @BlackDex /.github @dani-garcia @BlackDex
/.github/** @dani-garcia @BlackDex
/.github/CODEOWNERS @dani-garcia @BlackDex /.github/CODEOWNERS @dani-garcia @BlackDex
/.github/ISSUE_TEMPLATE/** @dani-garcia @BlackDex
/.github/workflows/** @dani-garcia @BlackDex /.github/workflows/** @dani-garcia @BlackDex
/SECURITY.md @dani-garcia @BlackDex


@@ -8,30 +8,15 @@ body:
value: | value: |
Thanks for taking the time to fill out this bug report! Thanks for taking the time to fill out this bug report!
Please **do not** submit feature requests or ask for help on how to configure Vaultwarden here! Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here.
The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas. The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas.
Our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki/) has topics on how to configure Vaultwarden.
Also, make sure you are running [![GitHub Release](https://img.shields.io/github/release/dani-garcia/vaultwarden.svg)](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden! Also, make sure you are running [![GitHub Release](https://img.shields.io/github/release/dani-garcia/vaultwarden.svg)](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden!
And search for existing open or closed issues or discussions regarding your topic before posting.
Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors! Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors!
See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page). See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page).
> [!IMPORTANT]
> ## :bangbang: Search for existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) regarding your topic before posting! :bangbang:
#
- type: checkboxes
id: checklist
attributes:
label: Prerequisites
description: Please confirm you have completed the following before submitting an issue!
options:
- label: I have searched the existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=)
required: true
- label: I have searched and read the [documentation](https://github.com/dani-garcia/vaultwarden/wiki/)
required: true
# #
- id: support-string - id: support-string
type: textarea type: textarea
@@ -51,7 +36,7 @@ body:
attributes: attributes:
label: Vaultwarden Build Version label: Vaultwarden Build Version
description: What version of Vaultwarden are you running? description: What version of Vaultwarden are you running?
placeholder: ex. v1.34.0 or v1.34.1-53f58b14 placeholder: ex. v1.31.0 or v1.32.0-3466a804
validations: validations:
required: true required: true
# #
@@ -82,7 +67,7 @@ body:
attributes: attributes:
label: Reverse Proxy label: Reverse Proxy
description: Are you using a reverse proxy, if so which and what version? description: Are you using a reverse proxy, if so which and what version?
placeholder: ex. nginx 1.29.0, caddy 2.10.0, traefik 3.4.4, haproxy 3.2 placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0
validations: validations:
required: true required: true
# #
@@ -130,7 +115,7 @@ body:
attributes: attributes:
label: Client Version label: Client Version
description: What version(s) of the client(s) are you seeing the problem on? description: What version(s) of the client(s) are you seeing the problem on?
placeholder: ex. CLI v2025.7.0, Firefox 140 - v2025.6.1 placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0
# #
- id: reproduce - id: reproduce
type: textarea type: textarea


@@ -1,5 +1,4 @@
name: Build name: Build
permissions: {}
on: on:
push: push:
@@ -14,7 +13,6 @@ on:
- "diesel.toml" - "diesel.toml"
- "docker/Dockerfile.j2" - "docker/Dockerfile.j2"
- "docker/DockerSettings.yaml" - "docker/DockerSettings.yaml"
pull_request: pull_request:
paths: paths:
- ".github/workflows/build.yml" - ".github/workflows/build.yml"
@@ -30,10 +28,6 @@ on:
jobs: jobs:
build: build:
name: Build and Test ${{ matrix.channel }}
permissions:
actions: write
contents: read
# We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
timeout-minutes: 120 timeout-minutes: 120
@@ -48,33 +42,32 @@ jobs:
- "rust-toolchain" # The version defined in rust-toolchain - "rust-toolchain" # The version defined in rust-toolchain
- "msrv" # The supported MSRV - "msrv" # The supported MSRV
name: Build and Test ${{ matrix.channel }}
steps: steps:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
# End Checkout the repo
# Install dependencies # Install dependencies
- name: "Install dependencies Ubuntu" - name: "Install dependencies Ubuntu"
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
# End Install dependencies # End Install dependencies
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
fetch-depth: 0
# End Checkout the repo
# Determine rust-toolchain version # Determine rust-toolchain version
- name: Init Variables - name: Init Variables
id: toolchain id: toolchain
shell: bash shell: bash
env:
CHANNEL: ${{ matrix.channel }}
run: | run: |
if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)" RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
elif [[ "${CHANNEL}" == 'msrv' ]]; then elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then
RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)" RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
else else
RUST_TOOLCHAIN="${CHANNEL}" RUST_TOOLCHAIN="${{ matrix.channel }}"
fi fi
echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}" echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
# End Determine rust-toolchain version # End Determine rust-toolchain version
@@ -82,7 +75,7 @@ jobs:
# Only install the clippy and rustfmt components on the default rust-toolchain # Only install the clippy and rustfmt components on the default rust-toolchain
- name: "Install rust-toolchain version" - name: "Install rust-toolchain version"
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2 uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
if: ${{ matrix.channel == 'rust-toolchain' }} if: ${{ matrix.channel == 'rust-toolchain' }}
with: with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -92,7 +85,7 @@ jobs:
# Install any other channel to be used, for which we do not execute clippy and rustfmt # Install any other channel to be used, for which we do not execute clippy and rustfmt
- name: "Install MSRV version" - name: "Install MSRV version"
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2 uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
if: ${{ matrix.channel != 'rust-toolchain' }} if: ${{ matrix.channel != 'rust-toolchain' }}
with: with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -100,13 +93,11 @@ jobs:
# Set the current matrix toolchain version as default # Set the current matrix toolchain version as default
- name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default" - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
env:
RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
run: | run: |
# Remove the rust-toolchain.toml # Remove the rust-toolchain.toml
rm rust-toolchain.toml rm rust-toolchain.toml
# Set the default # Set the default
rustup default "${RUST_TOOLCHAIN}" rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
# Show environment # Show environment
- name: "Show environment" - name: "Show environment"
@@ -116,8 +107,7 @@ jobs:
# End Show environment # End Show environment
# Enable Rust Caching # Enable Rust Caching
- name: Rust Caching - uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3
uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with: with:
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
# Like changing the build host from Ubuntu 20.04 to 22.04 for example. # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
@@ -127,39 +117,33 @@ jobs:
# Run cargo tests # Run cargo tests
# First test all features together, afterwards test them separately. # First test all features together, afterwards test them separately.
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
id: test_sqlite_mysql_postgresql_mimalloc_logger
if: ${{ !cancelled() }}
run: |
cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc" - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
id: test_sqlite_mysql_postgresql_mimalloc id: test_sqlite_mysql_postgresql_mimalloc
if: ${{ !cancelled() }} if: $${{ always() }}
run: | run: |
cargo test --features sqlite,mysql,postgresql,enable_mimalloc cargo test --features sqlite,mysql,postgresql,enable_mimalloc
- name: "test features: sqlite,mysql,postgresql" - name: "test features: sqlite,mysql,postgresql"
id: test_sqlite_mysql_postgresql id: test_sqlite_mysql_postgresql
if: ${{ !cancelled() }} if: $${{ always() }}
run: | run: |
cargo test --features sqlite,mysql,postgresql cargo test --features sqlite,mysql,postgresql
- name: "test features: sqlite" - name: "test features: sqlite"
id: test_sqlite id: test_sqlite
if: ${{ !cancelled() }} if: $${{ always() }}
run: | run: |
cargo test --features sqlite cargo test --features sqlite
- name: "test features: mysql" - name: "test features: mysql"
id: test_mysql id: test_mysql
if: ${{ !cancelled() }} if: $${{ always() }}
run: | run: |
cargo test --features mysql cargo test --features mysql
- name: "test features: postgresql" - name: "test features: postgresql"
id: test_postgresql id: test_postgresql
if: ${{ !cancelled() }} if: $${{ always() }}
run: | run: |
cargo test --features postgresql cargo test --features postgresql
# End Run cargo tests # End Run cargo tests
@@ -168,16 +152,16 @@ jobs:
# Run cargo clippy, and fail on warnings # Run cargo clippy, and fail on warnings
- name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc" - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
id: clippy id: clippy
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} if: ${{ always() && matrix.channel == 'rust-toolchain' }}
run: | run: |
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
# End Run cargo clippy # End Run cargo clippy
# Run cargo fmt (Only run on rust-toolchain defined version) # Run cargo fmt (Only run on rust-toolchain defined version)
- name: "check formatting" - name: "check formatting"
id: formatting id: formatting
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} if: ${{ always() && matrix.channel == 'rust-toolchain' }}
run: | run: |
cargo fmt --all -- --check cargo fmt --all -- --check
# End Run cargo fmt # End Run cargo fmt
@@ -187,31 +171,21 @@ jobs:
# This is useful so all test/clippy/fmt actions are done, and they can all be addressed # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
- name: "Some checks failed" - name: "Some checks failed"
if: ${{ failure() }} if: ${{ failure() }}
env:
TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
TEST_MYSQL: ${{ steps.test_mysql.outcome }}
TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
CLIPPY: ${{ steps.clippy.outcome }}
FMT: ${{ steps.formatting.outcome }}
run: | run: |
echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}" echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY
echo "" >> "${GITHUB_STEP_SUMMARY}" echo "" >> $GITHUB_STEP_SUMMARY
echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}" echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}" echo "|---|------|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}" echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}" echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}" echo "" >> $GITHUB_STEP_SUMMARY
echo "" >> "${GITHUB_STEP_SUMMARY}" echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY
echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}" echo "" >> $GITHUB_STEP_SUMMARY
echo "" >> "${GITHUB_STEP_SUMMARY}"
exit 1 exit 1
@@ -220,5 +194,5 @@ jobs:
- name: "All checks passed" - name: "All checks passed"
if: ${{ success() }} if: ${{ success() }}
run: | run: |
echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}" echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY
echo "" >> "${GITHUB_STEP_SUMMARY}" echo "" >> $GITHUB_STEP_SUMMARY


@@ -1,29 +0,0 @@
name: Check templates
permissions: {}
on: [ push, pull_request ]
jobs:
docker-templates:
name: Validate docker templates
permissions:
contents: read
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
# End Checkout the repo
- name: Run make to rebuild templates
working-directory: docker
run: make
- name: Check for unstaged changes
working-directory: docker
run: git diff --exit-code
continue-on-error: false


@@ -1,20 +1,24 @@
name: Hadolint name: Hadolint
permissions: {}
on: [ push, pull_request ] on: [
push,
pull_request
]
jobs: jobs:
hadolint: hadolint:
name: Validate Dockerfile syntax name: Validate Dockerfile syntax
permissions:
contents: read
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
steps: steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
# End Checkout the repo
# Start Docker Buildx # Start Docker Buildx
- name: Setup Docker Buildx - name: Setup Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
# https://github.com/moby/buildkit/issues/3969 # https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with: with:
@@ -33,12 +37,6 @@ jobs:
env: env:
HADOLINT_VERSION: 2.12.0 HADOLINT_VERSION: 2.12.0
# End Download hadolint # End Download hadolint
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
# End Checkout the repo
# Test Dockerfiles with hadolint # Test Dockerfiles with hadolint
- name: Run hadolint - name: Run hadolint


@@ -1,5 +1,4 @@
name: Release name: Release
permissions: {}
on: on:
push: push:
@@ -7,23 +6,17 @@ on:
- main - main
tags: tags:
# https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet - '*'
- '[1-2].[0-9]+.[0-9]+'
jobs: jobs:
# https://github.com/marketplace/actions/skip-duplicate-actions # https://github.com/marketplace/actions/skip-duplicate-actions
# Some checks to determine if we need to continue with building a new docker. # Some checks to determine if we need to continue with building a new docker.
# We will skip this check if we are creating a tag, because that has the same hash as a previous run already. # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
skip_check: skip_check:
# Only run this in the upstream repo and not on forks
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Cancel older jobs when running
permissions:
actions: write
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
outputs: outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }} should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps: steps:
- name: Skip Duplicates Actions - name: Skip Duplicates Actions
id: skip_check id: skip_check
@@ -34,20 +27,14 @@ jobs:
if: ${{ github.ref_type == 'branch' }} if: ${{ github.ref_type == 'branch' }}
docker-build: docker-build:
needs: skip_check
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
name: Build Vaultwarden containers
permissions:
packages: write
contents: read
attestations: write
id-token: write
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 120 timeout-minutes: 120
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them needs: skip_check
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
# Start a local docker registry to extract the final Alpine static build binaries
services: services:
registry: registry:
image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0 image: registry:2
ports: ports:
- 5000:5000 - 5000:5000
env: env:
@@ -69,42 +56,37 @@ jobs:
base_image: ["debian","alpine"] base_image: ["debian","alpine"]
steps: steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
with:
fetch-depth: 0
- name: Initialize QEMU binfmt support - name: Initialize QEMU binfmt support
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
with: with:
platforms: "arm64,arm" platforms: "arm64,arm"
# Start Docker Buildx # Start Docker Buildx
- name: Setup Docker Buildx - name: Setup Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
# https://github.com/moby/buildkit/issues/3969 # https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with: with:
cache-binary: false
buildkitd-config-inline: | buildkitd-config-inline: |
[worker.oci] [worker.oci]
max-parallelism = 2 max-parallelism = 2
driver-opts: | driver-opts: |
network=host network=host
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# We need fetch-depth of 0 so we also get all the tag metadata
with:
persist-credentials: false
fetch-depth: 0
# Determine Base Tags and Source Version # Determine Base Tags and Source Version
- name: Determine Base Tags and Source Version - name: Determine Base Tags and Source Version
shell: bash shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
run: | run: |
# Check which main tag we are going to build determined by ref_type # Check which main tag we are going to build determined by github.ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then if [[ "${{ github.ref_type }}" == "tag" ]]; then
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}" echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
elif [[ "${REF_TYPE}" == "branch" ]]; then elif [[ "${{ github.ref_type }}" == "branch" ]]; then
echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}" echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
fi fi
@@ -120,7 +102,7 @@ jobs:
# Login to Docker Hub # Login to Docker Hub
- name: Login to Docker Hub - name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -129,14 +111,12 @@ jobs:
- name: Add registry for DockerHub - name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
shell: bash shell: bash
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: | run: |
echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}" echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}"
# Login to GitHub Container Registry # Login to GitHub Container Registry
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -146,14 +126,12 @@ jobs:
- name: Add registry for ghcr.io - name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
shell: bash shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: | run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}" echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}"
# Login to Quay.io # Login to Quay.io
- name: Login to Quay.io - name: Login to Quay.io
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with: with:
registry: quay.io registry: quay.io
username: ${{ secrets.QUAY_USERNAME }} username: ${{ secrets.QUAY_USERNAME }}
@@ -163,22 +141,17 @@ jobs:
- name: Add registry for Quay.io - name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
shell: bash shell: bash
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: | run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}"
- name: Configure build cache from/to - name: Configure build cache from/to
shell: bash shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
BASE_IMAGE: ${{ matrix.base_image }}
run: | run: |
# #
# Check if there is a GitHub Container Registry Login and use it for caching # Check if there is a GitHub Container Registry Login and use it for caching
if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}" echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
else else
echo "BAKE_CACHE_FROM=" echo "BAKE_CACHE_FROM="
echo "BAKE_CACHE_TO=" echo "BAKE_CACHE_TO="
@@ -186,13 +159,13 @@ jobs:
# #
- name: Add localhost registry - name: Add localhost registry
if: ${{ matrix.base_image == 'alpine' }}
shell: bash shell: bash
run: | run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}" echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"
- name: Bake ${{ matrix.base_image }} containers - name: Bake ${{ matrix.base_image }} containers
id: bake_vw uses: docker/bake-action@2e3d19baedb14545e5d41222653874f25d5b4dfb # v5.10.0
uses: docker/bake-action@37816e747588cb137173af99ab33873600c46ea8 # v6.8.0
env: env:
BASE_TAGS: "${{ env.BASE_TAGS }}" BASE_TAGS: "${{ env.BASE_TAGS }}"
SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -202,121 +175,78 @@ jobs:
with: with:
pull: true pull: true
push: true push: true
source: .
files: docker/docker-bake.hcl files: docker/docker-bake.hcl
targets: "${{ matrix.base_image }}-multi" targets: "${{ matrix.base_image }}-multi"
set: | set: |
*.cache-from=${{ env.BAKE_CACHE_FROM }} *.cache-from=${{ env.BAKE_CACHE_FROM }}
*.cache-to=${{ env.BAKE_CACHE_TO }} *.cache-to=${{ env.BAKE_CACHE_TO }}
- name: Extract digest SHA
shell: bash
env:
BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
BASE_IMAGE: ${{ matrix.base_image }}
run: |
GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
# Extract the Alpine binaries from the containers # Extract the Alpine binaries from the containers
- name: Extract binaries - name: Extract binaries
if: ${{ matrix.base_image == 'alpine' }}
shell: bash shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
BASE_IMAGE: ${{ matrix.base_image }}
run: | run: |
# Check which main tag we are going to build determined by ref_type # Check which main tag we are going to build determined by github.ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then if [[ "${{ github.ref_type }}" == "tag" ]]; then
EXTRACT_TAG="latest" EXTRACT_TAG="latest"
elif [[ "${REF_TYPE}" == "branch" ]]; then elif [[ "${{ github.ref_type }}" == "branch" ]]; then
EXTRACT_TAG="testing" EXTRACT_TAG="testing"
fi fi
# Check which base_image was used and append -alpine if needed
if [[ "${BASE_IMAGE}" == "alpine" ]]; then
EXTRACT_TAG="${EXTRACT_TAG}-alpine"
fi
# After each extraction the image is removed. # After each extraction the image is removed.
# This is needed because using different platforms doesn't trigger a new pull/download # This is needed because using different platforms doesn't trigger a new pull/download
# Extract amd64 binary # Extract amd64 binary
docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE} docker cp amd64:/vaultwarden vaultwarden-amd64
docker rm --force amd64 docker rm --force amd64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
# Extract arm64 binary # Extract arm64 binary
docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE} docker cp arm64:/vaultwarden vaultwarden-arm64
docker rm --force arm64 docker rm --force arm64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
# Extract armv7 binary # Extract armv7 binary
docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE} docker cp armv7:/vaultwarden vaultwarden-armv7
docker rm --force armv7 docker rm --force armv7
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
# Extract armv6 binary # Extract armv6 binary
docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE} docker cp armv6:/vaultwarden vaultwarden-armv6
docker rm --force armv6 docker rm --force armv6
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine"
# Upload artifacts to GitHub Actions and attest the binaries # Upload artifacts to GitHub Actions
- name: "Upload amd64 artifact ${{ matrix.base_image }}" - name: "Upload amd64 artifact"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: ${{ matrix.base_image == 'alpine' }}
with: with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64
path: vaultwarden-amd64-${{ matrix.base_image }} path: vaultwarden-amd64
- name: "Upload arm64 artifact ${{ matrix.base_image }}" - name: "Upload arm64 artifact"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: ${{ matrix.base_image == 'alpine' }}
with: with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64
path: vaultwarden-arm64-${{ matrix.base_image }} path: vaultwarden-arm64
- name: "Upload armv7 artifact ${{ matrix.base_image }}" - name: "Upload armv7 artifact"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: ${{ matrix.base_image == 'alpine' }}
with: with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7
path: vaultwarden-armv7-${{ matrix.base_image }} path: vaultwarden-armv7
- name: "Upload armv6 artifact ${{ matrix.base_image }}" - name: "Upload armv6 artifact"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
if: ${{ matrix.base_image == 'alpine' }}
with: with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6
path: vaultwarden-armv6-${{ matrix.base_image }} path: vaultwarden-armv6
- name: "Attest artifacts ${{ matrix.base_image }}"
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-path: vaultwarden-*
# End Upload artifacts to GitHub Actions # End Upload artifacts to GitHub Actions


@@ -1,6 +1,3 @@
name: Cleanup
permissions: {}
on: on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
@@ -12,11 +9,10 @@ on:
schedule: schedule:
- cron: '0 1 * * FRI' - cron: '0 1 * * FRI'
name: Cleanup
jobs: jobs:
releasecache-cleanup: releasecache-cleanup:
name: Releasecache Cleanup name: Releasecache Cleanup
permissions:
packages: write
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
continue-on-error: true continue-on-error: true
timeout-minutes: 30 timeout-minutes: 30


@@ -1,45 +1,37 @@
name: Trivy name: trivy
permissions: {}
on: on:
push: push:
branches: branches:
- main - main
tags: tags:
- '*' - '*'
pull_request: pull_request:
branches: branches: [ "main" ]
- main
schedule: schedule:
- cron: '08 11 * * *' - cron: '08 11 * * *'
jobs:
trivy-scan:
# Only run this in the upstream repo and not on forks
# When all forks run this at the same time, it is causing `Too Many Requests` issues
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Trivy Scan
permissions: permissions:
contents: read contents: read
actions: read
security-events: write jobs:
trivy-scan:
# Only run this in the master repo and not on forks
# When all forks run this at the same time, it is causing `Too Many Requests` issues
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Check
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
permissions:
contents: read
security-events: write
actions: read
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
with:
persist-credentials: false
- name: Run Trivy vulnerability scanner - name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0 uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0
env:
TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
with: with:
scan-type: repo scan-type: repo
ignore-unfixed: true ignore-unfixed: true
@@ -48,6 +40,6 @@ jobs:
severity: CRITICAL,HIGH severity: CRITICAL,HIGH
- name: Upload Trivy scan results to GitHub Security tab - name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9 uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6
with: with:
sarif_file: 'trivy-results.sarif' sarif_file: 'trivy-results.sarif'


@@ -1,28 +0,0 @@
name: Security Analysis with zizmor
on:
push:
branches: ["main"]
pull_request:
branches: ["**"]
permissions: {}
jobs:
zizmor:
name: Run zizmor
runs-on: ubuntu-latest
permissions:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Run zizmor
uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2
with:
# intentionally not scanning the entire repository,
# since it contains integration tests.
inputs: ./.github/


@@ -1,7 +1,7 @@
--- ---
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0 rev: v4.6.0
hooks: hooks:
- id: check-yaml - id: check-yaml
- id: check-json - id: check-json
@@ -31,7 +31,7 @@ repos:
language: system language: system
args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"] args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
types_or: [rust, file] types_or: [rust, file]
files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) files: (Cargo.toml|Cargo.lock|rust-toolchain|.*\.rs$)
pass_filenames: false pass_filenames: false
- id: cargo-clippy - id: cargo-clippy
name: cargo clippy name: cargo clippy
@@ -40,13 +40,5 @@ repos:
language: system language: system
args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"] args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
types_or: [rust, file] types_or: [rust, file]
files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) files: (Cargo.toml|Cargo.lock|rust-toolchain|clippy.toml|.*\.rs$)
pass_filenames: false pass_filenames: false
- id: check-docker-templates
name: check docker templates
description: Check if the Docker templates are updated
language: system
entry: sh
args:
- "-c"
- "cd docker && make"

Cargo.lock generated (3785 changes)

File diff suppressed because it is too large


@@ -1,12 +1,9 @@
[workspace]
members = ["macros"]
[package] [package]
name = "vaultwarden" name = "vaultwarden"
version = "1.0.0" version = "1.0.0"
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
edition = "2021" edition = "2021"
rust-version = "1.87.0" rust-version = "1.80.0"
resolver = "2" resolver = "2"
repository = "https://github.com/dani-garcia/vaultwarden" repository = "https://github.com/dani-garcia/vaultwarden"
@@ -32,11 +29,6 @@ enable_mimalloc = ["dep:mimalloc"]
# You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
# if you want to turn off the logging for a specific run. # if you want to turn off the logging for a specific run.
query_logger = ["dep:diesel_logger"] query_logger = ["dep:diesel_logger"]
s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]
# OIDC specific features
oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"]
oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"]
# Enable unstable features, requires nightly # Enable unstable features, requires nightly
# Currently only used to enable rusts official ip support # Currently only used to enable rusts official ip support
@@ -44,26 +36,24 @@ unstable = []
[target."cfg(unix)".dependencies] [target."cfg(unix)".dependencies]
# Logging # Logging
syslog = "7.0.0" syslog = "6.1.1"
[dependencies] [dependencies]
macros = { path = "./macros" }
# Logging # Logging
log = "0.4.27" log = "0.4.22"
fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] } fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] }
tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
# A `dotenv` implementation for Rust # A `dotenv` implementation for Rust
dotenvy = { version = "0.15.7", default-features = false } dotenvy = { version = "0.15.7", default-features = false }
# Lazy initialization # Lazy initialization
once_cell = "1.21.3" once_cell = "1.20.2"
# Numerical libraries # Numerical libraries
num-traits = "0.2.19" num-traits = "0.2.19"
num-derive = "0.4.2" num-derive = "0.4.2"
bigdecimal = "0.4.8" bigdecimal = "0.4.5"
# Web framework # Web framework
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -77,134 +67,104 @@ dashmap = "6.1.0"
# Async futures # Async futures
futures = "0.3.31" futures = "0.3.31"
tokio = { version = "1.47.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } tokio = { version = "1.41.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
tokio-util = { version = "0.7.16", features = ["compat"]}
# A generic serialization/deserialization framework # A generic serialization/deserialization framework
serde = { version = "1.0.219", features = ["derive"] } serde = { version = "1.0.213", features = ["derive"] }
serde_json = "1.0.142" serde_json = "1.0.132"
# A safe, extensible ORM and Query builder # A safe, extensible ORM and Query builder
diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] } diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.2.0" diesel_migrations = "2.2.0"
diesel_logger = { version = "0.4.0", optional = true } diesel_logger = { version = "0.3.0", optional = true }
derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
diesel-derive-newtype = "2.1.2"
# Bundled/Static SQLite # Bundled/Static SQLite
libsqlite3-sys = { version = "0.35.0", features = ["bundled"], optional = true } libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true }
# Crypto-related libraries # Crypto-related libraries
rand = "0.9.2" rand = { version = "0.8.5", features = ["small_rng"] }
ring = "0.17.14" ring = "0.17.8"
subtle = "2.6.1"
# UUID generation # UUID generation
uuid = { version = "1.18.0", features = ["v4"] } uuid = { version = "1.11.0", features = ["v4"] }
# Date and time libraries # Date and time libraries
chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false } chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false }
chrono-tz = "0.10.4" chrono-tz = "0.10.0"
time = "0.3.41" time = "0.3.36"
# Job scheduler # Job scheduler
job_scheduler_ng = "2.3.0" job_scheduler_ng = "2.0.5"
# Data encoding library Hex/Base32/Base64 # Data encoding library Hex/Base32/Base64
data-encoding = "2.9.0" data-encoding = "2.6.0"
# JWT library # JWT library
jsonwebtoken = "9.3.1" jsonwebtoken = "9.3.0"
# TOTP library # TOTP library
totp-lite = "2.0.1" totp-lite = "2.0.1"
# Yubico Library # Yubico Library
yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false } yubico = { version = "0.11.0", features = ["online-tokio"], default-features = false }
# WebAuthn libraries # WebAuthn libraries
# danger-allow-state-serialisation is needed to save the state in the db webauthn-rs = "0.3.2"
# danger-credential-internals is needed to support U2F to Webauthn migration
# danger-user-presence-only-security-keys is needed to disable UV
webauthn-rs = { version = "0.5.2", features = ["danger-allow-state-serialisation", "danger-credential-internals", "danger-user-presence-only-security-keys"] }
webauthn-rs-proto = "0.5.2"
webauthn-rs-core = "0.5.2"
# Handling of URL's for WebAuthn and favicons # Handling of URL's for WebAuthn and favicons
url = "2.5.4" url = "2.5.2"
# Email libraries # Email libraries
lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false } lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
email_address = "0.2.9" email_address = "0.2.9"
# HTML Template library # HTML Template library
handlebars = { version = "6.3.2", features = ["dir_source"] } handlebars = { version = "6.1.0", features = ["dir_source"] }
# HTTP client (Used for favicons, version check, DUO and HIBP API) # HTTP client (Used for favicons, version check, DUO and HIBP API)
reqwest = { version = "0.12.23", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} reqwest = { version = "0.12.8", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
hickory-resolver = "0.25.2" hickory-resolver = "0.24.1"
# Favicon extraction libraries # Favicon extraction libraries
html5gum = "0.7.0" html5gum = "0.5.7"
regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false } regex = { version = "1.11.0", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.1" data-url = "0.3.1"
bytes = "1.10.1" bytes = "1.8.0"
svg-hush = "0.9.5"
# Cache function results (Used for version check and favicon fetching) # Cache function results (Used for version check and favicon fetching)
cached = { version = "0.56.0", features = ["async"] } cached = { version = "0.53.1", features = ["async"] }
# Used for custom short lived cookie jar during favicon extraction # Used for custom short lived cookie jar during favicon extraction
cookie = "0.18.1" cookie = "0.18.1"
cookie_store = "0.21.1" cookie_store = "0.21.0"
# Used by U2F, JWT and PostgreSQL # Used by U2F, JWT and PostgreSQL
openssl = "0.10.73" openssl = "0.10.68"
# CLI argument parsing # CLI argument parsing
pico-args = "0.5.0" pico-args = "0.5.0"
# Macro ident concatenation # Macro ident concatenation
pastey = "0.1.1" paste = "1.0.15"
governor = "0.10.1" governor = "0.7.0"
# OIDC for SSO
openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] }
mini-moka = "0.10.3"
# Check client versions for specific features. # Check client versions for specific features.
semver = "1.0.26" semver = "1.0.23"
# Allow overriding the default memory allocator # Allow overriding the default memory allocator
# Mainly used for the musl builds, since the default musl malloc is very slow # Mainly used for the musl builds, since the default musl malloc is very slow
mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true } mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true }
which = "6.0.3"
which = "8.0.0"
# Argon2 library with support for the PHC format # Argon2 library with support for the PHC format
argon2 = "0.5.3" argon2 = "0.5.3"
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
rpassword = "7.4.0" rpassword = "7.3.1"
# Loading a dynamic CSS Stylesheet
grass_compiler = { version = "0.13.4", default-features = false }
# File are accessed through Apache OpenDAL
opendal = { version = "0.54.0", features = ["services-fs"], default-features = false }
# For retrieving AWS credentials, including temporary SSO credentials
anyhow = { version = "1.0.99", optional = true }
aws-config = { version = "1.8.5", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.5", optional = true }
aws-smithy-runtime-api = { version = "1.8.7", optional = true }
http = { version = "1.3.1", optional = true }
reqsign = { version = "0.16.5", optional = true }
# Strip debuginfo from the release builds # Strip debuginfo from the release builds
# The debug symbols are to provide better panic traces # The symbols are the provide better panic traces
# Also enable fat LTO and use 1 codegen unit for optimizations # Also enable fat LTO and use 1 codegen unit for optimizations
[profile.release] [profile.release]
strip = "debuginfo" strip = "debuginfo"
@@ -239,7 +199,7 @@ codegen-units = 16
# Linting config # Linting config
# https://doc.rust-lang.org/rustc/lints/groups.html # https://doc.rust-lang.org/rustc/lints/groups.html
[workspace.lints.rust] [lints.rust]
# Forbid # Forbid
unsafe_code = "forbid" unsafe_code = "forbid"
non_ascii_idents = "forbid" non_ascii_idents = "forbid"
@@ -253,8 +213,7 @@ noop_method_call = "deny"
refining_impl_trait = { level = "deny", priority = -1 } refining_impl_trait = { level = "deny", priority = -1 }
rust_2018_idioms = { level = "deny", priority = -1 } rust_2018_idioms = { level = "deny", priority = -1 }
rust_2021_compatibility = { level = "deny", priority = -1 } rust_2021_compatibility = { level = "deny", priority = -1 }
rust_2024_compatibility = { level = "deny", priority = -1 } # rust_2024_compatibility = { level = "deny", priority = -1 } # Enable once we are at MSRV 1.81.0
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
single_use_lifetimes = "deny" single_use_lifetimes = "deny"
trivial_casts = "deny" trivial_casts = "deny"
trivial_numeric_casts = "deny" trivial_numeric_casts = "deny"
@@ -263,20 +222,16 @@ unused_import_braces = "deny"
unused_lifetimes = "deny" unused_lifetimes = "deny"
unused_qualifications = "deny" unused_qualifications = "deny"
variant_size_differences = "deny" variant_size_differences = "deny"
# Allow the following lints since these cause issues with Rust v1.84.0 or newer # The lints below are part of the rust_2024_compatibility group
# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues static-mut-refs = "deny"
if_let_rescope = "allow" unsafe-op-in-unsafe-fn = "deny"
tail_expr_drop_order = "allow"
# https://rust-lang.github.io/rust-clippy/stable/index.html # https://rust-lang.github.io/rust-clippy/stable/index.html
[workspace.lints.clippy] [lints.clippy]
# Warn # Warn
dbg_macro = "warn" dbg_macro = "warn"
todo = "warn" todo = "warn"
# Ignore/Allow
result_large_err = "allow"
# Deny # Deny
case_sensitive_file_extension_comparisons = "deny" case_sensitive_file_extension_comparisons = "deny"
cast_lossless = "deny" cast_lossless = "deny"
@@ -284,7 +239,6 @@ clone_on_ref_ptr = "deny"
equatable_if_let = "deny" equatable_if_let = "deny"
filter_map_next = "deny" filter_map_next = "deny"
float_cmp_const = "deny" float_cmp_const = "deny"
implicit_clone = "deny"
inefficient_to_string = "deny" inefficient_to_string = "deny"
iter_on_empty_collections = "deny" iter_on_empty_collections = "deny"
iter_on_single_items = "deny" iter_on_single_items = "deny"
@@ -293,12 +247,14 @@ macro_use_imports = "deny"
manual_assert = "deny" manual_assert = "deny"
manual_instant_elapsed = "deny" manual_instant_elapsed = "deny"
manual_string_new = "deny" manual_string_new = "deny"
match_on_vec_items = "deny"
match_wildcard_for_single_variants = "deny" match_wildcard_for_single_variants = "deny"
mem_forget = "deny" mem_forget = "deny"
needless_continue = "deny" needless_continue = "deny"
needless_lifetimes = "deny" needless_lifetimes = "deny"
option_option = "deny" option_option = "deny"
string_add_assign = "deny" string_add_assign = "deny"
string_to_string = "deny"
unnecessary_join = "deny" unnecessary_join = "deny"
unnecessary_self_imports = "deny" unnecessary_self_imports = "deny"
unnested_or_patterns = "deny" unnested_or_patterns = "deny"
@@ -306,6 +262,3 @@ unused_async = "deny"
unused_self = "deny" unused_self = "deny"
verbose_file_reads = "deny" verbose_file_reads = "deny"
zero_sized_map_values = "deny" zero_sized_map_values = "deny"
[lints]
workspace = true

View File

@@ -59,21 +59,19 @@ A nearly complete implementation of the Bitwarden Client API is provided, includ
## Usage ## Usage
> [!IMPORTANT] > [!IMPORTANT]
> The web-vault requires the use a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API). > Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.
> That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS). >
>This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). >
See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags. >If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above).
There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).
Alternatively, you can also [build Vaultwarden](https://github.com/dani-garcia/vaultwarden/wiki/Building-binary) yourself.
While Vaultwarden is based upon the [Rocket web framework](https://rocket.rs) which has built-in support for TLS our recommendation would be that you setup a reverse proxy (see [proxy examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
> [!TIP] > [!TIP]
>**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).** >**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).**
The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).
There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).
### Docker/Podman CLI ### Docker/Podman CLI
Pull the container image and mount a volume from the host for persistent storage.<br> Pull the container image and mount a volume from the host for persistent storage.<br>
@@ -85,7 +83,7 @@ docker run --detach --name vaultwarden \
--env DOMAIN="https://vw.domain.tld" \ --env DOMAIN="https://vw.domain.tld" \
--volume /vw-data/:/data/ \ --volume /vw-data/:/data/ \
--restart unless-stopped \ --restart unless-stopped \
--publish 127.0.0.1:8000:80 \ --publish 80:80 \
vaultwarden/server:latest vaultwarden/server:latest
``` ```
@@ -106,7 +104,7 @@ services:
volumes: volumes:
- ./vw-data/:/data/ - ./vw-data/:/data/
ports: ports:
- 127.0.0.1:8000:80 - 80:80
``` ```
<br> <br>

View File

@@ -21,7 +21,7 @@ notify us. We welcome working with you to resolve the issue promptly. Thanks in
The following bug classes are out-of scope: The following bug classes are out-of scope:
- Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues) - Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
- Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated - Bugs that are not part of Vaultwarden, like on the the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
- Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer - Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
- Attacks requiring physical access to a user's device - Attacks requiring physical access to a user's device
- Issues related to software or protocols not under Vaultwarden's control - Issues related to software or protocols not under Vaultwarden's control

View File

@@ -11,8 +11,6 @@ fn main() {
println!("cargo:rustc-cfg=postgresql"); println!("cargo:rustc-cfg=postgresql");
#[cfg(feature = "query_logger")] #[cfg(feature = "query_logger")]
println!("cargo:rustc-cfg=query_logger"); println!("cargo:rustc-cfg=query_logger");
#[cfg(feature = "s3")]
println!("cargo:rustc-cfg=s3");
#[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
compile_error!( compile_error!(
@@ -25,7 +23,6 @@ fn main() {
println!("cargo::rustc-check-cfg=cfg(mysql)"); println!("cargo::rustc-check-cfg=cfg(mysql)");
println!("cargo::rustc-check-cfg=cfg(postgresql)"); println!("cargo::rustc-check-cfg=cfg(postgresql)");
println!("cargo::rustc-check-cfg=cfg(query_logger)"); println!("cargo::rustc-check-cfg=cfg(query_logger)");
println!("cargo::rustc-check-cfg=cfg(s3)");
// Rerun when these paths are changed. // Rerun when these paths are changed.
// Someone could have checked-out a tag or specific commit, but no other files changed. // Someone could have checked-out a tag or specific commit, but no other files changed.
@@ -51,8 +48,8 @@ fn main() {
fn run(args: &[&str]) -> Result<String, std::io::Error> { fn run(args: &[&str]) -> Result<String, std::io::Error> {
let out = Command::new(args[0]).args(&args[1..]).output()?; let out = Command::new(args[0]).args(&args[1..]).output()?;
if !out.status.success() { if !out.status.success() {
use std::io::Error; use std::io::{Error, ErrorKind};
return Err(Error::other("Command not successful")); return Err(Error::new(ErrorKind::Other, "Command not successful"));
} }
Ok(String::from_utf8(out.stdout).unwrap().trim().to_string()) Ok(String::from_utf8(out.stdout).unwrap().trim().to_string())
} }

View File

@@ -1,13 +1,13 @@
--- ---
vault_version: "v2025.7.2" vault_version: "v2024.6.2c"
vault_image_digest: "sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9" vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b"
# Cross Compile Docker Helper Scripts v1.6.1 # Cross Compile Docker Helper Scripts v1.5.0
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa"
rust_version: 1.89.0 # Rust version to be used rust_version: 1.82.0 # Rust version to be used
debian_version: trixie # Debian release name to be used debian_version: bookworm # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used alpine_version: "3.20" # Alpine version to be used
# For which platforms/architectures will we try to build images # For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch # Determine the build images per OS/Arch

View File

@@ -19,23 +19,23 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to. # click the tag name to view the digest of the image it currently points to.
# - From the command line: # - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 # $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 # $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c
# [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] # [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b]
# #
# - Conversely, to get the tag name from the digest: # - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 # $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b
# [docker.io/vaultwarden/web-vault:v2025.7.2] # [docker.io/vaultwarden/web-vault:v2024.6.2c]
# #
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault
########################## ALPINE BUILD IMAGES ########################## ########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 ## And for Alpine we define all build images here, they will only be loaded when actually used
## And for Alpine we define all build images here, they will only be loaded when actually used ## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6
########################## BUILD IMAGE ########################## ########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006 # hadolint ignore=DL3006
@@ -76,7 +76,6 @@ RUN source /env-cargo && \
# Copies over *only* your manifests and build files # Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros
ARG CARGO_PROFILE=release ARG CARGO_PROFILE=release
@@ -127,7 +126,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
# #
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22 FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.20
ENV ROCKET_PROFILE="release" \ ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \ ROCKET_ADDRESS=0.0.0.0 \

View File

@@ -19,24 +19,24 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to. # click the tag name to view the digest of the image it currently points to.
# - From the command line: # - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 # $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 # $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c
# [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] # [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b]
# #
# - Conversely, to get the tag name from the digest: # - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 # $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b
# [docker.io/vaultwarden/web-vault:v2025.7.2] # [docker.io/vaultwarden/web-vault:v2024.6.2c]
# #
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault
########################## Cross Compile Docker Helper Scripts ########################## ########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all ## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa AS xx
########################## BUILD IMAGE ########################## ########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006 # hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build
COPY --from=xx / / COPY --from=xx / /
ARG TARGETARCH ARG TARGETARCH
ARG TARGETVARIANT ARG TARGETVARIANT
@@ -68,11 +68,15 @@ RUN apt-get update && \
xx-apt-get install -y \ xx-apt-get install -y \
--no-install-recommends \ --no-install-recommends \
gcc \ gcc \
libmariadb3 \
libpq-dev \ libpq-dev \
libpq5 \ libpq5 \
libssl-dev \ libssl-dev \
libmariadb-dev \
zlib1g-dev && \ zlib1g-dev && \
# Force install arch dependend mariadb dev packages
# Installing them the normal way breaks several other packages (again)
apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
dpkg --force-all -i ./libmariadb-dev*.deb && \
# Run xx-cargo early, since it sometimes seems to break when run at a later stage # Run xx-cargo early, since it sometimes seems to break when run at a later stage
echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
@@ -85,24 +89,24 @@ RUN USER=root cargo new --bin /app
WORKDIR /app WORKDIR /app
# Environment variables for Cargo on Debian based builds # Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH ARG ARCH_OPENSSL_LIB_DIR \
ARCH_OPENSSL_INCLUDE_DIR
RUN source /env-cargo && \ RUN source /env-cargo && \
if xx-info is-cross ; then \ if xx-info is-cross ; then \
# Some special variables if needed to override some build paths
if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \
echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \
echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \
fi && \
# We can't use xx-cargo since that uses clang, which doesn't work for our libraries. # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
# Because of this we generate the needed environment variables here which we can load in the needed steps. # Because of this we generate the needed environment variables here which we can load in the needed steps.
echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \
echo "export CROSS_COMPILE=1" >> /env-cargo && \ echo "export CROSS_COMPILE=1" >> /env-cargo && \
echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \
# For some architectures `xx-info` returns a triple which doesn't matches the path on disk echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \
# In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
else \
echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
fi && \
echo "# End of env-cargo" >> /env-cargo ; \
fi && \ fi && \
# Output the current contents of the file # Output the current contents of the file
cat /env-cargo cat /env-cargo
@@ -112,7 +116,6 @@ RUN source /env-cargo && \
# Copies over *only* your manifests and build files # Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros
ARG CARGO_PROFILE=release ARG CARGO_PROFILE=release
@@ -162,7 +165,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
# #
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim
ENV ROCKET_PROFILE="release" \ ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \ ROCKET_ADDRESS=0.0.0.0 \
@@ -175,7 +178,7 @@ RUN mkdir /data && \
--no-install-recommends \ --no-install-recommends \
ca-certificates \ ca-certificates \
curl \ curl \
libmariadb-dev \ libmariadb-dev-compat \
libpq5 \ libpq5 \
openssl && \ openssl && \
apt-get clean && \ apt-get clean && \

View File

@@ -86,11 +86,15 @@ RUN apt-get update && \
xx-apt-get install -y \ xx-apt-get install -y \
--no-install-recommends \ --no-install-recommends \
gcc \ gcc \
libmariadb3 \
libpq-dev \ libpq-dev \
libpq5 \ libpq5 \
libssl-dev \ libssl-dev \
libmariadb-dev \
zlib1g-dev && \ zlib1g-dev && \
# Force install arch dependend mariadb dev packages
# Installing them the normal way breaks several other packages (again)
apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
dpkg --force-all -i ./libmariadb-dev*.deb && \
# Run xx-cargo early, since it sometimes seems to break when run at a later stage # Run xx-cargo early, since it sometimes seems to break when run at a later stage
echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
{% endif %} {% endif %}
@@ -105,24 +109,24 @@ WORKDIR /app
{% if base == "debian" %} {% if base == "debian" %}
# Environment variables for Cargo on Debian based builds # Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH ARG ARCH_OPENSSL_LIB_DIR \
ARCH_OPENSSL_INCLUDE_DIR
RUN source /env-cargo && \ RUN source /env-cargo && \
if xx-info is-cross ; then \ if xx-info is-cross ; then \
# Some special variables if needed to override some build paths
if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \
echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \
echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \
fi && \
# We can't use xx-cargo since that uses clang, which doesn't work for our libraries. # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
# Because of this we generate the needed environment variables here which we can load in the needed steps. # Because of this we generate the needed environment variables here which we can load in the needed steps.
echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \
echo "export CROSS_COMPILE=1" >> /env-cargo && \ echo "export CROSS_COMPILE=1" >> /env-cargo && \
echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \
# For some architectures `xx-info` returns a triple which doesn't matches the path on disk echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \
# In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
else \
echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
fi && \
echo "# End of env-cargo" >> /env-cargo ; \
fi && \ fi && \
# Output the current contents of the file # Output the current contents of the file
cat /env-cargo cat /env-cargo
@@ -139,7 +143,6 @@ RUN source /env-cargo && \
# Copies over *only* your manifests and build files # Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros
ARG CARGO_PROFILE=release ARG CARGO_PROFILE=release
@@ -212,7 +215,7 @@ RUN mkdir /data && \
--no-install-recommends \ --no-install-recommends \
ca-certificates \ ca-certificates \
curl \ curl \
libmariadb-dev \ libmariadb-dev-compat \
libpq5 \ libpq5 \
openssl && \ openssl && \
apt-get clean && \ apt-get clean && \

View File

@@ -46,7 +46,7 @@ There also is an option to use an other docker container to provide support for
```bash ```bash
# To install and activate # To install and activate
docker run --privileged --rm tonistiigi/binfmt --install arm64,arm docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall # To unistall
docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
``` ```

View File

@@ -17,7 +17,7 @@ variable "SOURCE_REPOSITORY_URL" {
default = null default = null
} }
// The commit hash of the current commit this build was triggered on // The commit hash of of the current commit this build was triggered on
variable "SOURCE_COMMIT" { variable "SOURCE_COMMIT" {
default = null default = null
} }
@@ -133,7 +133,8 @@ target "debian-386" {
platforms = ["linux/386"] platforms = ["linux/386"]
tags = generate_tags("", "-386") tags = generate_tags("", "-386")
args = { args = {
TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig" ARCH_OPENSSL_LIB_DIR = "/usr/lib/i386-linux-gnu"
ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/i386-linux-gnu"
} }
} }
@@ -141,12 +142,20 @@ target "debian-ppc64le" {
inherits = ["debian"] inherits = ["debian"]
platforms = ["linux/ppc64le"] platforms = ["linux/ppc64le"]
tags = generate_tags("", "-ppc64le") tags = generate_tags("", "-ppc64le")
args = {
ARCH_OPENSSL_LIB_DIR = "/usr/lib/powerpc64le-linux-gnu"
ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/powerpc64le-linux-gnu"
}
} }
target "debian-s390x" { target "debian-s390x" {
inherits = ["debian"] inherits = ["debian"]
platforms = ["linux/s390x"] platforms = ["linux/s390x"]
tags = generate_tags("", "-s390x") tags = generate_tags("", "-s390x")
args = {
ARCH_OPENSSL_LIB_DIR = "/usr/lib/s390x-linux-gnu"
ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/s390x-linux-gnu"
}
} }
// ==== End of unsupported Debian architecture targets === // ==== End of unsupported Debian architecture targets ===

2
dylint.toml Normal file
View File

@@ -0,0 +1,2 @@
[workspace.metadata.dylint]
libraries = [{ path = "dylints/*" }]

7
dylints/README.md Normal file
View File

@@ -0,0 +1,7 @@
# How to run Lints
```sh
cargo install cargo-dylint dylint-link
RUSTFLAGS="-Aunreachable_patterns" cargo dylint --all -- --features sqlite
```
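
As a rough illustration of what the `non_authenticated_routes` lint flags (a sketch only; the route and handler names below are made up), a Rocket handler whose parameters include none of Vaultwarden's authentication request guards is reported, while one taking a guard such as `Headers` is not:
```rust
use rocket::{get, serde::json::Json};

use crate::auth::Headers; // one of the request guards the lint accepts

// Flagged: no authentication guard among the parameters.
#[get("/alive")]
fn alive() -> Json<&'static str> {
    Json("ok")
}

// Accepted: the `Headers` guard must authenticate the request first.
#[get("/alive-authed")]
fn alive_authed(_headers: Headers) -> Json<&'static str> {
    Json("ok")
}
```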

View File

@@ -0,0 +1,2 @@
[target.'cfg(all())']
linker = "dylint-link"

View File

@@ -0,0 +1 @@
/target

1659
dylints/non_authenticated_routes/Cargo.lock generated Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,20 @@
[package]
name = "non_authenticated_routes"
version = "0.1.0"
authors = ["authors go here"]
description = "Dylint lint that flags Rocket routes lacking an authentication guard"
edition = "2021"
publish = false
[lib]
crate-type = ["cdylib"]
[dependencies]
clippy_utils = { git = "https://github.com/rust-lang/rust-clippy", rev = "4f0e46b74dbc8441daf084b6f141a7fe414672a2" }
dylint_linting = "3.2.1"
[dev-dependencies]
dylint_testing = "3.2.1"
[package.metadata.rust-analyzer]
rustc_private = true

View File

@@ -0,0 +1,3 @@
[toolchain]
channel = "nightly-2024-11-09"
components = ["llvm-tools-preview", "rustc-dev"]

View File

@@ -0,0 +1,167 @@
#![feature(rustc_private)]
#![feature(let_chains)]
extern crate rustc_arena;
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_attr;
extern crate rustc_data_structures;
extern crate rustc_errors;
extern crate rustc_hir;
extern crate rustc_hir_pretty;
extern crate rustc_index;
extern crate rustc_infer;
extern crate rustc_lexer;
extern crate rustc_middle;
extern crate rustc_mir_dataflow;
extern crate rustc_parse;
extern crate rustc_span;
extern crate rustc_target;
extern crate rustc_trait_selection;
use clippy_utils::diagnostics::span_lint;
use rustc_hir::{def_id::DefId, Item, ItemKind, QPath, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_span::{symbol::Ident, Span, Symbol};
dylint_linting::impl_late_lint! {
/// ### What it does
/// Checks that every Rocket route handler takes at least one of Vaultwarden's
/// authentication request guards (see `VALID_AUTH_HEADERS` below).
///
/// ### Why is this bad?
/// A route without an authentication guard can be called without credentials,
/// which is almost never intended for a server that stores secrets.
///
/// ### Known problems
/// Relies on the struct generated by the route attribute appearing directly
/// after the handler function; an unexpected expansion order triggers the
/// fallback diagnostics below.
///
/// ### Example
/// ```rust
/// #[get("/alive")]
/// fn alive() { /* ... */ }
/// ```
/// Use instead:
/// ```rust
/// #[get("/alive")]
/// fn alive(_headers: Headers) { /* ... */ }
/// ```
pub NON_AUTHENTICATED_ROUTES,
Warn,
"Rocket route handler without an authentication request guard",
NonAuthenticatedRoutes::default()
}
#[derive(Default)]
pub struct NonAuthenticatedRoutes {
last_function_item: Option<(Ident, Span, bool)>,
}
// Collect all the attribute macros that are applied to the given span
fn attr_def_ids(mut span: rustc_span::Span) -> Vec<(DefId, Symbol, Option<DefId>)> {
use rustc_span::hygiene::{walk_chain, ExpnKind, MacroKind};
use rustc_span::{ExpnData, SyntaxContext};
let mut def_ids = Vec::new();
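// Walk outward through the span's macro-expansion chain, recording every
// attribute macro that produced one of the expansions along the way.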
while span.ctxt() != SyntaxContext::root() {
if let ExpnData {
kind: ExpnKind::Macro(MacroKind::Attr, macro_symbol),
macro_def_id: Some(def_id),
parent_module,
..
} = span.ctxt().outer_expn_data()
{
def_ids.push((def_id, macro_symbol, parent_module));
}
span = walk_chain(span, SyntaxContext::root());
}
def_ids
}
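// Rocket attribute macros that do not define routes: error catchers take no
// authentication guards and must not be linted.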
const ROCKET_MACRO_EXCEPTIONS: [(&str, &str); 1] = [("rocket::catch", "catch")];
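// Request guards that perform authentication; a route counts as authenticated
// if any of its parameter types resolves to one of these paths.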
const VALID_AUTH_HEADERS: [&str; 6] = [
"auth::Headers",
"auth::OrgHeaders",
"auth::AdminHeaders",
"auth::ManagerHeaders",
"auth::ManagerHeadersLoose",
"auth::OwnerHeaders",
];
impl<'tcx> LateLintPass<'tcx> for NonAuthenticatedRoutes {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item) {
if let ItemKind::Fn(sig, ..) = item.kind {
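// Record every free function we pass: a Rocket route attribute expands into
// a struct with the same name right after the handler, so the struct branch
// below can pair the generated route with this record.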
let mut has_auth_headers = false;
for input in sig.decl.inputs {
let TyKind::Path(QPath::Resolved(_, path)) = input.kind else {
continue;
};
for seg in path.segments {
if let Some(def_id) = seg.res.opt_def_id() {
let def = cx.tcx.def_path_str(def_id);
if VALID_AUTH_HEADERS.contains(&def.as_str()) {
has_auth_headers = true;
}
}
}
}
self.last_function_item = Some((item.ident, sig.span, has_auth_headers));
return;
}
let ItemKind::Struct(_data, _generics) = item.kind else {
return;
};
let def_ids = attr_def_ids(item.span);
let mut is_rocket_route = false;
for (def_id, sym, parent) in &def_ids {
let def_id = cx.tcx.def_path_str(*def_id);
let sym = sym.as_str();
let parent = parent.map(|parent| cx.tcx.def_path_str(parent));
if ROCKET_MACRO_EXCEPTIONS.contains(&(&def_id, sym)) {
is_rocket_route = false;
break;
}
if def_id.starts_with("rocket::") || parent.as_deref() == Some("rocket_codegen") {
is_rocket_route = true;
break;
}
}
if !is_rocket_route {
return;
}
let Some((func_ident, func_span, has_auth_headers)) = self.last_function_item.take() else {
span_lint(cx, NON_AUTHENTICATED_ROUTES, item.span, "No function found before the expanded route");
return;
};
if func_ident != item.ident {
span_lint(
cx,
NON_AUTHENTICATED_ROUTES,
item.span,
"The function before the expanded route does not match the route",
);
return;
}
if !has_auth_headers {
span_lint(
cx,
NON_AUTHENTICATED_ROUTES,
func_span,
"This Rocket route does not have any authentication headers",
);
}
}
}
#[test]
fn ui() {
dylint_testing::ui_test(env!("CARGO_PKG_NAME"), "ui");
}

View File

@@ -0,0 +1 @@
fn main() {}

View File

@@ -1,16 +0,0 @@
[package]
name = "macros"
version = "0.1.0"
edition = "2021"
[lib]
name = "macros"
path = "src/lib.rs"
proc-macro = true
[dependencies]
quote = "1.0.40"
syn = "2.0.105"
[lints]
workspace = true

View File

@@ -1,56 +0,0 @@
use proc_macro::TokenStream;
use quote::quote;
#[proc_macro_derive(UuidFromParam)]
pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
impl_derive_uuid_macro(&ast)
}
fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let gen_derive = quote! {
#[automatically_derived]
impl<'r> rocket::request::FromParam<'r> for #name {
type Error = ();
#[inline(always)]
fn from_param(param: &'r str) -> Result<Self, Self::Error> {
if uuid::Uuid::parse_str(param).is_ok() {
Ok(Self(param.to_string()))
} else {
Err(())
}
}
}
};
gen_derive.into()
}
#[proc_macro_derive(IdFromParam)]
pub fn derive_id_from_param(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
impl_derive_safestring_macro(&ast)
}
fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let gen_derive = quote! {
#[automatically_derived]
impl<'r> rocket::request::FromParam<'r> for #name {
type Error = ();
#[inline(always)]
fn from_param(param: &'r str) -> Result<Self, Self::Error> {
if param.chars().all(|c| matches!(c, 'a'..='z' | 'A'..='Z' |'0'..='9' | '-')) {
Ok(Self(param.to_string()))
} else {
Err(())
}
}
}
};
gen_derive.into()
}
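
For context, a minimal sketch of how these (now removed) derives were consumed, assuming a newtype over `String` as the generated `FromParam` impl expects (`CipherId` here is illustrative):
```rust
use macros::UuidFromParam;

// The derive emits an `impl FromParam` returning `Self(param.to_string())`,
// so it only applies to single-field tuple structs over String.
#[derive(UuidFromParam)]
struct CipherId(String);

// Rocket can then take the newtype directly in a route signature, and the
// request fails to match unless the path segment parses as a valid UUID:
// #[get("/ciphers/<cipher_id>")]
// fn get_cipher(cipher_id: CipherId, headers: Headers) -> ... { ... }
```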

View File

@@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@@ -1,6 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier VARCHAR(768) NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@@ -1,2 +0,0 @@
ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`;
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

View File

@@ -1,5 +0,0 @@
ALTER TABLE users_collections
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;
ALTER TABLE collections_groups
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;

View File

@@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@@ -1,6 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1,8 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@@ -1,3 +0,0 @@
ALTER TABLE sso_users
DROP CONSTRAINT "sso_users_user_uuid_fkey",
ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

View File

@@ -1,5 +0,0 @@
ALTER TABLE users_collections
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;
ALTER TABLE collections_groups
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE;

View File

@@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@@ -1,6 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1,8 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_users;
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE
);

View File

@@ -1,5 +0,0 @@
ALTER TABLE users_collections
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT 0; -- FALSE
ALTER TABLE collections_groups
ADD COLUMN manage BOOLEAN NOT NULL DEFAULT 0; -- FALSE

View File

@@ -1,64 +0,0 @@
#################################
### Conf to run dev instances ###
#################################
ENV=dev
DC_ENV_FILE=.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1
################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=${TEST_USER}
TEST_USER_MAIL=${TEST_USER}@yopmail.com
TEST_USER2=test2
TEST_USER2_PASSWORD=${TEST_USER2}
TEST_USER2_MAIL=${TEST_USER2}@yopmail.com
TEST_USER3=test3
TEST_USER3_PASSWORD=${TEST_USER3}
TEST_USER3_MAIL=${TEST_USER3}@yopmail.com
###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8080
# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}
######################
# Vaultwarden Config #
######################
ROCKET_ADDRESS=0.0.0.0
ROCKET_PORT=8000
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
I_REALLY_WANT_VOLATILE_STORAGE=true
SSO_ENABLED=true
SSO_ONLY=false
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SMTP_HOST=127.0.0.1
SMTP_PORT=1025
SMTP_SECURITY=off
SMTP_TIMEOUT=5
SMTP_FROM=vaultwarden@test
SMTP_FROM_NAME=Vaultwarden
########################################################
# DUMMY values for docker-compose to stop bothering us #
########################################################
MARIADB_PORT=3305
MYSQL_PORT=3307
POSTGRES_PORT=5432

View File

@@ -1,6 +0,0 @@
logs
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
temp

View File

@@ -1,169 +0,0 @@
# Integration tests
This allows running integration tests using [Playwright](https://playwright.dev/).
It uses its own `test.env` with different ports to not collide with a running dev instance.
## Install
This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers.
### Running Playwright outside docker
It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change.
You will additionally need `nodejs`, then run:
```bash
npm install
npx playwright install-deps
npx playwright install firefox
```
## Usage
To run all the tests:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright
```
To force a rebuild of the Playwright image:
```bash
DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright
```
To access the UI to easily run tests individually and debug if needed (this will not work in docker):
```bash
npx playwright test --ui
```
### DB
Projects are configured so the tests can be run against a specific database.
You can use:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite
```
### SSO
To run the SSO tests:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite
```
### Keep services running
If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests):
```bash
PW_KEEP_SERVICE_RUNNNING=true npx playwright test
```
### Running specific tests
To run a whole file you can:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login
```
To run only a specific test (it might fail if it has dependencies):
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation"
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16
```
## Writing scenario
When creating a new scenario, use the recorder to more easily identify elements
(in general, try to rely on visible hints to identify elements, not hidden IDs).
This does not start the server; you will need to start it manually.
```bash
npx playwright codegen "http://127.0.0.1:8000"
```
## Override web-vault
It is possible to change the `web-vault` in use by referencing a different `bw_web_builds` commit:
```bash
export PW_WV_REPO_URL=https://github.com/Timshel/oidc_web_builds.git
export PW_WV_COMMIT_HASH=8707dc76df3f0cceef2be5bfae37bb29bd17fae6
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Playwright
```
# OpenID Connect test setup
Additionally, this `docker-compose` template allows running Vaultwarden,
[Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC.
## Setup
This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
First create a copy of `.env.template` as `.env` (this prevents committing your custom settings, e.g. `SMTP_*`).
## Usage
Then start the stack (the `profile` is required to run `Vaultwarden`):
```bash
> docker compose --profile vaultwarden --env-file .env up
....
keycloakSetup_1 | Logging into http://127.0.0.1:8080 as user admin of realm master
keycloakSetup_1 | Created new realm with id 'test'
keycloakSetup_1 | 74af4933-e386-4e64-ba15-a7b61212c45e
oidc_keycloakSetup_1 exited with code 0
```
Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates that the Keycloak realm, client and user were set up correctly
(it is normal for this container to stop once the configuration is done).
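If you want to verify the realm yourself, the same readiness check the setup script performs works from the host too (a sketch; the `8080` port matches the dev defaults shown above):
```bash
# Prints 200 once the realm exists, 404 while it does not
curl -s -o /dev/null -w "%{http_code}\n" http://127.0.0.1:8080/realms/test
```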
Then you can access:
- `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`.
- `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin`.
- `Maildev` on http://0.0.0.0:1080.
To proceed with an SSO login, after you enter the email, the SSO button should be visible on the screen prompting for the `Master Password`.
To use your computer's external IP (for example, when testing with a phone), you will have to configure `KC_HTTP_HOST` and `DOMAIN`.
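For example, a minimal sketch of the `.env` overrides, assuming a hypothetical LAN address of `192.168.1.50`:
```bash
# Hypothetical address; replace with your machine's external IP
KC_HTTP_HOST=192.168.1.50
DOMAIN=http://192.168.1.50:8000
```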
## Running only Keycloak
You can run just `Keycloak` with `--profile keycloak`:
```bash
> docker compose --profile keycloak --env-file .env up
```
When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases).
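As a rough sketch (the paths, port and `cargo` invocation are assumptions to adapt to your checkout), a local Vaultwarden can be pointed at the dockerized Keycloak like this:
```bash
# Match the values from .env; the web-vault path is hypothetical
export SSO_ENABLED=true
export SSO_AUTHORITY=http://127.0.0.1:8080/realms/test
export SSO_CLIENT_ID=warden
export SSO_CLIENT_SECRET=warden
export WEB_VAULT_FOLDER=/path/to/extracted/web-vault
cargo run --features sqlite
```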
## Rebuilding the Vaultwarden image
To force rebuilding the Vaultwarden image you can run:
```bash
docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden
```
## Configuration
All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template).
The content of the file will be loaded as environment variables in all containers.
- `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)).
- Any `Vaultwarden` configuration value can be set (e.g. `SMTP_*`); see the sketch below.
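For instance, a hypothetical set of overrides in `.env` (all three variables already appear in this template's configuration):
```bash
KC_HTTP_PORT=8080
SMTP_DEBUG=true
SSO_DEBUG_TOKENS=true
```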
## Cleanup
Use `docker compose --profile vaultwarden down`.

View File

@@ -1,40 +0,0 @@
FROM docker.io/library/debian:bookworm-slim AS build
ENV DEBIAN_FRONTEND=noninteractive
ARG KEYCLOAK_VERSION
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update \
    && apt-get install -y ca-certificates curl wget \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /
RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz
FROM docker.io/library/debian:bookworm-slim
ENV DEBIAN_FRONTEND=noninteractive
ARG KEYCLOAK_VERSION
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update \
    && apt-get install -y ca-certificates curl wget \
    && rm -rf /var/lib/apt/lists/*
ARG JAVA_URL
ARG JAVA_VERSION
ENV JAVA_VERSION=${JAVA_VERSION}
RUN mkdir -p /opt/openjdk && cd /opt/openjdk \
    && wget -c "${JAVA_URL}" -O - | tar -xz
WORKDIR /
COPY setup.sh /setup.sh
COPY --from=build /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin
CMD ["/setup.sh"]

View File

@@ -1,36 +0,0 @@
#!/bin/bash
export PATH=/opt/keycloak/bin:/opt/openjdk/jdk-${JAVA_VERSION}/bin:$PATH
export JAVA_HOME=/opt/openjdk/jdk-${JAVA_VERSION}
# Wait for Keycloak to be reachable. A 404 on the dummy realm means Keycloak is
# up but not yet configured; a 200 means a previous run already completed the setup.
STATUS_CODE=0
while [[ "$STATUS_CODE" != "404" ]] ; do
    echo "Will retry in 2 seconds"
    sleep 2
    STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$DUMMY_AUTHORITY")
    if [[ "$STATUS_CODE" = "200" ]]; then
        echo "Setup should already be done. Will not run."
        exit 0
    fi
done
set -e
kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli
kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600"
kcadm.sh create clients -r "$TEST_REALM" -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i
TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n
TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n
TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n
# Dummy realm to mark the end of the setup
kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600"

View File

@@ -1,40 +0,0 @@
FROM docker.io/library/debian:bookworm-slim
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
    && apt-get install -y ca-certificates curl \
    && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \
    && chmod a+r /etc/apt/keyrings/docker.asc \
    && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" | tee /etc/apt/sources.list.d/docker.list \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
        containerd.io \
        docker-buildx-plugin \
        docker-ce \
        docker-ce-cli \
        docker-compose-plugin \
        git \
        libmariadb-dev-compat \
        libpq5 \
        nodejs \
        npm \
        openssl \
    && rm -rf /var/lib/apt/lists/*
RUN mkdir /playwright
WORKDIR /playwright
COPY package.json .
RUN npm install && npx playwright install-deps && npx playwright install firefox
COPY docker-compose.yml test.env ./
COPY compose ./compose
COPY *.ts test.env ./
COPY tests ./tests
ENTRYPOINT ["/usr/bin/npx", "playwright"]
CMD ["test"]

View File

@@ -1,40 +0,0 @@
FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt
FROM node:22-bookworm AS build
ARG REPO_URL
ARG COMMIT_HASH
ENV REPO_URL=$REPO_URL
ENV COMMIT_HASH=$COMMIT_HASH
COPY --from=prebuilt /web-vault /web-vault
COPY build.sh /build.sh
RUN /build.sh
######################## RUNTIME IMAGE ########################
FROM docker.io/library/debian:bookworm-slim
ENV DEBIAN_FRONTEND=noninteractive
# Create the data folder and install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
    --no-install-recommends \
    ca-certificates \
    curl \
    libmariadb-dev-compat \
    libpq5 \
    openssl && \
    rm -rf /var/lib/apt/lists/*
# Copy the start script and binary from the prebuilt image,
# and the web-vault from the "build" stage, to the current stage
WORKDIR /
COPY --from=prebuilt /start.sh .
COPY --from=prebuilt /vaultwarden .
COPY --from=build /web-vault ./web-vault
ENTRYPOINT ["/start.sh"]

View File

@@ -1,24 +0,0 @@
#!/bin/bash
echo $REPO_URL
echo $COMMIT_HASH
if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then
    rm -rf /web-vault
    mkdir bw_web_builds
    cd bw_web_builds
    git -c init.defaultBranch=main init
    git remote add origin "$REPO_URL"
    git fetch --depth 1 origin "$COMMIT_HASH"
    git -c advice.detachedHead=false checkout FETCH_HEAD
    export VAULT_VERSION=$(grep "ARG VAULT_VERSION" Dockerfile | cut -d "=" -f2)
    ./scripts/checkout_web_vault.sh
    ./scripts/patch_web_vault.sh
    ./scripts/build_web_vault.sh
    printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json
    mv ./web-vault/apps/web/build /web-vault
fi

View File

@@ -1,124 +0,0 @@
services:
  VaultwardenPrebuild:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden_prebuilt
    image: playwright_oidc_vaultwarden_prebuilt
    build:
      context: ..
      dockerfile: Dockerfile
    entrypoint: /bin/bash
    restart: "no"
  Vaultwarden:
    profiles: ["playwright", "vaultwarden"]
    container_name: playwright_oidc_vaultwarden-${ENV:-dev}
    image: playwright_oidc_vaultwarden-${ENV:-dev}
    network_mode: "host"
    build:
      context: compose/warden
      dockerfile: Dockerfile
      args:
        REPO_URL: ${PW_WV_REPO_URL:-}
        COMMIT_HASH: ${PW_WV_COMMIT_HASH:-}
    env_file: ${DC_ENV_FILE:-.env}
    environment:
      - DATABASE_URL
      - I_REALLY_WANT_VOLATILE_STORAGE
      - LOG_LEVEL
      - LOGIN_RATELIMIT_MAX_BURST
      - SMTP_HOST
      - SMTP_FROM
      - SMTP_DEBUG
      - SSO_DEBUG_TOKENS
      - SSO_FRONTEND
      - SSO_ENABLED
      - SSO_ONLY
    restart: "no"
    depends_on:
      - VaultwardenPrebuild
  Playwright:
    profiles: ["playwright"]
    container_name: playwright_oidc_playwright
    image: playwright_oidc_playwright
    network_mode: "host"
    build:
      context: .
      dockerfile: compose/playwright/Dockerfile
    environment:
      - PW_WV_REPO_URL
      - PW_WV_COMMIT_HASH
    restart: "no"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ..:/project
  Mariadb:
    profiles: ["playwright"]
    container_name: playwright_mariadb
    image: mariadb:11.2.4
    env_file: test.env
    healthcheck:
      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MARIADB_PORT}:3306
  Mysql:
    profiles: ["playwright"]
    container_name: playwright_mysql
    image: mysql:8.4.1
    env_file: test.env
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      start_period: 10s
      interval: 10s
    ports:
      - ${MYSQL_PORT}:3306
  Postgres:
    profiles: ["playwright"]
    container_name: playwright_postgres
    image: postgres:16.3
    env_file: test.env
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
      start_period: 20s
      interval: 30s
    ports:
      - ${POSTGRES_PORT}:5432
  Maildev:
    profiles: ["vaultwarden", "maildev"]
    container_name: maildev
    image: timshel/maildev:3.0.4
    ports:
      - ${SMTP_PORT}:1025
      - 1080:1080
  Keycloak:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloak-${ENV:-dev}
    image: quay.io/keycloak/keycloak:25.0.4
    network_mode: "host"
    command:
      - start-dev
    env_file: ${DC_ENV_FILE:-.env}
  KeycloakSetup:
    profiles: ["keycloak", "vaultwarden"]
    container_name: keycloakSetup-${ENV:-dev}
    image: keycloak_setup-${ENV:-dev}
    build:
      context: compose/keycloak
      dockerfile: Dockerfile
      args:
        KEYCLOAK_VERSION: 25.0.4
        JAVA_URL: https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz
        JAVA_VERSION: 21.0.2
    network_mode: "host"
    depends_on:
      - Keycloak
    restart: "no"
    env_file: ${DC_ENV_FILE:-.env}

View File

@@ -1,22 +0,0 @@
import { type FullConfig } from '@playwright/test';
import { execSync } from 'node:child_process';
import fs from 'fs';
const utils = require('./global-utils');
utils.loadEnv();
async function globalSetup(config: FullConfig) {
    // Are we running in docker with the project mounted?
    const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : ".");
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, {
        env: { ...process.env },
        stdio: "inherit"
    });
    execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, {
        env: { ...process.env },
        stdio: "inherit"
    });
}
export default globalSetup;

View File

@@ -1,246 +0,0 @@
import { expect, type Browser, type Page, type Test, type TestInfo } from '@playwright/test';
import { EventEmitter } from "events";
import { type Mail, MailServer } from 'maildev';
import { execSync } from 'node:child_process';
import dotenv from 'dotenv';
import dotenvExpand from 'dotenv-expand';
const fs = require("fs");
const { spawn } = require('node:child_process');
export function loadEnv(){
    var myEnv = dotenv.config({ path: 'test.env' });
    dotenvExpand.expand(myEnv);
    return {
        user1: {
            email: process.env.TEST_USER_MAIL,
            name: process.env.TEST_USER,
            password: process.env.TEST_USER_PASSWORD,
        },
        user2: {
            email: process.env.TEST_USER2_MAIL,
            name: process.env.TEST_USER2,
            password: process.env.TEST_USER2_PASSWORD,
        },
        user3: {
            email: process.env.TEST_USER3_MAIL,
            name: process.env.TEST_USER3,
            password: process.env.TEST_USER3_PASSWORD,
        },
    }
}
export async function waitFor(url: string, browser: Browser) {
    var ready = false;
    var context;
    do {
        try {
            context = await browser.newContext();
            const page = await context.newPage();
            await page.waitForTimeout(500);
            const result = await page.goto(url);
            ready = result !== null && result.status() === 200;
        } catch(e) {
            if( !e.message.includes("CONNECTION_REFUSED") ){
                throw e;
            }
        } finally {
            await context.close();
        }
    } while(!ready);
}
export function startComposeService(serviceName: string){
    console.log(`Starting ${serviceName}`);
    execSync(`docker compose --profile playwright --env-file test.env up -d ${serviceName}`);
}
export function stopComposeService(serviceName: string){
    console.log(`Stopping ${serviceName}`);
    execSync(`docker compose --profile playwright --env-file test.env stop ${serviceName}`);
}
function wipeSqlite(){
    console.log(`Delete Vaultwarden container to wipe sqlite`);
    execSync(`docker compose --env-file test.env stop Vaultwarden`);
    execSync(`docker compose --env-file test.env rm -f Vaultwarden`);
}
async function wipeMariaDB(){
    var mysql = require('mysql2/promise');
    var ready = false;
    var connection;
    do {
        try {
            connection = await mysql.createConnection({
                user: process.env.MARIADB_USER,
                host: "127.0.0.1",
                database: process.env.MARIADB_DATABASE,
                password: process.env.MARIADB_PASSWORD,
                port: process.env.MARIADB_PORT,
            });
            await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`);
            await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`);
            console.log('Successfully wiped mariadb');
            ready = true;
        } catch (err) {
            console.log(`Error when wiping mariadb: ${err}`);
        } finally {
            if( connection ){
                connection.end();
            }
        }
        await new Promise(r => setTimeout(r, 1000));
    } while(!ready);
}
async function wipeMysqlDB(){
    var mysql = require('mysql2/promise');
    var ready = false;
    var connection;
    do {
        try {
            connection = await mysql.createConnection({
                user: process.env.MYSQL_USER,
                host: "127.0.0.1",
                database: process.env.MYSQL_DATABASE,
                password: process.env.MYSQL_PASSWORD,
                port: process.env.MYSQL_PORT,
            });
            await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`);
            await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`);
            console.log('Successfully wiped mysql');
            ready = true;
        } catch (err) {
            console.log(`Error when wiping mysql: ${err}`);
        } finally {
            if( connection ){
                connection.end();
            }
        }
        await new Promise(r => setTimeout(r, 1000));
    } while(!ready);
}
async function wipePostgres(){
    const { Client } = require('pg');
    const client = new Client({
        user: process.env.POSTGRES_USER,
        host: "127.0.0.1",
        database: "postgres",
        password: process.env.POSTGRES_PASSWORD,
        port: process.env.POSTGRES_PORT,
    });
    try {
        await client.connect();
        await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`);
        await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`);
        console.log('Successfully wiped postgres');
    } catch (err) {
        console.log(`Error when wiping postgres: ${err}`);
    } finally {
        client.end();
    }
}
function dbConfig(testInfo: TestInfo){
    switch(testInfo.project.name) {
        case "postgres":
        case "sso-postgres":
            return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` };
        case "mariadb":
        case "sso-mariadb":
            return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` };
        case "mysql":
        case "sso-mysql":
            return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}`};
        case "sqlite":
        case "sso-sqlite":
            return { I_REALLY_WANT_VOLATILE_STORAGE: true };
        default:
            throw new Error(`Unknown database name: ${testInfo.project.name}`);
    }
}
/**
 * All parameters passed in `env` need to be added to the docker-compose.yml
 **/
export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: boolean = true) {
    if( resetDB ){
        switch(testInfo.project.name) {
            case "postgres":
            case "sso-postgres":
                await wipePostgres();
                break;
            case "mariadb":
            case "sso-mariadb":
                await wipeMariaDB();
                break;
            case "mysql":
            case "sso-mysql":
                await wipeMysqlDB();
                break;
            case "sqlite":
            case "sso-sqlite":
                wipeSqlite();
                break;
            default:
                throw new Error(`Unknown database name: ${testInfo.project.name}`);
        }
    }
    console.log(`Starting Vaultwarden`);
    execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, {
        env: { ...env, ...dbConfig(testInfo) },
    });
    await waitFor("/", browser);
    console.log(`Vaultwarden running on: ${process.env.DOMAIN}`);
}
export async function stopVault(force: boolean = false) {
    if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) {
        console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`);
    } else {
        console.log(`Vaultwarden stopping`);
        execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`);
    }
}
export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: boolean = true) {
    stopVault(true);
    return startVault(page.context().browser(), testInfo, env, resetDB);
}
export async function checkNotification(page: Page, hasText: string) {
    await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible();
    await page.locator('bit-toast').filter({ hasText }).getByRole('button').click();
    await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0);
}
export async function cleanLanding(page: Page) {
    await page.goto('/', { waitUntil: 'domcontentloaded' });
    await expect(page.getByRole('button').nth(0)).toBeVisible();
    const logged = await page.getByRole('button', { name: 'Log out' }).count();
    if( logged > 0 ){
        await page.getByRole('button', { name: 'Log out' }).click();
        await page.getByRole('button', { name: 'Log out' }).click();
    }
}
export async function logout(test: Test, page: Page, user: { name: string }) {
    await test.step('logout', async () => {
        await page.getByRole('button', { name: user.name, exact: true }).click();
        await page.getByRole('menuitem', { name: 'Log out' }).click();
        await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible();
    });
}

File diff suppressed because it is too large

View File

@@ -1,21 +0,0 @@
{
  "name": "scenarios",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {},
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@playwright/test": "^1.54.2",
    "dotenv": "^16.6.1",
    "dotenv-expand": "^12.0.2",
    "maildev": "npm:@timshel_npm/maildev@^3.2.1"
  },
  "dependencies": {
    "mysql2": "^3.14.3",
    "otpauth": "^9.4.0",
    "pg": "^8.16.3"
  }
}

View File

@@ -1,143 +0,0 @@
import { defineConfig } from '@playwright/test';
const utils = require('./global-utils');
utils.loadEnv();
/**
 * See https://playwright.dev/docs/test-configuration.
 */
export default defineConfig({
    testDir: './.',
    /* Run tests in files in parallel */
    fullyParallel: false,
    /* Fail the build on CI if you accidentally left test.only in the source code. */
    forbidOnly: !!process.env.CI,
    retries: 0,
    workers: 1,
    /* Reporter to use. See https://playwright.dev/docs/test-reporters */
    reporter: 'html',
    /* Long global timeout for complex tests,
     * but short action/nav/expect timeouts (set in `use` below, where they belong)
     * to fail on the specific step (raise locally if not enough).
     */
    timeout: 120 * 1000,
    expect: { timeout: 10 * 1000 },
    /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
    use: {
        /* Base URL to use in actions like `await page.goto('/')`. */
        baseURL: process.env.DOMAIN,
        browserName: 'firefox',
        locale: 'en-GB',
        timezoneId: 'Europe/London',
        actionTimeout: 10 * 1000,
        navigationTimeout: 10 * 1000,
        /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */
        trace: 'on',
        viewport: {
            width: 1080,
            height: 720,
        },
        video: "on",
    },
    /* Configure projects for major browsers */
    projects: [
        {
            name: 'mariadb-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mariadb" },
            teardown: 'mariadb-teardown',
        },
        {
            name: 'mysql-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Mysql" },
            teardown: 'mysql-teardown',
        },
        {
            name: 'postgres-setup',
            testMatch: 'tests/setups/db-setup.ts',
            use: { serviceName: "Postgres" },
            teardown: 'postgres-teardown',
        },
        {
            name: 'sso-setup',
            testMatch: 'tests/setups/sso-setup.ts',
            teardown: 'sso-teardown',
        },
        {
            name: 'mariadb',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mariadb-setup'],
        },
        {
            name: 'mysql',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['mysql-setup'],
        },
        {
            name: 'postgres',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
            dependencies: ['postgres-setup'],
        },
        {
            name: 'sqlite',
            testMatch: 'tests/*.spec.ts',
            testIgnore: 'tests/sso_*.spec.ts',
        },
        {
            name: 'sso-mariadb',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mariadb-setup'],
        },
        {
            name: 'sso-mysql',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'mysql-setup'],
        },
        {
            name: 'sso-postgres',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup', 'postgres-setup'],
        },
        {
            name: 'sso-sqlite',
            testMatch: 'tests/sso_*.spec.ts',
            dependencies: ['sso-setup'],
        },
        {
            name: 'mariadb-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mariadb" },
        },
        {
            name: 'mysql-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Mysql" },
        },
        {
            name: 'postgres-teardown',
            testMatch: 'tests/setups/db-teardown.ts',
            use: { serviceName: "Postgres" },
        },
        {
            name: 'sso-teardown',
            testMatch: 'tests/setups/sso-teardown.ts',
        },
    ],
    globalSetup: require.resolve('./global-setup'),
});

View File

@@ -1,93 +0,0 @@
##################################################################
### Shared Playwright conf test file Vaultwarden and Databases ###
##################################################################
ENV=test
DC_ENV_FILE=test.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1
#####################
# Playwright Config #
#####################
PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false}
PW_SMTP_FROM=vaultwarden@playwright.test
#####################
# Maildev Config #
#####################
MAILDEV_HTTP_PORT=1081
MAILDEV_SMTP_PORT=1026
MAILDEV_HOST=127.0.0.1
################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=Master Password
TEST_USER_MAIL=${TEST_USER}@example.com
TEST_USER2=test2
TEST_USER2_PASSWORD=Master Password
TEST_USER2_MAIL=${TEST_USER2}@example.com
TEST_USER3=test3
TEST_USER3_PASSWORD=Master Password
TEST_USER3_MAIL=${TEST_USER3}@example.com
###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8081
# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}
######################
# Vaultwarden Config #
######################
ROCKET_PORT=8003
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
LOGIN_RATELIMIT_MAX_BURST=100
SMTP_SECURITY=off
SMTP_PORT=${MAILDEV_SMTP_PORT}
SMTP_FROM_NAME=Vaultwarden
SMTP_TIMEOUT=5
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SSO_DEBUG_TOKENS=true
###########################
# Docker MariaDb container #
###########################
MARIADB_PORT=3307
MARIADB_ROOT_PASSWORD=warden
MARIADB_USER=warden
MARIADB_PASSWORD=warden
MARIADB_DATABASE=warden
###########################
# Docker Mysql container #
###########################
MYSQL_PORT=3309
MYSQL_ROOT_PASSWORD=warden
MYSQL_USER=warden
MYSQL_PASSWORD=warden
MYSQL_DATABASE=warden
############################
# Docker Postgres container #
############################
POSTGRES_PORT=5433
POSTGRES_USER=warden
POSTGRES_PASSWORD=warden
POSTGRES_DB=warden

View File

@@ -1,37 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import * as utils from "../global-utils";
import { createAccount } from './setups/user';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});
test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});
test('Create', async ({ page }) => {
    await createAccount(test, page, users.user1);
    await test.step('Create Org', async () => {
        await page.getByRole('link', { name: 'New organisation' }).click();
        await page.getByLabel('Organisation name (required)').fill('Test');
        await page.getByRole('button', { name: 'Submit' }).click();
        await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();
        await utils.checkNotification(page, 'Organisation created');
    });
    await test.step('Create Collection', async () => {
        await page.getByRole('link', { name: 'Collections' }).click();
        await page.getByRole('button', { name: 'New' }).click();
        await page.getByRole('menuitem', { name: 'Collection' }).click();
        await page.getByLabel('Name (required)').fill('RandomCollec');
        await page.getByRole('button', { name: 'Save' }).click();
        await utils.checkNotification(page, 'Created collection RandomCollec');
        await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible();
    });
});

View File

@@ -1,100 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
const utils = require('../global-utils');
import { createAccount, logUser } from './setups/user';
import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa';
let users = utils.loadEnv();
let mailserver;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailserver = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })
    await mailserver.listen();
    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
});
test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    if( mailserver ){
        await mailserver.close();
    }
});
test('Account creation', async ({ page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);
    await createAccount(test, page, users.user1, mailBuffer);
    mailBuffer.close();
});
test('Login', async ({ context, page }) => {
    const mailBuffer = mailserver.buffer(users.user1.email);
    await logUser(test, page, users.user1, mailBuffer);
    await test.step('verify email', async () => {
        await page.getByText('Verify your account\'s email').click();
        await expect(page.getByText('Verify your account\'s email')).toBeVisible();
        await page.getByRole('button', { name: 'Send email' }).click();
        await utils.checkNotification(page, 'Check your email inbox for a verification link');
        const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email");
        expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM);
        const page2 = await context.newPage();
        await page2.setContent(verify.html);
        const link = await page2.getByTestId("verify").getAttribute("href");
        await page2.close();
        await page.goto(link);
        await utils.checkNotification(page, 'Account email verified');
    });
    mailBuffer.close();
});
test('Activate 2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);
    await logUser(test, page, users.user1);
    await activateEmail(test, page, users.user1, emails);
    emails.close();
});
test('2fa', async ({ page }) => {
    const emails = mailserver.buffer(users.user1.email);
    await test.step('login', async () => {
        await page.goto('/');
        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();
        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        const code = await retrieveEmailCode(test, page, emails);
        await page.getByLabel(/Verification code/).fill(code);
        await page.getByRole('button', { name: 'Continue' }).click();
        await expect(page).toHaveTitle(/Vaults/);
    })
    await disableEmail(test, page, users.user1);
    emails.close();
});

View File

@@ -1,51 +0,0 @@
import { test, expect, type Page, type TestInfo } from '@playwright/test';
import * as OTPAuth from "otpauth";
import * as utils from "../global-utils";
import { createAccount, logUser } from './setups/user';
import { activateTOTP, disableTOTP } from './setups/2fa';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo, {});
});
test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});
test('Account creation', async ({ page }) => {
    await createAccount(test, page, users.user1);
});
test('Master password login', async ({ page }) => {
    await logUser(test, page, users.user1);
});
test('Authenticator 2fa', async ({ page }) => {
    await logUser(test, page, users.user1);
    let totp = await activateTOTP(test, page, users.user1);
    await utils.logout(test, page, users.user1);
    await test.step('login', async () => {
        let timestamp = Date.now(); // Needed to use the next token
        timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;
        await page.getByLabel(/Email address/).fill(users.user1.email);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByLabel('Master password').fill(users.user1.password);
        await page.getByRole('button', { name: 'Log in with master password' }).click();
        await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
        await page.getByLabel(/Verification code/).fill(totp.generate({timestamp}));
        await page.getByRole('button', { name: 'Continue' }).click();
        await expect(page).toHaveTitle(/Vaultwarden Web/);
    });
    await disableTOTP(test, page, users.user1);
});

View File

@@ -1,115 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import * as utils from '../global-utils';
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';
let users = utils.loadEnv();
let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    mailServer = new MailDev({
        port: process.env.MAILDEV_SMTP_PORT,
        web: { port: process.env.MAILDEV_HTTP_PORT },
    })
    await mailServer.listen();
    await utils.startVault(browser, testInfo, {
        SMTP_HOST: process.env.MAILDEV_HOST,
        SMTP_FROM: process.env.PW_SMTP_FROM,
    });
    mail1Buffer = mailServer.buffer(users.user1.email);
    mail2Buffer = mailServer.buffer(users.user2.email);
    mail3Buffer = mailServer.buffer(users.user3.email);
});
test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
    [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});
test('Create user3', async ({ page }) => {
    await createAccount(test, page, users.user3, mail3Buffer);
});
test('Invite users', async ({ page }) => {
    await createAccount(test, page, users.user1, mail1Buffer);
    await orgs.create(test, page, 'Test');
    await orgs.members(test, page, 'Test');
    await orgs.invite(test, page, 'Test', users.user2.email);
    await orgs.invite(test, page, 'Test', users.user3.email, {
        navigate: false,
    });
});
test('invited with new account', async ({ page }) => {
    const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test');
    await test.step('Create account', async () => {
        await page.setContent(invited.html);
        const link = await page.getByTestId('invite').getAttribute('href');
        await page.goto(link);
        await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/);
        //await page.getByLabel('Name').fill(users.user2.name);
        await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password);
        await page.getByLabel('Confirm new master password (').fill(users.user2.password);
        await page.getByRole('button', { name: 'Create account' }).click();
        await utils.checkNotification(page, 'Your new account has been created');
        // Redirected to the vault
        await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
        await utils.checkNotification(page, 'You have been logged in!');
        await utils.checkNotification(page, 'Invitation accepted');
    });
    await test.step('Check mails', async () => {
        await mail2Buffer.expect((m) => m.subject === 'Welcome');
        await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
        await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
    });
});
test('invited with existing account', async ({ page }) => {
    const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test');
    await page.setContent(invited.html);
    const link = await page.getByTestId('invite').getAttribute('href');
    await page.goto(link);
    // We should be on login page with email prefilled
    await expect(page).toHaveTitle(/Vaultwarden Web/);
    await page.getByRole('button', { name: 'Continue' }).click();
    // Unlock page
    await page.getByLabel('Master password').fill(users.user3.password);
    await page.getByRole('button', { name: 'Log in with master password' }).click();
    // We are now in the default vault page
    await expect(page).toHaveTitle(/Vaultwarden Web/);
    await utils.checkNotification(page, 'Invitation accepted');
    await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
    await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
});
test('Confirm invited user', async ({ page }) => {
    await logUser(test, page, users.user1, mail1Buffer);
    await orgs.members(test, page, 'Test');
    await orgs.confirm(test, page, 'Test', users.user2.email);
    await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed'));
});
test('Organization is visible', async ({ page }) => {
    await logUser(test, page, users.user2, mail2Buffer);
    await page.getByRole('button', { name: 'vault: Test', exact: true }).click();
    await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});

View File

@@ -1,54 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
    await utils.startVault(browser, testInfo);
});
test.afterAll('Teardown', async ({}) => {
    utils.stopVault();
});
test('Invite', async ({ page }) => {
    await createAccount(test, page, users.user3);
    await createAccount(test, page, users.user1);
    await orgs.create(test, page, 'New organisation');
    await orgs.members(test, page, 'New organisation');
    await test.step('missing user2', async () => {
        await orgs.invite(test, page, 'New organisation', users.user2.email);
        await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/);
    });
    await test.step('existing user3', async () => {
        await orgs.invite(test, page, 'New organisation', users.user3.email);
        await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/);
        await orgs.confirm(test, page, 'New organisation', users.user3.email);
    });
    await test.step('confirm user2', async () => {
        await createAccount(test, page, users.user2);
        await logUser(test, page, users.user1);
        await orgs.members(test, page, 'New organisation');
        await orgs.confirm(test, page, 'New organisation', users.user2.email);
    });
    await test.step('Org visible user2', async () => {
        await logUser(test, page, users.user2);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });
    await test.step('Org visible user3', async () => {
        await logUser(test, page, users.user3);
        await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
        await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
    });
});

View File

@@ -1,92 +0,0 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer } from 'maildev';
import * as OTPAuth from "otpauth";
import * as utils from '../../global-utils';
export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> {
    return await test.step('Activate TOTP 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        const secret = await page.getByLabel('Key').innerText();
        let totp = new OTPAuth.TOTP({ secret, period: 30 });
        await page.getByLabel(/Verification code/).fill(totp.generate());
        await page.getByRole('button', { name: 'Turn on' }).click();
        await page.getByRole('heading', { name: 'Turned on', exact: true });
        await page.getByLabel('Close').click();
        return totp;
    })
}
export async function disableTOTP(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable TOTP 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();
        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}
export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) {
    await test.step('Activate Email 2FA', async () => {
        await page.getByRole('button', { name: user.name }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Send email' }).click();
    });
    let code = await retrieveEmailCode(test, page, mailBuffer);
    await test.step('input code', async () => {
        await page.getByLabel('2. Enter the resulting 6').fill(code);
        await page.getByRole('button', { name: 'Turn on' }).click();
        await page.getByRole('heading', { name: 'Turned on', exact: true });
    });
}
export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> {
    return await test.step('retrieve code', async () => {
        const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code"));
        const page2 = await page.context().newPage();
        await page2.setContent(codeMail.html);
        const code = await page2.getByTestId("2fa").innerText();
        await page2.close();
        return code;
    });
}
export async function disableEmail(test: Test, page: Page, user: { password: string }) {
    await test.step('Disable Email 2FA', async () => {
        await page.getByRole('button', { name: 'Test' }).click();
        await page.getByRole('menuitem', { name: 'Account settings' }).click();
        await page.getByRole('link', { name: 'Security' }).click();
        await page.getByRole('link', { name: 'Two-step login' }).click();
        await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click();
        await page.getByLabel('Master password (required)').click();
        await page.getByLabel('Master password (required)').fill(user.password);
        await page.getByRole('button', { name: 'Continue' }).click();
        await page.getByRole('button', { name: 'Turn off' }).click();
        await page.getByRole('button', { name: 'Yes' }).click();
        await utils.checkNotification(page, 'Two-step login provider turned off');
    });
}

View File

@@ -1,7 +0,0 @@
import { test } from './db-test';
const utils = require('../../global-utils');
test('DB start', async ({ serviceName }) => {
    utils.startComposeService(serviceName);
});

View File

@@ -1,11 +0,0 @@
import { test } from './db-test';
const utils = require('../../global-utils');
utils.loadEnv();
test('DB teardown ?', async ({ serviceName }) => {
    if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) {
        utils.stopComposeService(serviceName);
    }
});

View File

@@ -1,9 +0,0 @@
import { test as base } from '@playwright/test';
export type TestOptions = {
    serviceName: string;
};
export const test = base.extend<TestOptions>({
    serviceName: ['', { option: true }],
});

Some files were not shown because too many files have changed in this diff