Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-10-31 18:28:20 +02:00)

Compare commits: 1.32.4 ... 7161f612a1 (148 commits)

	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 7161f612a1 | ||
|  | 5ee908517f | ||
|  | 55577fa4eb | ||
|  | 843c063649 | ||
|  | 550b670dba | ||
|  | de808c5ad9 | ||
|  | 1f73630136 | ||
|  | 77008a91e9 | ||
|  | 7f386d38ae | ||
|  | 8e7eeab293 | ||
|  | e35c6f8705 | ||
|  | ae7b725c0f | ||
|  | 2a5489a4b2 | ||
|  | 8fd0ee4211 | ||
|  | 4a5516e150 | ||
|  | 7fc94516ce | ||
|  | 5ea0779d6b | ||
|  | a133d4e90c | ||
|  | 49eff787de | ||
|  | cff6c2b3af | ||
|  | a0c76284fd | ||
|  | 318653b0e5 | ||
|  | 5d84f17600 | ||
|  | 0db4b00007 | ||
|  | a0198d8d7c | ||
|  | dfad931dca | ||
|  | 25865efd79 | ||
|  | bcf627930e | ||
|  | ce70cd2cf4 | ||
|  | 2ac589d4b4 | ||
|  | b2e2aef7de | ||
|  | 0755bb19c0 | ||
|  | fee0c1c711 | ||
|  | f58539f0b4 | ||
|  | e718afb441 | ||
|  | 55945ad793 | ||
|  | 4fd22d8e3b | ||
|  | d6a8fb8e48 | ||
|  | 3b48e6e903 | ||
|  | 6b9333b33e | ||
|  | a545636ee5 | ||
|  | f125d5f1a1 | ||
|  | ad75ce281e | ||
|  | 9059437c35 | ||
|  | c84db0daca | ||
|  | 72adc239f5 | ||
|  | 34ebeeca76 | ||
|  | 0469d9ba4c | ||
|  | eaa6ad06ed | ||
|  | 0d3f283c37 | ||
|  | 51a1d641c5 | ||
|  | 90f7e5ff80 | ||
|  | 200999c94e | ||
|  | d363e647e9 | ||
|  | 53f58b14d5 | ||
|  | ef7835d1b0 | ||
|  | 3a44dc963b | ||
|  | a039e227c7 | ||
|  | 602b18fdd6 | ||
|  | bf04c64759 | ||
|  | 2f1d86b7f1 | ||
|  | ff97bcfdda | ||
|  | 73f2441d1a | ||
|  | ad8484a2d5 | ||
|  | 9813e480c0 | ||
|  | bfe172702a | ||
|  | df42b6d6b0 | ||
|  | 2697fe8aba | ||
|  | 674e444d67 | ||
|  | 0d16da440d | ||
|  | 66cf179bca | ||
|  | 025bb90f8f | ||
|  | d5039d9c17 | ||
|  | e7c796a660 | ||
|  | bbbd2f6d15 | ||
|  | a2d7895586 | ||
|  | 8a0cb1137e | ||
|  | f960bf59bb | ||
|  | 3a1f1bae00 | ||
|  | 8dfe805954 | ||
|  | 07b869b3ef | ||
|  | 2a18665288 | ||
|  | 71952a4ab5 | ||
|  | 994d157064 | ||
|  | 1dae6093c9 | ||
|  | 6edceb5f7a | ||
|  | 359a4a088a | ||
|  | 3baffeee9a | ||
|  | d5c353427d | ||
|  | 1f868b8d22 | ||
|  | 8d1df08b81 | ||
|  | 3b6bccde97 | ||
|  | d2b36642a6 | ||
|  | a02fb0fd24 | ||
|  | 1109293992 | ||
|  | 3c29f82974 | ||
|  | 663f88e717 | ||
|  | a3dccee243 | ||
|  | c0ebe0d982 | ||
|  | 1b46c80389 | ||
|  | 2c549984c0 | ||
|  | ecab7a50ea | ||
|  | 2903a3a13a | ||
|  | 952992c85b | ||
|  | c0be36a17f | ||
|  | d1dee04615 | ||
|  | ef2695de0c | ||
|  | 29f2b433f0 | ||
|  | 07f80346b4 | ||
|  | 4f68eafa3e | ||
|  | 327d369188 | ||
|  | ca7483df85 | ||
|  | 16b6d2a71e | ||
|  | 871a3f214a | ||
|  | 10d12676cf | ||
|  | dec3a9603a | ||
|  | 86aaf27659 | ||
|  | bc913d1156 | ||
|  | ef4bff09eb | ||
|  | 4816f77fd7 | ||
|  | dfd9e65396 | ||
|  | b1481c7c1a | ||
|  | d9e0d68f20 | ||
|  | 08183fc999 | ||
|  | d9b043d32c | ||
|  | ed4ad67e73 | ||
|  | a523c82f5f | ||
|  | 4d6d3443ae | ||
|  | 9cd400db6c | ||
|  | fd51230044 | ||
|  | 45e5f06b86 | ||
|  | 620ad92331 | ||
|  | c9860af11c | ||
|  | d7adce97df | ||
|  | 71b3d3c818 | ||
|  | da3701c0cf | ||
|  | 96813b1317 | ||
|  | b0b953f348 | ||
|  | cdfdc6ff4f | ||
|  | 2393c3f3c0 | ||
|  | 0d16b38a68 | ||
|  | ff33534c07 | ||
|  | adb21d5c1a | ||
|  | e927b8aa5e | ||
|  | ba48ca68fc | ||
|  | 294b429436 | ||
|  | 37c14c3c69 | ||
|  | d0581da638 | ||

.dockerignore

| @@ -5,6 +5,7 @@ | ||||
| !.git | ||||
| !docker/healthcheck.sh | ||||
| !docker/start.sh | ||||
| !macros | ||||
| !migrations | ||||
| !src | ||||
|  | ||||
|   | ||||
.env.template (130 changes)

| @@ -15,6 +15,14 @@ | ||||
| #################### | ||||
|  | ||||
| ## Main data folder | ||||
| ## This can be a path to local folder or a path to an external location | ||||
| ## depending on features enabled at build time. Possible external locations: | ||||
| ## | ||||
| ## - AWS S3 Bucket (via `s3` feature): s3://bucket-name/path/to/folder | ||||
| ## | ||||
| ## When using an external location, make sure to set TMP_FOLDER, | ||||
| ## TEMPLATES_FOLDER, and DATABASE_URL to local paths and/or a remote database | ||||
| ## location. | ||||
| # DATA_FOLDER=data | ||||
|  | ||||
| ## Individual folders, these override %DATA_FOLDER% | ||||
| @@ -22,10 +30,13 @@ | ||||
| # ICON_CACHE_FOLDER=data/icon_cache | ||||
| # ATTACHMENTS_FOLDER=data/attachments | ||||
| # SENDS_FOLDER=data/sends | ||||
|  | ||||
| ## Temporary folder used for storing temporary file uploads | ||||
| ## Must be a local path. | ||||
| # TMP_FOLDER=data/tmp | ||||
|  | ||||
| ## Templates data folder, by default uses embedded templates | ||||
| ## Check source code to see the format | ||||
| ## HTML template overrides data folder | ||||
| ## Must be a local path. | ||||
| # TEMPLATES_FOLDER=data/templates | ||||
| ## Automatically reload the templates for every request, slow, use only for development | ||||
| # RELOAD_TEMPLATES=false | ||||
| @@ -39,7 +50,9 @@ | ||||
| ######################### | ||||
|  | ||||
| ## Database URL | ||||
| ## When using SQLite, this is the path to the DB file, default to %DATA_FOLDER%/db.sqlite3 | ||||
| ## When using SQLite, this is the path to the DB file, and it defaults to | ||||
| ## %DATA_FOLDER%/db.sqlite3. If DATA_FOLDER is set to an external location, this | ||||
| ## must be set to a local sqlite3 file path. | ||||
| # DATABASE_URL=data/db.sqlite3 | ||||
| ## When using MySQL, specify an appropriate connection URI. | ||||
| ## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html | ||||
| @@ -67,8 +80,16 @@ | ||||
| ## Timeout when acquiring database connection | ||||
| # DATABASE_TIMEOUT=30 | ||||
|  | ||||
| ## Database idle timeout | ||||
| ## Timeout in seconds before idle connections to the database are closed. | ||||
| # DATABASE_IDLE_TIMEOUT=600 | ||||
|  | ||||
| ## Database min connections | ||||
| ## Define the minimum size of the connection pool used for connecting to the database. | ||||
| # DATABASE_MIN_CONNS=2 | ||||
|  | ||||
| ## Database max connections | ||||
| ## Define the size of the connection pool used for connecting to the database. | ||||
| ## Define the maximum size of the connection pool used for connecting to the database. | ||||
| # DATABASE_MAX_CONNS=10 | ||||
|  | ||||
| ## Database connection initialization | ||||
| @@ -117,7 +138,7 @@ | ||||
| ## and are always in terms of UTC time (regardless of your local time zone settings). | ||||
| ## | ||||
| ## The schedule format is a bit different from crontab as crontab does not contains seconds. | ||||
| ## You can test the the format here: https://crontab.guru, but remove the first digit! | ||||
| ## You can test the format here: https://crontab.guru, but remove the first digit! | ||||
| ## SEC  MIN   HOUR   DAY OF MONTH    MONTH   DAY OF WEEK | ||||
| ## "0   30   9,12,15     1,15       May-Aug  Mon,Wed,Fri" | ||||
| ## "0   30     *          *            *          *     " | ||||
| @@ -161,6 +182,10 @@ | ||||
| ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. | ||||
| ## Defaults to every minute. Set blank to disable this job. | ||||
| # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" | ||||
| # | ||||
| ## Cron schedule of the job that cleans sso nonce from incomplete flow | ||||
| ## Defaults to daily (20 minutes after midnight). Set blank to disable this job. | ||||
| # PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *" | ||||
|  | ||||
| ######################## | ||||
| ### General settings ### | ||||
| @@ -229,7 +254,8 @@ | ||||
| # SIGNUPS_ALLOWED=true | ||||
|  | ||||
| ## Controls if new users need to verify their email address upon registration | ||||
| ## Note that setting this option to true prevents logins until the email address has been verified! | ||||
| ## On new client versions, this will require the user to verify their email at signup time. | ||||
| ## On older clients, it will require the user to verify their email before they can log in. | ||||
| ## The welcome email will include a verification link, and login attempts will periodically | ||||
| ## trigger another verification email to be sent. | ||||
| # SIGNUPS_VERIFY=false | ||||
| @@ -259,7 +285,7 @@ | ||||
| ## A comma-separated list means only those users can create orgs: | ||||
| # ORG_CREATION_USERS=admin1@example.com,admin2@example.com | ||||
|  | ||||
| ## Invitations org admins to invite users, even when signups are disabled | ||||
| ## Allows org admins to invite users, even when signups are disabled | ||||
| # INVITATIONS_ALLOWED=true | ||||
| ## Name shown in the invitation emails that don't come from a specific organization | ||||
| # INVITATION_ORG_NAME=Vaultwarden | ||||
| @@ -280,12 +306,13 @@ | ||||
| ## The default for new users. If changed, it will be updated during login for existing users. | ||||
| # PASSWORD_ITERATIONS=600000 | ||||
|  | ||||
| ## Controls whether users can set password hints. This setting applies globally to all users. | ||||
| ## Controls whether users can set or show password hints. This setting applies globally to all users. | ||||
| # PASSWORD_HINTS_ALLOWED=true | ||||
|  | ||||
| ## Controls whether a password hint should be shown directly in the web page if | ||||
| ## SMTP service is not configured. Not recommended for publicly-accessible instances | ||||
| ## as this provides unauthenticated access to potentially sensitive data. | ||||
| ## SMTP service is not configured and password hints are allowed. | ||||
| ## Not recommended for publicly-accessible instances because this provides | ||||
| ## unauthenticated access to potentially sensitive data. | ||||
| # SHOW_PASSWORD_HINT=false | ||||
|  | ||||
| ######################### | ||||
| @@ -326,29 +353,33 @@ | ||||
|  | ||||
| ## Icon download timeout | ||||
| ## Configure the timeout value when downloading the favicons. | ||||
| ## The default is 10 seconds, but this could be to low on slower network connections | ||||
| ## The default is 10 seconds, but this could be too low on slower network connections | ||||
| # ICON_DOWNLOAD_TIMEOUT=10 | ||||
|  | ||||
| ## Block HTTP domains/IPs by Regex | ||||
| ## Any domains or IPs that match this regex won't be fetched by the internal HTTP client. | ||||
| ## Useful to hide other servers in the local network. Check the WIKI for more details | ||||
| ## NOTE: Always enclose this regex withing single quotes! | ||||
| ## NOTE: Always enclose this regex within single quotes! | ||||
| # HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$' | ||||
|  | ||||
| ## Enabling this will cause the internal HTTP client to refuse to connect to any non global IP address. | ||||
| ## Enabling this will cause the internal HTTP client to refuse to connect to any non-global IP address. | ||||
| ## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block | ||||
| # HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true | ||||
|  | ||||
| ## Client Settings | ||||
| ## Enable experimental feature flags for clients. | ||||
| ## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3". | ||||
| ## Note that clients cache the /api/config endpoint for about 1 hour and it could take some time before they are enabled or disabled! | ||||
| ## | ||||
| ## The following flags are available: | ||||
| ## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials. | ||||
| ## - "autofill-v2": Use the new autofill implementation. | ||||
| ## - "browser-fileless-import": Directly import credentials from other providers without a file. | ||||
| ## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension) | ||||
| ## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor. | ||||
| ## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension. | ||||
| ## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension. | ||||
| ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0) | ||||
| ## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0) | ||||
| ## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0) | ||||
| ## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0) | ||||
| ## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0) | ||||
| ## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0) | ||||
| # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials | ||||
|  | ||||
| ## Require new device emails. When a user logs in an email is required to be sent. | ||||
| @@ -407,6 +438,14 @@ | ||||
| ## Multiple values must be separated with a whitespace. | ||||
| # ALLOWED_IFRAME_ANCESTORS= | ||||
|  | ||||
| ## Allowed connect-src (Know the risks!) | ||||
| ## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/connect-src | ||||
| ## Allows other domains to URLs which can be loaded using script interfaces like the Forwarded email alias feature | ||||
| ## This adds the configured value to the 'Content-Security-Policy' headers 'connect-src' value. | ||||
| ## Multiple values must be separated with a whitespace. And only HTTPS values are allowed. | ||||
| ## Example: "https://my-addy-io.domain.tld https://my-simplelogin.domain.tld" | ||||
| # ALLOWED_CONNECT_SRC="" | ||||
|  | ||||
| ## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in. | ||||
| # LOGIN_RATELIMIT_SECONDS=60 | ||||
| ## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`. | ||||
| @@ -432,6 +471,55 @@ | ||||
| ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. | ||||
| # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false | ||||
|  | ||||
| ##################################### | ||||
| ### SSO settings (OpenID Connect) ### | ||||
| ##################################### | ||||
|  | ||||
| ## Controls whether users can login using an OpenID Connect identity provider | ||||
| # SSO_ENABLED=false | ||||
|  | ||||
| ## Prevent users from logging in directly without going through SSO | ||||
| # SSO_ONLY=false | ||||
|  | ||||
| ## On SSO Signup if a user with a matching email already exists make the association | ||||
| # SSO_SIGNUPS_MATCH_EMAIL=true | ||||
|  | ||||
| ## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` open potential account takeover. | ||||
| # SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false | ||||
|  | ||||
| ## Base URL of the OIDC server (auto-discovery is used) | ||||
| ##  - Should not include the `/.well-known/openid-configuration` part and no trailing `/` | ||||
| ##  - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse | ||||
| # SSO_AUTHORITY=https://auth.example.com | ||||
|  | ||||
| ## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit). | ||||
| # SSO_SCOPES="email profile" | ||||
|  | ||||
| ## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth). | ||||
| # SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent" | ||||
|  | ||||
| ## Activate PKCE for the Auth Code flow. | ||||
| # SSO_PKCE=true | ||||
|  | ||||
| ## Regex for additional trusted Id token audience (by default only the client_id is trusted). | ||||
| # SSO_AUDIENCE_TRUSTED='^$' | ||||
|  | ||||
| ## Set your Client ID and Client Key | ||||
| # SSO_CLIENT_ID=11111 | ||||
| # SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA | ||||
|  | ||||
| ## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment. | ||||
| # SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' | ||||
|  | ||||
| ## Use sso only for authentication not the session lifecycle | ||||
| # SSO_AUTH_ONLY_NOT_SESSION=false | ||||
|  | ||||
| ## Client cache for discovery endpoint. Duration in seconds (0 to disable). | ||||
| # SSO_CLIENT_CACHE_EXPIRATION=0 | ||||
|  | ||||
| ## Log all the tokens, LOG_LEVEL=debug is required | ||||
| # SSO_DEBUG_TOKENS=false | ||||
|  | ||||
| ######################## | ||||
| ### MFA/2FA settings ### | ||||
| ######################## | ||||
| @@ -474,7 +562,7 @@ | ||||
| ## Maximum attempts before an email token is reset and a new email will need to be sent. | ||||
| # EMAIL_ATTEMPTS_LIMIT=3 | ||||
| ## | ||||
| ## Setup email 2FA regardless of any organization policy | ||||
| ## Setup email 2FA on registration regardless of any organization policy | ||||
| # EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false | ||||
| ## Automatically setup email 2FA as fallback provider when needed | ||||
| # EMAIL_2FA_AUTO_FALLBACK=false | ||||
| @@ -491,7 +579,7 @@ | ||||
| ## | ||||
| ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238), | ||||
| ## we allow by default the TOTP code which was valid one step back and one in the future. | ||||
| ## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes. | ||||
| ## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes. | ||||
| ## You can disable this, so that only the current TOTP Code is allowed. | ||||
| ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid. | ||||
| ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid. | ||||
| @@ -531,7 +619,7 @@ | ||||
| # SMTP_AUTH_MECHANISM= | ||||
|  | ||||
| ## Server name sent during the SMTP HELO | ||||
| ## By default this value should be is on the machine's hostname, | ||||
| ## By default this value should be the machine's hostname, | ||||
| ## but might need to be changed in case it trips some anti-spam filters | ||||
| # HELO_NAME= | ||||
|  | ||||
|   | ||||
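
The `.env.template` changes above add external data storage and OpenID Connect options. Below is a minimal sketch of how they might be combined in an `.env` file, assuming an S3 bucket and an OIDC provider are already set up; the bucket name, authority URL, and client credentials are placeholders, not values from this diff:

```bash
# Option names come from the updated .env.template; all values are hypothetical.
DATA_FOLDER=s3://my-vaultwarden-bucket/vaultwarden   # needs a build with the `s3` feature
TMP_FOLDER=/data/tmp                                 # must stay a local path when DATA_FOLDER is external
TEMPLATES_FOLDER=/data/templates                     # must stay a local path as well
DATABASE_URL=/data/db.sqlite3                        # local sqlite3 file, or a remote MySQL/PostgreSQL URI

SSO_ENABLED=true
SSO_AUTHORITY=https://auth.example.com               # no trailing slash and no /.well-known suffix
SSO_CLIENT_ID=vaultwarden
SSO_CLIENT_SECRET=change-me
SSO_PKCE=true
```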
.github/CODEOWNERS (3 changes, vendored)

| @@ -1,3 +1,6 @@ | ||||
| /.github @dani-garcia @BlackDex | ||||
| /.github/** @dani-garcia @BlackDex | ||||
| /.github/CODEOWNERS @dani-garcia @BlackDex | ||||
| /.github/ISSUE_TEMPLATE/** @dani-garcia @BlackDex | ||||
| /.github/workflows/** @dani-garcia @BlackDex | ||||
| /SECURITY.md @dani-garcia @BlackDex | ||||
|   | ||||
.github/ISSUE_TEMPLATE/bug_report.yml (25 changes, vendored)

| @@ -8,15 +8,30 @@ body: | ||||
|       value: | | ||||
|         Thanks for taking the time to fill out this bug report! | ||||
|  | ||||
|         Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here. | ||||
|         Please **do not** submit feature requests or ask for help on how to configure Vaultwarden here! | ||||
|  | ||||
|         The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas. | ||||
|  | ||||
|         Our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki/) has topics on how to configure Vaultwarden. | ||||
|  | ||||
|         Also, make sure you are running [](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden! | ||||
|         And search for existing open or closed issues or discussions regarding your topic before posting. | ||||
|  | ||||
|         Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors! | ||||
|         See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page). | ||||
|  | ||||
|         > [!IMPORTANT] | ||||
|         > ## :bangbang: Search for existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) regarding your topic before posting! :bangbang: | ||||
|   # | ||||
|   - type: checkboxes | ||||
|     id: checklist | ||||
|     attributes: | ||||
|       label: Prerequisites | ||||
|       description: Please confirm you have completed the following before submitting an issue! | ||||
|       options: | ||||
|         - label: I have searched the existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) | ||||
|           required: true | ||||
|         - label: I have searched and read the [documentation](https://github.com/dani-garcia/vaultwarden/wiki/) | ||||
|           required: true | ||||
|   # | ||||
|   - id: support-string | ||||
|     type: textarea | ||||
| @@ -36,7 +51,7 @@ body: | ||||
|     attributes: | ||||
|       label: Vaultwarden Build Version | ||||
|       description: What version of Vaultwarden are you running? | ||||
|       placeholder: ex. v1.31.0 or v1.32.0-3466a804 | ||||
|       placeholder: ex. v1.34.0 or v1.34.1-53f58b14 | ||||
|     validations: | ||||
|       required: true | ||||
|   # | ||||
| @@ -67,7 +82,7 @@ body: | ||||
|     attributes: | ||||
|       label: Reverse Proxy | ||||
|       description: Are you using a reverse proxy, if so which and what version? | ||||
|       placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0 | ||||
|       placeholder: ex. nginx 1.29.0, caddy 2.10.0, traefik 3.4.4, haproxy 3.2 | ||||
|     validations: | ||||
|       required: true | ||||
|   # | ||||
| @@ -115,7 +130,7 @@ body: | ||||
|     attributes: | ||||
|       label: Client Version | ||||
|       description: What version(s) of the client(s) are you seeing the problem on? | ||||
|       placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0 | ||||
|       placeholder: ex. CLI v2025.7.0, Firefox 140 - v2025.6.1 | ||||
|   # | ||||
|   - id: reproduce | ||||
|     type: textarea | ||||
|   | ||||
.github/workflows/build.yml (106 changes, vendored)

| @@ -1,4 +1,5 @@ | ||||
| name: Build | ||||
| permissions: {} | ||||
|  | ||||
| on: | ||||
|   push: | ||||
| @@ -13,6 +14,7 @@ on: | ||||
|       - "diesel.toml" | ||||
|       - "docker/Dockerfile.j2" | ||||
|       - "docker/DockerSettings.yaml" | ||||
|  | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - ".github/workflows/build.yml" | ||||
| @@ -28,13 +30,17 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   build: | ||||
|     name: Build and Test ${{ matrix.channel }} | ||||
|     permissions: | ||||
|       actions: write | ||||
|       contents: read | ||||
|     # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers | ||||
|     runs-on: ubuntu-22.04 | ||||
|     timeout-minutes: 120 | ||||
|     # Make warnings errors, this is to prevent warnings slipping through. | ||||
|     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes. | ||||
|     env: | ||||
|       RUSTFLAGS: "-D warnings" | ||||
|       RUSTFLAGS: "-Dwarnings" | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
| @@ -42,32 +48,33 @@ jobs: | ||||
|           - "rust-toolchain" # The version defined in rust-toolchain | ||||
|           - "msrv" # The supported MSRV | ||||
|  | ||||
|     name: Build and Test ${{ matrix.channel }} | ||||
|  | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: "Checkout" | ||||
|         uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1 | ||||
|       # End Checkout the repo | ||||
|  | ||||
|  | ||||
|       # Install dependencies | ||||
|       - name: "Install dependencies Ubuntu" | ||||
|         run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config | ||||
|       # End Install dependencies | ||||
|  | ||||
|       # Checkout the repo | ||||
|       - name: "Checkout" | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|           fetch-depth: 0 | ||||
|       # End Checkout the repo | ||||
|  | ||||
|       # Determine rust-toolchain version | ||||
|       - name: Init Variables | ||||
|         id: toolchain | ||||
|         shell: bash | ||||
|         env: | ||||
|           CHANNEL: ${{ matrix.channel }} | ||||
|         run: | | ||||
|           if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then | ||||
|           if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then | ||||
|             RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)" | ||||
|           elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then | ||||
|           elif [[ "${CHANNEL}" == 'msrv' ]]; then | ||||
|             RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)" | ||||
|           else | ||||
|             RUST_TOOLCHAIN="${{ matrix.channel }}" | ||||
|             RUST_TOOLCHAIN="${CHANNEL}" | ||||
|           fi | ||||
|           echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}" | ||||
|       # End Determine rust-toolchain version | ||||
| @@ -75,7 +82,7 @@ jobs: | ||||
|  | ||||
|       # Only install the clippy and rustfmt components on the default rust-toolchain | ||||
|       - name: "Install rust-toolchain version" | ||||
|         uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2 | ||||
|         uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2 | ||||
|         if: ${{ matrix.channel == 'rust-toolchain' }} | ||||
|         with: | ||||
|           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" | ||||
| @@ -85,7 +92,7 @@ jobs: | ||||
|  | ||||
|       # Install the any other channel to be used for which we do not execute clippy and rustfmt | ||||
|       - name: "Install MSRV version" | ||||
|         uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2 | ||||
|         uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2 | ||||
|         if: ${{ matrix.channel != 'rust-toolchain' }} | ||||
|         with: | ||||
|           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" | ||||
| @@ -93,11 +100,13 @@ jobs: | ||||
|  | ||||
|       # Set the current matrix toolchain version as default | ||||
|       - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default" | ||||
|         env: | ||||
|           RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} | ||||
|         run: | | ||||
|           # Remove the rust-toolchain.toml | ||||
|           rm rust-toolchain.toml | ||||
|           # Set the default | ||||
|           rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} | ||||
|           rustup default "${RUST_TOOLCHAIN}" | ||||
|  | ||||
|       # Show environment | ||||
|       - name: "Show environment" | ||||
| @@ -107,7 +116,8 @@ jobs: | ||||
|       # End Show environment | ||||
|  | ||||
|       # Enable Rust Caching | ||||
|       - uses: Swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2.7.3 | ||||
|       - name: Rust Caching | ||||
|         uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 | ||||
|         with: | ||||
|           # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. | ||||
|           # Like changing the build host from Ubuntu 20.04 to 22.04 for example. | ||||
| @@ -117,33 +127,39 @@ jobs: | ||||
|  | ||||
|       # Run cargo tests | ||||
|       # First test all features together, afterwards test them separately. | ||||
|       - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger" | ||||
|         id: test_sqlite_mysql_postgresql_mimalloc_logger | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger | ||||
|  | ||||
|       - name: "test features: sqlite,mysql,postgresql,enable_mimalloc" | ||||
|         id: test_sqlite_mysql_postgresql_mimalloc | ||||
|         if: $${{ always() }} | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features sqlite,mysql,postgresql,enable_mimalloc | ||||
|  | ||||
|       - name: "test features: sqlite,mysql,postgresql" | ||||
|         id: test_sqlite_mysql_postgresql | ||||
|         if: $${{ always() }} | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features sqlite,mysql,postgresql | ||||
|  | ||||
|       - name: "test features: sqlite" | ||||
|         id: test_sqlite | ||||
|         if: $${{ always() }} | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features sqlite | ||||
|  | ||||
|       - name: "test features: mysql" | ||||
|         id: test_mysql | ||||
|         if: $${{ always() }} | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features mysql | ||||
|  | ||||
|       - name: "test features: postgresql" | ||||
|         id: test_postgresql | ||||
|         if: $${{ always() }} | ||||
|         if: ${{ !cancelled() }} | ||||
|         run: | | ||||
|           cargo test --features postgresql | ||||
|       # End Run cargo tests | ||||
| @@ -152,16 +168,16 @@ jobs: | ||||
|       # Run cargo clippy, and fail on warnings | ||||
|       - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc" | ||||
|         id: clippy | ||||
|         if: ${{ always() && matrix.channel == 'rust-toolchain' }} | ||||
|         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} | ||||
|         run: | | ||||
|           cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings | ||||
|           cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc | ||||
|       # End Run cargo clippy | ||||
|  | ||||
|  | ||||
|       # Run cargo fmt (Only run on rust-toolchain defined version) | ||||
|       - name: "check formatting" | ||||
|         id: formatting | ||||
|         if: ${{ always() && matrix.channel == 'rust-toolchain' }} | ||||
|         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} | ||||
|         run: | | ||||
|           cargo fmt --all -- --check | ||||
|       # End Run cargo fmt | ||||
| @@ -171,21 +187,31 @@ jobs: | ||||
|       # This is useful so all test/clippy/fmt actions are done, and they can all be addressed | ||||
|       - name: "Some checks failed" | ||||
|         if: ${{ failure() }} | ||||
|         env: | ||||
|           TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }} | ||||
|           TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }} | ||||
|           TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }} | ||||
|           TEST_SQLITE: ${{ steps.test_sqlite.outcome }} | ||||
|           TEST_MYSQL: ${{ steps.test_mysql.outcome }} | ||||
|           TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }} | ||||
|           CLIPPY: ${{ steps.clippy.outcome }} | ||||
|           FMT: ${{ steps.formatting.outcome }} | ||||
|         run: | | ||||
|           echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|---|------|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY | ||||
|           echo "" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           exit 1 | ||||
|  | ||||
|  | ||||
| @@ -194,5 +220,5 @@ jobs: | ||||
|       - name: "All checks passed" | ||||
|         if: ${{ success() }} | ||||
|         run: | | ||||
|           echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "" >> $GITHUB_STEP_SUMMARY | ||||
|           echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}" | ||||
|           echo "" >> "${GITHUB_STEP_SUMMARY}" | ||||
|   | ||||
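
The reworked `Init Variables` step above resolves the toolchain from repository files and passes `matrix.channel` through an `env:` entry instead of interpolating `${{ }}` expressions straight into the script. A rough local equivalent of that detection, run from the repository root; the `grep` expressions are copied from the workflow, and GNU grep with `-P` support is assumed:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Same extraction the workflow performs for its two matrix channels.
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"  # "rust-toolchain" channel
MSRV="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"               # "msrv" channel

echo "rust-toolchain: ${RUST_TOOLCHAIN}"
echo "msrv:           ${MSRV}"
```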
.github/workflows/check-templates.yml (29 changes, vendored, new file)

| @@ -0,0 +1,29 @@ | ||||
| name: Check templates | ||||
| permissions: {} | ||||
|  | ||||
| on: [ push, pull_request ] | ||||
|  | ||||
| jobs: | ||||
|   docker-templates: | ||||
|     name: Validate docker templates | ||||
|     permissions: | ||||
|       contents: read | ||||
|     runs-on: ubuntu-24.04 | ||||
|     timeout-minutes: 30 | ||||
|  | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: "Checkout" | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|       # End Checkout the repo | ||||
|  | ||||
|       - name: Run make to rebuild templates | ||||
|         working-directory: docker | ||||
|         run: make | ||||
|  | ||||
|       - name: Check for unstaged changes | ||||
|         working-directory: docker | ||||
|         run: git diff --exit-code | ||||
|         continue-on-error: false | ||||
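
The new workflow regenerates the Dockerfiles from their templates and fails if the committed files have drifted. The same check can be run locally with the repository's own tooling; a short sketch:

```bash
# Rebuild the Dockerfiles from the templates, then fail if anything changed.
cd docker
make
git diff --exit-code   # a non-zero exit means the committed files are out of date
```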
.github/workflows/hadolint.yml (22 changes, vendored)

| @@ -1,24 +1,20 @@ | ||||
| name: Hadolint | ||||
| permissions: {} | ||||
|  | ||||
| on: [ | ||||
|       push, | ||||
|       pull_request | ||||
|     ] | ||||
| on: [ push, pull_request ] | ||||
|  | ||||
| jobs: | ||||
|   hadolint: | ||||
|     name: Validate Dockerfile syntax | ||||
|     permissions: | ||||
|       contents: read | ||||
|     runs-on: ubuntu-24.04 | ||||
|     timeout-minutes: 30 | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1 | ||||
|       # End Checkout the repo | ||||
|  | ||||
|     steps: | ||||
|       # Start Docker Buildx | ||||
|       - name: Setup Docker Buildx | ||||
|         uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 | ||||
|         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 | ||||
|         # https://github.com/moby/buildkit/issues/3969 | ||||
|         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills | ||||
|         with: | ||||
| @@ -37,6 +33,12 @@ jobs: | ||||
|         env: | ||||
|           HADOLINT_VERSION: 2.12.0 | ||||
|       # End Download hadolint | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|       # End Checkout the repo | ||||
|  | ||||
|       # Test Dockerfiles with hadolint | ||||
|       - name: Run hadolint | ||||
|   | ||||
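
The workflow pins hadolint 2.12.0 and lints the Dockerfiles after checkout. A minimal local sketch, assuming the hadolint binary is on PATH; the Dockerfile names are an assumption based on the repository layout, not taken from this diff:

```bash
# Lint the generated Dockerfiles roughly the way the workflow does.
hadolint docker/Dockerfile.debian docker/Dockerfile.alpine
```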
.github/workflows/release.yml (213 changes, vendored)

| @@ -1,4 +1,5 @@ | ||||
| name: Release | ||||
| permissions: {} | ||||
|  | ||||
| on: | ||||
|   push: | ||||
| @@ -6,35 +7,30 @@ on: | ||||
|       - main | ||||
|  | ||||
|     tags: | ||||
|       - '*' | ||||
|       # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet | ||||
|       - '[1-2].[0-9]+.[0-9]+' | ||||
|  | ||||
| concurrency: | ||||
|   # Apply concurrency control only on the upstream repo | ||||
|   group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }} | ||||
|   # Don't cancel other runs when creating a tag | ||||
|   cancel-in-progress: ${{ github.ref_type == 'branch' }} | ||||
|  | ||||
| jobs: | ||||
|   # https://github.com/marketplace/actions/skip-duplicate-actions | ||||
|   # Some checks to determine if we need to continue with building a new docker. | ||||
|   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already. | ||||
|   skip_check: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     outputs: | ||||
|       should_skip: ${{ steps.skip_check.outputs.should_skip }} | ||||
|     steps: | ||||
|       - name: Skip Duplicates Actions | ||||
|         id: skip_check | ||||
|         uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1 | ||||
|         with: | ||||
|           cancel_others: 'true' | ||||
|         # Only run this when not creating a tag | ||||
|         if: ${{ github.ref_type == 'branch' }} | ||||
|  | ||||
|   docker-build: | ||||
|     name: Build Vaultwarden containers | ||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     permissions: | ||||
|       packages: write | ||||
|       contents: read | ||||
|       attestations: write | ||||
|       id-token: write | ||||
|     runs-on: ubuntu-24.04 | ||||
|     timeout-minutes: 120 | ||||
|     needs: skip_check | ||||
|     if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     # Start a local docker registry to extract the final Alpine static build binaries | ||||
|     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them | ||||
|     services: | ||||
|       registry: | ||||
|         image: registry:2 | ||||
|         image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0 | ||||
|         ports: | ||||
|           - 5000:5000 | ||||
|     env: | ||||
| @@ -56,37 +52,42 @@ jobs: | ||||
|         base_image: ["debian","alpine"] | ||||
|  | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|  | ||||
|       - name: Initialize QEMU binfmt support | ||||
|         uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 | ||||
|         uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 | ||||
|         with: | ||||
|           platforms: "arm64,arm" | ||||
|  | ||||
|       # Start Docker Buildx | ||||
|       - name: Setup Docker Buildx | ||||
|         uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 | ||||
|         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 | ||||
|         # https://github.com/moby/buildkit/issues/3969 | ||||
|         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills | ||||
|         with: | ||||
|           cache-binary: false | ||||
|           buildkitd-config-inline: | | ||||
|             [worker.oci] | ||||
|               max-parallelism = 2 | ||||
|           driver-opts: | | ||||
|             network=host | ||||
|  | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 | ||||
|         # We need fetch-depth of 0 so we also get all the tag metadata | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|           fetch-depth: 0 | ||||
|  | ||||
|       # Determine Base Tags and Source Version | ||||
|       - name: Determine Base Tags and Source Version | ||||
|         shell: bash | ||||
|         env: | ||||
|           REF_TYPE: ${{ github.ref_type }} | ||||
|         run: | | ||||
|           # Check which main tag we are going to build determined by github.ref_type | ||||
|           if [[ "${{ github.ref_type }}" == "tag" ]]; then | ||||
|           # Check which main tag we are going to build determined by ref_type | ||||
|           if [[ "${REF_TYPE}" == "tag" ]]; then | ||||
|             echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}" | ||||
|           elif [[ "${{ github.ref_type }}" == "branch" ]]; then | ||||
|           elif [[ "${REF_TYPE}" == "branch" ]]; then | ||||
|             echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}" | ||||
|           fi | ||||
|  | ||||
| @@ -102,7 +103,7 @@ jobs: | ||||
|  | ||||
|       # Login to Docker Hub | ||||
|       - name: Login to Docker Hub | ||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
| @@ -111,12 +112,14 @@ jobs: | ||||
|       - name: Add registry for DockerHub | ||||
|         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} | ||||
|         shell: bash | ||||
|         env: | ||||
|           DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} | ||||
|         run: | | ||||
|           echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}" | ||||
|           echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}" | ||||
|  | ||||
|       # Login to GitHub Container Registry | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -126,12 +129,14 @@ jobs: | ||||
|       - name: Add registry for ghcr.io | ||||
|         if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} | ||||
|         shell: bash | ||||
|         env: | ||||
|           GHCR_REPO: ${{ vars.GHCR_REPO }} | ||||
|         run: | | ||||
|           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}" | ||||
|           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}" | ||||
|  | ||||
|       # Login to Quay.io | ||||
|       - name: Login to Quay.io | ||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: quay.io | ||||
|           username: ${{ secrets.QUAY_USERNAME }} | ||||
| @@ -141,17 +146,22 @@ jobs: | ||||
|       - name: Add registry for Quay.io | ||||
|         if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} | ||||
|         shell: bash | ||||
|         env: | ||||
|           QUAY_REPO: ${{ vars.QUAY_REPO }} | ||||
|         run: | | ||||
|           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}" | ||||
|           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" | ||||
|  | ||||
|       - name: Configure build cache from/to | ||||
|         shell: bash | ||||
|         env: | ||||
|           GHCR_REPO: ${{ vars.GHCR_REPO }} | ||||
|           BASE_IMAGE: ${{ matrix.base_image }} | ||||
|         run: | | ||||
|           # | ||||
|           # Check if there is a GitHub Container Registry Login and use it for caching | ||||
|           if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then | ||||
|             echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}" | ||||
|             echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" | ||||
|             echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}" | ||||
|             echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" | ||||
|           else | ||||
|             echo "BAKE_CACHE_FROM=" | ||||
|             echo "BAKE_CACHE_TO=" | ||||
| @@ -159,13 +169,13 @@ jobs: | ||||
|           # | ||||
|  | ||||
|       - name: Add localhost registry | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|         shell: bash | ||||
|         run: | | ||||
|           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}" | ||||
|  | ||||
|       - name: Bake ${{ matrix.base_image }} containers | ||||
|         uses: docker/bake-action@2e3d19baedb14545e5d41222653874f25d5b4dfb # v5.10.0 | ||||
|         id: bake_vw | ||||
|         uses: docker/bake-action@37816e747588cb137173af99ab33873600c46ea8 # v6.8.0 | ||||
|         env: | ||||
|           BASE_TAGS: "${{ env.BASE_TAGS }}" | ||||
|           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" | ||||
| @@ -175,78 +185,121 @@ jobs: | ||||
|         with: | ||||
|           pull: true | ||||
|           push: true | ||||
|           source: . | ||||
|           files: docker/docker-bake.hcl | ||||
|           targets: "${{ matrix.base_image }}-multi" | ||||
|           set: | | ||||
|             *.cache-from=${{ env.BAKE_CACHE_FROM }} | ||||
|             *.cache-to=${{ env.BAKE_CACHE_TO }} | ||||
|  | ||||
|       - name: Extract digest SHA | ||||
|         shell: bash | ||||
|         env: | ||||
|           BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} | ||||
|           BASE_IMAGE: ${{ matrix.base_image }} | ||||
|         run: | | ||||
|           GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" | ||||
|           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" | ||||
|  | ||||
|       # Attest container images | ||||
|       - name: Attest - docker.io - ${{ matrix.base_image }} | ||||
|         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||
|         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||
|         with: | ||||
|           subject-name: ${{ vars.DOCKERHUB_REPO }} | ||||
|           subject-digest: ${{ env.DIGEST_SHA }} | ||||
|           push-to-registry: true | ||||
|  | ||||
|       - name: Attest - ghcr.io - ${{ matrix.base_image }} | ||||
|         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||
|         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||
|         with: | ||||
|           subject-name: ${{ vars.GHCR_REPO }} | ||||
|           subject-digest: ${{ env.DIGEST_SHA }} | ||||
|           push-to-registry: true | ||||
|  | ||||
|       - name: Attest - quay.io - ${{ matrix.base_image }} | ||||
|         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||
|         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||
|         with: | ||||
|           subject-name: ${{ vars.QUAY_REPO }} | ||||
|           subject-digest: ${{ env.DIGEST_SHA }} | ||||
|           push-to-registry: true | ||||
|  | ||||
|  | ||||
|       # Extract the Alpine binaries from the containers | ||||
|       - name: Extract binaries | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|         shell: bash | ||||
|         env: | ||||
|           REF_TYPE: ${{ github.ref_type }} | ||||
|           BASE_IMAGE: ${{ matrix.base_image }} | ||||
|         run: | | ||||
|           # Check which main tag we are going to build determined by github.ref_type | ||||
|           if [[ "${{ github.ref_type }}" == "tag" ]]; then | ||||
|           # Check which main tag we are going to build determined by ref_type | ||||
|           if [[ "${REF_TYPE}" == "tag" ]]; then | ||||
|             EXTRACT_TAG="latest" | ||||
|           elif [[ "${{ github.ref_type }}" == "branch" ]]; then | ||||
|           elif [[ "${REF_TYPE}" == "branch" ]]; then | ||||
|             EXTRACT_TAG="testing" | ||||
|           fi | ||||
|  | ||||
|           # Check which base_image was used and append -alpine if needed | ||||
|           if [[ "${BASE_IMAGE}" == "alpine" ]]; then | ||||
|             EXTRACT_TAG="${EXTRACT_TAG}-alpine" | ||||
|           fi | ||||
|  | ||||
|           # After each extraction the image is removed. | ||||
|           # This is needed because using different platforms doesn't trigger a new pull/download | ||||
|  | ||||
|           # Extract amd64 binary | ||||
|           docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker cp amd64:/vaultwarden vaultwarden-amd64 | ||||
|           docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|           docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE} | ||||
|           docker rm --force amd64 | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|  | ||||
|           # Extract arm64 binary | ||||
|           docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker cp arm64:/vaultwarden vaultwarden-arm64 | ||||
|           docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|           docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE} | ||||
|           docker rm --force arm64 | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|  | ||||
|           # Extract armv7 binary | ||||
|           docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker cp armv7:/vaultwarden vaultwarden-armv7 | ||||
|           docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|           docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE} | ||||
|           docker rm --force armv7 | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|  | ||||
|           # Extract armv6 binary | ||||
|           docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker cp armv6:/vaultwarden vaultwarden-armv6 | ||||
|           docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|           docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE} | ||||
|           docker rm --force armv6 | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" | ||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||
|  | ||||
|       # Upload artifacts to Github Actions | ||||
|       - name: "Upload amd64 artifact" | ||||
|         uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|       # Upload artifacts to Github Actions and Attest the binaries | ||||
|       - name: "Upload amd64 artifact ${{ matrix.base_image }}" | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64 | ||||
|           path: vaultwarden-amd64 | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} | ||||
|           path: vaultwarden-amd64-${{ matrix.base_image }} | ||||
|  | ||||
|       - name: "Upload arm64 artifact" | ||||
|         uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|       - name: "Upload arm64 artifact ${{ matrix.base_image }}" | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64 | ||||
|           path: vaultwarden-arm64 | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} | ||||
|           path: vaultwarden-arm64-${{ matrix.base_image }} | ||||
|  | ||||
|       - name: "Upload armv7 artifact" | ||||
|         uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|       - name: "Upload armv7 artifact ${{ matrix.base_image }}" | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7 | ||||
|           path: vaultwarden-armv7 | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} | ||||
|           path: vaultwarden-armv7-${{ matrix.base_image }} | ||||
|  | ||||
|       - name: "Upload armv6 artifact" | ||||
|         uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||||
|         if: ${{ matrix.base_image == 'alpine' }} | ||||
|       - name: "Upload armv6 artifact ${{ matrix.base_image }}" | ||||
|         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||
|         with: | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6 | ||||
|           path: vaultwarden-armv6 | ||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} | ||||
|           path: vaultwarden-armv6-${{ matrix.base_image }} | ||||
|  | ||||
|       - name: "Attest artifacts ${{ matrix.base_image }}" | ||||
|         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||
|         with: | ||||
|           subject-path: vaultwarden-* | ||||
|       # End Upload artifacts to Github Actions | ||||
|   | ||||
							
								
								
									
6  .github/workflows/releasecache-cleanup.yml  (vendored)
							| @@ -1,3 +1,6 @@ | ||||
| name: Cleanup | ||||
| permissions: {} | ||||
|  | ||||
| on: | ||||
|   workflow_dispatch: | ||||
|     inputs: | ||||
| @@ -9,10 +12,11 @@ on: | ||||
|   schedule: | ||||
|     - cron: '0 1 * * FRI' | ||||
|  | ||||
| name: Cleanup | ||||
| jobs: | ||||
|   releasecache-cleanup: | ||||
|     name: Releasecache Cleanup | ||||
|     permissions: | ||||
|       packages: write | ||||
|     runs-on: ubuntu-24.04 | ||||
|     continue-on-error: true | ||||
|     timeout-minutes: 30 | ||||
|   | ||||
							
								
								
									
34  .github/workflows/trivy.yml  (vendored)
							| @@ -1,37 +1,45 @@ | ||||
| name: trivy | ||||
| name: Trivy | ||||
| permissions: {} | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
|     tags: | ||||
|       - '*' | ||||
|  | ||||
|   pull_request: | ||||
|     branches: [ "main" ] | ||||
|     branches: | ||||
|       - main | ||||
|  | ||||
|   schedule: | ||||
|     - cron: '08 11 * * *' | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   trivy-scan: | ||||
|     # Only run this in the master repo and not on forks | ||||
|     # Only run this in the upstream repo and not on forks | ||||
|     # When all forks run this at the same time, it is causing `Too Many Requests` issues | ||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     name: Check | ||||
|     runs-on: ubuntu-24.04 | ||||
|     timeout-minutes: 30 | ||||
|     name: Trivy Scan | ||||
|     permissions: | ||||
|       contents: read | ||||
|       security-events: write | ||||
|       actions: read | ||||
|       security-events: write | ||||
|     runs-on: ubuntu-24.04 | ||||
|     timeout-minutes: 30 | ||||
|  | ||||
|     steps: | ||||
|       - name: Checkout code | ||||
|         uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1 | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|  | ||||
|       - name: Run Trivy vulnerability scanner | ||||
|         uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0 | ||||
|         uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0 | ||||
|         env: | ||||
|           TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 | ||||
|           TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1 | ||||
|         with: | ||||
|           scan-type: repo | ||||
|           ignore-unfixed: true | ||||
| @@ -40,6 +48,6 @@ jobs: | ||||
|           severity: CRITICAL,HIGH | ||||
|  | ||||
|       - name: Upload Trivy scan results to GitHub Security tab | ||||
|         uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6 | ||||
|         uses: github/codeql-action/upload-sarif@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9 | ||||
|         with: | ||||
|           sarif_file: 'trivy-results.sarif' | ||||
|   | ||||
							
								
								
									
28  .github/workflows/zizmor.yml  (vendored, Normal file)
							| @@ -0,0 +1,28 @@ | ||||
| name: Security Analysis with zizmor | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["main"] | ||||
|   pull_request: | ||||
|     branches: ["**"] | ||||
|  | ||||
| permissions: {} | ||||
|  | ||||
| jobs: | ||||
|   zizmor: | ||||
|     name: Run zizmor | ||||
|     runs-on: ubuntu-latest | ||||
|     permissions: | ||||
|       security-events: write | ||||
|     steps: | ||||
|       - name: Checkout repository | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|  | ||||
|       - name: Run zizmor | ||||
|         uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2 | ||||
|         with: | ||||
|           # intentionally not scanning the entire repository, | ||||
|           # since it contains integration tests. | ||||
|           inputs: ./.github/ | ||||
| @@ -1,7 +1,7 @@ | ||||
| --- | ||||
| repos: | ||||
| -   repo: https://github.com/pre-commit/pre-commit-hooks | ||||
|     rev: v4.6.0 | ||||
|     rev: v6.0.0 | ||||
|     hooks: | ||||
|     - id: check-yaml | ||||
|     - id: check-json | ||||
| @@ -31,7 +31,7 @@ repos: | ||||
|       language: system | ||||
|       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"] | ||||
|       types_or: [rust, file] | ||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain|.*\.rs$) | ||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) | ||||
|       pass_filenames: false | ||||
|     - id: cargo-clippy | ||||
|       name: cargo clippy | ||||
| @@ -40,5 +40,13 @@ repos: | ||||
|       language: system | ||||
|       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"] | ||||
|       types_or: [rust, file] | ||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain|clippy.toml|.*\.rs$) | ||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) | ||||
|       pass_filenames: false | ||||
|     - id: check-docker-templates | ||||
|       name: check docker templates | ||||
|       description: Check if the Docker templates are updated | ||||
|       language: system | ||||
|       entry: sh | ||||
|       args: | ||||
|         - "-c" | ||||
|         - "cd docker && make" | ||||
|   | ||||
							
								
								
									
3756  Cargo.lock  (generated; file diff suppressed because it is too large)

142  Cargo.toml
							| @@ -1,9 +1,12 @@ | ||||
| [workspace] | ||||
| members = ["macros"] | ||||
|  | ||||
| [package] | ||||
| name = "vaultwarden" | ||||
| version = "1.0.0" | ||||
| authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] | ||||
| edition = "2021" | ||||
| rust-version = "1.80.0" | ||||
| rust-version = "1.87.0" | ||||
| resolver = "2" | ||||
|  | ||||
| repository = "https://github.com/dani-garcia/vaultwarden" | ||||
| @@ -29,6 +32,11 @@ enable_mimalloc = ["dep:mimalloc"] | ||||
| # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile | ||||
| # if you want to turn off the logging for a specific run. | ||||
| query_logger = ["dep:diesel_logger"] | ||||
| s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"] | ||||
|  | ||||
| # OIDC specific features | ||||
| oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"] | ||||
| oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"] | ||||
|  | ||||
| # Enable unstable features, requires nightly | ||||
| # Currently only used to enable rusts official ip support | ||||
| @@ -36,24 +44,26 @@ unstable = [] | ||||
|  | ||||
| [target."cfg(unix)".dependencies] | ||||
| # Logging | ||||
| syslog = "6.1.1" | ||||
| syslog = "7.0.0" | ||||
|  | ||||
| [dependencies] | ||||
| macros = { path = "./macros" } | ||||
|  | ||||
| # Logging | ||||
| log = "0.4.22" | ||||
| fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] } | ||||
| tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work | ||||
| log = "0.4.27" | ||||
| fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] } | ||||
| tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work | ||||
|  | ||||
| # A `dotenv` implementation for Rust | ||||
| dotenvy = { version = "0.15.7", default-features = false } | ||||
|  | ||||
| # Lazy initialization | ||||
| once_cell = "1.20.2" | ||||
| once_cell = "1.21.3" | ||||
|  | ||||
| # Numerical libraries | ||||
| num-traits = "0.2.19" | ||||
| num-derive = "0.4.2" | ||||
| bigdecimal = "0.4.6" | ||||
| bigdecimal = "0.4.8" | ||||
|  | ||||
| # Web framework | ||||
| rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } | ||||
| @@ -67,104 +77,133 @@ dashmap = "6.1.0" | ||||
|  | ||||
| # Async futures | ||||
| futures = "0.3.31" | ||||
| tokio = { version = "1.41.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | ||||
| tokio = { version = "1.47.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | ||||
| tokio-util = { version = "0.7.16", features = ["compat"]} | ||||
|  | ||||
| # A generic serialization/deserialization framework | ||||
| serde = { version = "1.0.214", features = ["derive"] } | ||||
| serde_json = "1.0.132" | ||||
| serde = { version = "1.0.219", features = ["derive"] } | ||||
| serde_json = "1.0.142" | ||||
|  | ||||
| # A safe, extensible ORM and Query builder | ||||
| diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] } | ||||
| diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] } | ||||
| diesel_migrations = "2.2.0" | ||||
| diesel_logger = { version = "0.3.0", optional = true } | ||||
| diesel_logger = { version = "0.4.0", optional = true } | ||||
|  | ||||
| derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] } | ||||
| diesel-derive-newtype = "2.1.2" | ||||
|  | ||||
| # Bundled/Static SQLite | ||||
| libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true } | ||||
| libsqlite3-sys = { version = "0.35.0", features = ["bundled"], optional = true } | ||||
|  | ||||
| # Crypto-related libraries | ||||
| rand = { version = "0.8.5", features = ["small_rng"] } | ||||
| ring = "0.17.8" | ||||
| rand = "0.9.2" | ||||
| ring = "0.17.14" | ||||
| subtle = "2.6.1" | ||||
|  | ||||
| # UUID generation | ||||
| uuid = { version = "1.11.0", features = ["v4"] } | ||||
| uuid = { version = "1.18.0", features = ["v4"] } | ||||
|  | ||||
| # Date and time libraries | ||||
| chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false } | ||||
| chrono-tz = "0.10.0" | ||||
| time = "0.3.36" | ||||
| chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false } | ||||
| chrono-tz = "0.10.4" | ||||
| time = "0.3.41" | ||||
|  | ||||
| # Job scheduler | ||||
| job_scheduler_ng = "2.0.5" | ||||
| job_scheduler_ng = "2.3.0" | ||||
|  | ||||
| # Data encoding library Hex/Base32/Base64 | ||||
| data-encoding = "2.6.0" | ||||
| data-encoding = "2.9.0" | ||||
|  | ||||
| # JWT library | ||||
| jsonwebtoken = "9.3.0" | ||||
| jsonwebtoken = "9.3.1" | ||||
|  | ||||
| # TOTP library | ||||
| totp-lite = "2.0.1" | ||||
|  | ||||
| # Yubico Library | ||||
| yubico = { version = "0.11.0", features = ["online-tokio"], default-features = false } | ||||
| yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false } | ||||
|  | ||||
| # WebAuthn libraries | ||||
| webauthn-rs = "0.3.2" | ||||
| # danger-allow-state-serialisation is needed to save the state in the db | ||||
| # danger-credential-internals is needed to support U2F to Webauthn migration | ||||
| webauthn-rs = { version = "0.5.2", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } | ||||
| webauthn-rs-proto = "0.5.2" | ||||
| webauthn-rs-core = "0.5.2" | ||||
|  | ||||
| # Handling of URL's for WebAuthn and favicons | ||||
| url = "2.5.3" | ||||
| url = "2.5.4" | ||||
|  | ||||
| # Email libraries | ||||
| lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } | ||||
| lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false } | ||||
| percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails | ||||
| email_address = "0.2.9" | ||||
|  | ||||
| # HTML Template library | ||||
| handlebars = { version = "6.2.0", features = ["dir_source"] } | ||||
| handlebars = { version = "6.3.2", features = ["dir_source"] } | ||||
|  | ||||
| # HTTP client (Used for favicons, version check, DUO and HIBP API) | ||||
| reqwest = { version = "0.12.9", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] } | ||||
| hickory-resolver = "0.24.1" | ||||
| reqwest = { version = "0.12.23", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} | ||||
| hickory-resolver = "0.25.2" | ||||
|  | ||||
| # Favicon extraction libraries | ||||
| html5gum = "0.6.1" | ||||
| html5gum = "0.7.0" | ||||
| regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false } | ||||
| data-url = "0.3.1" | ||||
| bytes = "1.8.0" | ||||
| bytes = "1.10.1" | ||||
| svg-hush = "0.9.5" | ||||
|  | ||||
| # Cache function results (Used for version check and favicon fetching) | ||||
| cached = { version = "0.54.0", features = ["async"] } | ||||
| cached = { version = "0.56.0", features = ["async"] } | ||||
|  | ||||
| # Used for custom short lived cookie jar during favicon extraction | ||||
| cookie = "0.18.1" | ||||
| cookie_store = "0.21.1" | ||||
|  | ||||
| # Used by U2F, JWT and PostgreSQL | ||||
| openssl = "0.10.68" | ||||
| openssl = "0.10.73" | ||||
|  | ||||
| # CLI argument parsing | ||||
| pico-args = "0.5.0" | ||||
|  | ||||
| # Macro ident concatenation | ||||
| paste = "1.0.15" | ||||
| governor = "0.7.0" | ||||
| pastey = "0.1.1" | ||||
| governor = "0.10.1" | ||||
|  | ||||
| # OIDC for SSO | ||||
| openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] } | ||||
| mini-moka = "0.10.3" | ||||
|  | ||||
| # Check client versions for specific features. | ||||
| semver = "1.0.23" | ||||
| semver = "1.0.26" | ||||
|  | ||||
| # Allow overriding the default memory allocator | ||||
| # Mainly used for the musl builds, since the default musl malloc is very slow | ||||
| mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true } | ||||
| which = "7.0.0" | ||||
| mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true } | ||||
|  | ||||
| which = "8.0.0" | ||||
|  | ||||
| # Argon2 library with support for the PHC format | ||||
| argon2 = "0.5.3" | ||||
|  | ||||
| # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN | ||||
| rpassword = "7.3.1" | ||||
| rpassword = "7.4.0" | ||||
|  | ||||
| # Loading a dynamic CSS Stylesheet | ||||
| grass_compiler = { version = "0.13.4", default-features = false } | ||||
|  | ||||
| # Files are accessed through Apache OpenDAL | ||||
| opendal = { version = "0.54.0", features = ["services-fs"], default-features = false } | ||||
|  | ||||
| # For retrieving AWS credentials, including temporary SSO credentials | ||||
| anyhow = { version = "1.0.99", optional = true } | ||||
| aws-config = { version = "1.8.5", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } | ||||
| aws-credential-types = { version = "1.2.5", optional = true } | ||||
| aws-smithy-runtime-api = { version = "1.8.7", optional = true } | ||||
| http = { version = "1.3.1", optional = true } | ||||
| reqsign = { version = "0.16.5", optional = true } | ||||
|  | ||||
| # Strip debuginfo from the release builds | ||||
| # The symbols are the provide better panic traces | ||||
| # The debug symbols are to provide better panic traces | ||||
| # Also enable fat LTO and use 1 codegen unit for optimizations | ||||
| [profile.release] | ||||
| strip = "debuginfo" | ||||
| @@ -199,7 +238,7 @@ codegen-units = 16 | ||||
|  | ||||
| # Linting config | ||||
| # https://doc.rust-lang.org/rustc/lints/groups.html | ||||
| [lints.rust] | ||||
| [workspace.lints.rust] | ||||
| # Forbid | ||||
| unsafe_code = "forbid" | ||||
| non_ascii_idents = "forbid" | ||||
| @@ -213,7 +252,8 @@ noop_method_call = "deny" | ||||
| refining_impl_trait = { level = "deny", priority = -1 } | ||||
| rust_2018_idioms = { level = "deny", priority = -1 } | ||||
| rust_2021_compatibility = { level = "deny", priority = -1 } | ||||
| # rust_2024_compatibility = { level = "deny", priority = -1 } # Enable once we are at MSRV 1.81.0 | ||||
| rust_2024_compatibility = { level = "deny", priority = -1 } | ||||
| edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again | ||||
| single_use_lifetimes = "deny" | ||||
| trivial_casts = "deny" | ||||
| trivial_numeric_casts = "deny" | ||||
| @@ -222,16 +262,20 @@ unused_import_braces = "deny" | ||||
| unused_lifetimes = "deny" | ||||
| unused_qualifications = "deny" | ||||
| variant_size_differences = "deny" | ||||
| # The lints below are part of the rust_2024_compatibility group | ||||
| static-mut-refs = "deny" | ||||
| unsafe-op-in-unsafe-fn = "deny" | ||||
| # Allow the following lints since these cause issues with Rust v1.84.0 or newer | ||||
| # Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues | ||||
| if_let_rescope = "allow" | ||||
| tail_expr_drop_order = "allow" | ||||
|  | ||||
| # https://rust-lang.github.io/rust-clippy/stable/index.html | ||||
| [lints.clippy] | ||||
| [workspace.lints.clippy] | ||||
| # Warn | ||||
| dbg_macro = "warn" | ||||
| todo = "warn" | ||||
|  | ||||
| # Ignore/Allow | ||||
| result_large_err = "allow" | ||||
|  | ||||
| # Deny | ||||
| case_sensitive_file_extension_comparisons = "deny" | ||||
| cast_lossless = "deny" | ||||
| @@ -239,6 +283,7 @@ clone_on_ref_ptr = "deny" | ||||
| equatable_if_let = "deny" | ||||
| filter_map_next = "deny" | ||||
| float_cmp_const = "deny" | ||||
| implicit_clone = "deny" | ||||
| inefficient_to_string = "deny" | ||||
| iter_on_empty_collections = "deny" | ||||
| iter_on_single_items = "deny" | ||||
| @@ -247,14 +292,12 @@ macro_use_imports = "deny" | ||||
| manual_assert = "deny" | ||||
| manual_instant_elapsed = "deny" | ||||
| manual_string_new = "deny" | ||||
| match_on_vec_items = "deny" | ||||
| match_wildcard_for_single_variants = "deny" | ||||
| mem_forget = "deny" | ||||
| needless_continue = "deny" | ||||
| needless_lifetimes = "deny" | ||||
| option_option = "deny" | ||||
| string_add_assign = "deny" | ||||
| string_to_string = "deny" | ||||
| unnecessary_join = "deny" | ||||
| unnecessary_self_imports = "deny" | ||||
| unnested_or_patterns = "deny" | ||||
| @@ -262,3 +305,6 @@ unused_async = "deny" | ||||
| unused_self = "deny" | ||||
| verbose_file_reads = "deny" | ||||
| zero_sized_map_values = "deny" | ||||
|  | ||||
| [lints] | ||||
| workspace = true | ||||
|   | ||||
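The dependency changes above route file access through Apache OpenDAL, with the new optional `s3` feature pulling in the AWS credential crates. As a hedged illustration of what that abstraction provides (this is not Vaultwarden's actual code; the root path, the object path, and the use of `tokio` with its macros feature are assumptions), an OpenDAL `Operator` exposes the same read/write calls whether the backing service is the local filesystem or, with the `s3` feature, an S3 bucket:

```rust
// Hypothetical sketch of OpenDAL usage (opendal 0.54-style API);
// Vaultwarden's real data layer may differ.
use opendal::{services, Operator};

#[tokio::main]
async fn main() -> opendal::Result<()> {
    // Local filesystem backend; with the `s3` feature an S3 builder
    // could be swapped in here without changing the calls below.
    let op = Operator::new(services::Fs::default().root("/tmp/vw-demo"))?.finish();

    // Write and read back a small blob through the same interface.
    op.write("attachments/demo.bin", vec![1u8, 2, 3]).await?;
    let data = op.read("attachments/demo.bin").await?;
    assert_eq!(data.to_vec(), vec![1u8, 2, 3]);
    Ok(())
}
```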
							
								
								
									
24  README.md
							| @@ -59,19 +59,21 @@ A nearly complete implementation of the Bitwarden Client API is provided, includ | ||||
| ## Usage | ||||
|  | ||||
| > [!IMPORTANT] | ||||
| > Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost. | ||||
| > | ||||
| >This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)). | ||||
| > | ||||
| >If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above). | ||||
| > The web-vault requires the use of a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API). | ||||
| > That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS). | ||||
|  | ||||
| The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). | ||||
| See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags. | ||||
|  | ||||
| There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki). | ||||
|  | ||||
| Alternatively, you can also [build Vaultwarden](https://github.com/dani-garcia/vaultwarden/wiki/Building-binary) yourself. | ||||
|  | ||||
| While Vaultwarden is based upon the [Rocket web framework](https://rocket.rs), which has built-in support for TLS, our recommendation would be that you set up a reverse proxy (see [proxy examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)). | ||||
|  | ||||
| > [!TIP] | ||||
| >**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).** | ||||
|  | ||||
| The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). | ||||
|  | ||||
| There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki). | ||||
|  | ||||
| ### Docker/Podman CLI | ||||
|  | ||||
| Pull the container image and mount a volume from the host for persistent storage.<br> | ||||
| @@ -83,7 +85,7 @@ docker run --detach --name vaultwarden \ | ||||
|   --env DOMAIN="https://vw.domain.tld" \ | ||||
|   --volume /vw-data/:/data/ \ | ||||
|   --restart unless-stopped \ | ||||
|   --publish 80:80 \ | ||||
|   --publish 127.0.0.1:8000:80 \ | ||||
|   vaultwarden/server:latest | ||||
| ``` | ||||
|  | ||||
| @@ -104,7 +106,7 @@ services: | ||||
|     volumes: | ||||
|       - ./vw-data/:/data/ | ||||
|     ports: | ||||
|       - 80:80 | ||||
|       - 127.0.0.1:8000:80 | ||||
| ``` | ||||
|  | ||||
| <br> | ||||
|   | ||||
| @@ -21,7 +21,7 @@ notify us. We welcome working with you to resolve the issue promptly. Thanks in | ||||
| The following bug classes are out-of scope: | ||||
|  | ||||
| - Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues) | ||||
| - Bugs that are not part of Vaultwarden, like on the the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated | ||||
| - Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated | ||||
| - Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer | ||||
| - Attacks requiring physical access to a user's device | ||||
| - Issues related to software or protocols not under Vaultwarden's control | ||||
|   | ||||
							
								
								
									
7  build.rs
							| @@ -11,6 +11,8 @@ fn main() { | ||||
|     println!("cargo:rustc-cfg=postgresql"); | ||||
|     #[cfg(feature = "query_logger")] | ||||
|     println!("cargo:rustc-cfg=query_logger"); | ||||
|     #[cfg(feature = "s3")] | ||||
|     println!("cargo:rustc-cfg=s3"); | ||||
|  | ||||
|     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] | ||||
|     compile_error!( | ||||
| @@ -23,6 +25,7 @@ fn main() { | ||||
|     println!("cargo::rustc-check-cfg=cfg(mysql)"); | ||||
|     println!("cargo::rustc-check-cfg=cfg(postgresql)"); | ||||
|     println!("cargo::rustc-check-cfg=cfg(query_logger)"); | ||||
|     println!("cargo::rustc-check-cfg=cfg(s3)"); | ||||
|  | ||||
|     // Rerun when these paths are changed. | ||||
|     // Someone could have checked-out a tag or specific commit, but no other files changed. | ||||
| @@ -48,8 +51,8 @@ fn main() { | ||||
| fn run(args: &[&str]) -> Result<String, std::io::Error> { | ||||
|     let out = Command::new(args[0]).args(&args[1..]).output()?; | ||||
|     if !out.status.success() { | ||||
|         use std::io::{Error, ErrorKind}; | ||||
|         return Err(Error::new(ErrorKind::Other, "Command not successful")); | ||||
|         use std::io::Error; | ||||
|         return Err(Error::other("Command not successful")); | ||||
|     } | ||||
|     Ok(String::from_utf8(out.stdout).unwrap().trim().to_string()) | ||||
| } | ||||
|   | ||||
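For context on the `rustc-cfg`/`rustc-check-cfg` pair added in build.rs above: emitting a custom cfg from the build script lets the rest of the crate gate S3-only code paths at compile time, while the check-cfg line keeps `unexpected_cfgs` warnings away when the feature is off. A minimal sketch (hypothetical module and function names, not Vaultwarden's actual layout):

```rust
// Sketch: code gated on the custom `s3` cfg emitted by build.rs.

#[cfg(s3)]
mod storage {
    // Compiled only when build.rs printed `cargo:rustc-cfg=s3`.
    pub fn backend_name() -> &'static str {
        "s3"
    }
}

#[cfg(not(s3))]
mod storage {
    // Fallback used for builds without the `s3` feature.
    pub fn backend_name() -> &'static str {
        "local filesystem"
    }
}

fn main() {
    println!("storage backend: {}", storage::backend_name());
}
```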
| @@ -1,13 +1,13 @@ | ||||
| --- | ||||
| vault_version: "v2024.6.2c" | ||||
| vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b" | ||||
| # Cross Compile Docker Helper Scripts v1.5.0 | ||||
| vault_version: "v2025.7.2" | ||||
| vault_image_digest: "sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9" | ||||
| # Cross Compile Docker Helper Scripts v1.6.1 | ||||
| # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts | ||||
| # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags | ||||
| xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa" | ||||
| rust_version: 1.82.0 # Rust version to be used | ||||
| debian_version: bookworm # Debian release name to be used | ||||
| alpine_version: "3.20" # Alpine version to be used | ||||
| xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" | ||||
| rust_version: 1.89.0 # Rust version to be used | ||||
| debian_version: trixie # Debian release name to be used | ||||
| alpine_version: "3.22" # Alpine version to be used | ||||
| # For which platforms/architectures will we try to build images | ||||
| platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] | ||||
| # Determine the build images per OS/Arch | ||||
|   | ||||
| @@ -19,23 +19,23 @@ | ||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||
| #   click the tag name to view the digest of the image it currently points to. | ||||
| # - From the command line: | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b] | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] | ||||
| # | ||||
| # - Conversely, to get the tag name from the digest: | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b | ||||
| #     [docker.io/vaultwarden/web-vault:v2024.6.2c] | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.2] | ||||
| # | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault | ||||
|  | ||||
| ########################## ALPINE BUILD IMAGES ########################## | ||||
| ## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 | ||||
| ## And for Alpine we define all build images here, they will only be loaded when actually used | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6 | ||||
|  | ||||
| ########################## BUILD IMAGE ########################## | ||||
| # hadolint ignore=DL3006 | ||||
| @@ -76,6 +76,7 @@ RUN source /env-cargo && \ | ||||
|  | ||||
| # Copies over *only* your manifests and build files | ||||
| COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ | ||||
| COPY ./macros ./macros | ||||
|  | ||||
| ARG CARGO_PROFILE=release | ||||
|  | ||||
| @@ -126,7 +127,7 @@ RUN source /env-cargo && \ | ||||
| # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||
| # | ||||
| # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.20 | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22 | ||||
|  | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|   | ||||
| @@ -19,24 +19,24 @@ | ||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||
| #   click the tag name to view the digest of the image it currently points to. | ||||
| # - From the command line: | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b] | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] | ||||
| # | ||||
| # - Conversely, to get the tag name from the digest: | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b | ||||
| #     [docker.io/vaultwarden/web-vault:v2024.6.2c] | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.2] | ||||
| # | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault | ||||
|  | ||||
| ########################## Cross Compile Docker Helper Scripts ########################## | ||||
| ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts | ||||
| ## And these bash scripts do not have any significant difference if at all | ||||
| FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa AS xx | ||||
| FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx | ||||
|  | ||||
| ########################## BUILD IMAGE ########################## | ||||
| # hadolint ignore=DL3006 | ||||
| FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build | ||||
| FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build | ||||
| COPY --from=xx / / | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| @@ -68,15 +68,11 @@ RUN apt-get update && \ | ||||
|     xx-apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         gcc \ | ||||
|         libmariadb3 \ | ||||
|         libpq-dev \ | ||||
|         libpq5 \ | ||||
|         libssl-dev \ | ||||
|         libmariadb-dev \ | ||||
|         zlib1g-dev && \ | ||||
|     # Force install arch dependend mariadb dev packages | ||||
|     # Installing them the normal way breaks several other packages (again) | ||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ | ||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ | ||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||
|  | ||||
| @@ -89,24 +85,24 @@ RUN USER=root cargo new --bin /app | ||||
| WORKDIR /app | ||||
|  | ||||
| # Environment variables for Cargo on Debian based builds | ||||
| ARG ARCH_OPENSSL_LIB_DIR \ | ||||
|     ARCH_OPENSSL_INCLUDE_DIR | ||||
| ARG TARGET_PKG_CONFIG_PATH | ||||
|  | ||||
| RUN source /env-cargo && \ | ||||
|     if xx-info is-cross ; then \ | ||||
|         # Some special variables if needed to override some build paths | ||||
|         if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ | ||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ | ||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ | ||||
|         fi && \ | ||||
|         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. | ||||
|         # Because of this we generate the needed environment variables here which we can load in the needed steps. | ||||
|         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||
|         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||
|         echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \ | ||||
|         echo "export CROSS_COMPILE=1" >> /env-cargo && \ | ||||
|         echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \ | ||||
|         echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \ | ||||
|         echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ | ||||
|         # For some architectures `xx-info` returns a triple which doesn't match the path on disk | ||||
|         # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg | ||||
|         if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \ | ||||
|             echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \ | ||||
|         else \ | ||||
|             echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \ | ||||
|         fi && \ | ||||
|         echo "# End of env-cargo" >> /env-cargo ; \ | ||||
|     fi && \ | ||||
|     # Output the current contents of the file | ||||
|     cat /env-cargo | ||||
| @@ -116,6 +112,7 @@ RUN source /env-cargo && \ | ||||
|  | ||||
| # Copies over *only* your manifests and build files | ||||
| COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ | ||||
| COPY ./macros ./macros | ||||
|  | ||||
| ARG CARGO_PROFILE=release | ||||
|  | ||||
| @@ -165,7 +162,7 @@ RUN source /env-cargo && \ | ||||
| # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||
| # | ||||
| # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim | ||||
|  | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
| @@ -178,7 +175,7 @@ RUN mkdir /data && \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     apt-get clean && \ | ||||
|   | ||||
| @@ -86,15 +86,11 @@ RUN apt-get update && \ | ||||
|     xx-apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         gcc \ | ||||
|         libmariadb3 \ | ||||
|         libpq-dev \ | ||||
|         libpq5 \ | ||||
|         libssl-dev \ | ||||
|         libmariadb-dev \ | ||||
|         zlib1g-dev && \ | ||||
|     # Force install arch dependend mariadb dev packages | ||||
|     # Installing them the normal way breaks several other packages (again) | ||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ | ||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ | ||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||
| {% endif %} | ||||
| @@ -109,24 +105,24 @@ WORKDIR /app | ||||
|  | ||||
| {% if base == "debian" %} | ||||
| # Environment variables for Cargo on Debian based builds | ||||
| ARG ARCH_OPENSSL_LIB_DIR \ | ||||
|     ARCH_OPENSSL_INCLUDE_DIR | ||||
| ARG TARGET_PKG_CONFIG_PATH | ||||
|  | ||||
| RUN source /env-cargo && \ | ||||
|     if xx-info is-cross ; then \ | ||||
|         # Some special variables if needed to override some build paths | ||||
|         if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ | ||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ | ||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ | ||||
|         fi && \ | ||||
|         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. | ||||
|         # Because of this we generate the needed environment variables here which we can load in the needed steps. | ||||
|         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||
|         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||
|         echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \ | ||||
|         echo "export CROSS_COMPILE=1" >> /env-cargo && \ | ||||
|         echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \ | ||||
|         echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \ | ||||
|         echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ | ||||
|         # For some architectures `xx-info` returns a triple which doesn't match the path on disk | ||||
|         # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg | ||||
|         if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \ | ||||
|             echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \ | ||||
|         else \ | ||||
|             echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \ | ||||
|         fi && \ | ||||
|         echo "# End of env-cargo" >> /env-cargo ; \ | ||||
|     fi && \ | ||||
|     # Output the current contents of the file | ||||
|     cat /env-cargo | ||||
| @@ -143,6 +139,7 @@ RUN source /env-cargo && \ | ||||
|  | ||||
| # Copies over *only* your manifests and build files | ||||
| COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ | ||||
| COPY ./macros ./macros | ||||
|  | ||||
| ARG CARGO_PROFILE=release | ||||
|  | ||||
| @@ -215,7 +212,7 @@ RUN mkdir /data && \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     apt-get clean && \ | ||||
|   | ||||
| @@ -46,7 +46,7 @@ There also is an option to use another docker container to provide support for | ||||
| ```bash | ||||
| # To install and activate | ||||
| docker run --privileged --rm tonistiigi/binfmt --install arm64,arm | ||||
| # To unistall | ||||
| # To uninstall | ||||
| docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||
| ``` | ||||
|  | ||||
|   | ||||
| @@ -17,7 +17,7 @@ variable "SOURCE_REPOSITORY_URL" { | ||||
|   default = null | ||||
| } | ||||
|  | ||||
| // The commit hash of of the current commit this build was triggered on | ||||
| // The commit hash of the current commit this build was triggered on | ||||
| variable "SOURCE_COMMIT" { | ||||
|   default = null | ||||
| } | ||||
| @@ -133,8 +133,7 @@ target "debian-386" { | ||||
|   platforms = ["linux/386"] | ||||
|   tags = generate_tags("", "-386") | ||||
|   args = { | ||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/i386-linux-gnu" | ||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/i386-linux-gnu" | ||||
|     TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig" | ||||
|   } | ||||
| } | ||||
|  | ||||
| @@ -142,20 +141,12 @@ target "debian-ppc64le" { | ||||
|   inherits = ["debian"] | ||||
|   platforms = ["linux/ppc64le"] | ||||
|   tags = generate_tags("", "-ppc64le") | ||||
|   args = { | ||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/powerpc64le-linux-gnu" | ||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/powerpc64le-linux-gnu" | ||||
|   } | ||||
| } | ||||
|  | ||||
| target "debian-s390x" { | ||||
|   inherits = ["debian"] | ||||
|   platforms = ["linux/s390x"] | ||||
|   tags = generate_tags("", "-s390x") | ||||
|   args = { | ||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/s390x-linux-gnu" | ||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/s390x-linux-gnu" | ||||
|   } | ||||
| } | ||||
| // ==== End of unsupported Debian architecture targets === | ||||
|  | ||||
|   | ||||
							
								
								
									
16  macros/Cargo.toml  (Normal file)
							| @@ -0,0 +1,16 @@ | ||||
| [package] | ||||
| name = "macros" | ||||
| version = "0.1.0" | ||||
| edition = "2021" | ||||
|  | ||||
| [lib] | ||||
| name = "macros" | ||||
| path = "src/lib.rs" | ||||
| proc-macro = true | ||||
|  | ||||
| [dependencies] | ||||
| quote = "1.0.40" | ||||
| syn = "2.0.105" | ||||
|  | ||||
| [lints] | ||||
| workspace = true | ||||
							
								
								
									
56  macros/src/lib.rs  (Normal file)
							| @@ -0,0 +1,56 @@ | ||||
| use proc_macro::TokenStream; | ||||
| use quote::quote; | ||||
|  | ||||
| #[proc_macro_derive(UuidFromParam)] | ||||
| pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream { | ||||
|     let ast = syn::parse(input).unwrap(); | ||||
|  | ||||
|     impl_derive_uuid_macro(&ast) | ||||
| } | ||||
|  | ||||
| fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream { | ||||
|     let name = &ast.ident; | ||||
|     let gen_derive = quote! { | ||||
|         #[automatically_derived] | ||||
|         impl<'r> rocket::request::FromParam<'r> for #name { | ||||
|             type Error = (); | ||||
|  | ||||
|             #[inline(always)] | ||||
|             fn from_param(param: &'r str) -> Result<Self, Self::Error> { | ||||
|                 if uuid::Uuid::parse_str(param).is_ok() { | ||||
|                     Ok(Self(param.to_string())) | ||||
|                 } else { | ||||
|                     Err(()) | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }; | ||||
|     gen_derive.into() | ||||
| } | ||||
|  | ||||
| #[proc_macro_derive(IdFromParam)] | ||||
| pub fn derive_id_from_param(input: TokenStream) -> TokenStream { | ||||
|     let ast = syn::parse(input).unwrap(); | ||||
|  | ||||
|     impl_derive_safestring_macro(&ast) | ||||
| } | ||||
|  | ||||
| fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream { | ||||
|     let name = &ast.ident; | ||||
|     let gen_derive = quote! { | ||||
|         #[automatically_derived] | ||||
|         impl<'r> rocket::request::FromParam<'r> for #name { | ||||
|             type Error = (); | ||||
|  | ||||
|             #[inline(always)] | ||||
|             fn from_param(param: &'r str) -> Result<Self, Self::Error> { | ||||
|                 if param.chars().all(|c| matches!(c, 'a'..='z' | 'A'..='Z' |'0'..='9' | '-')) { | ||||
|                     Ok(Self(param.to_string())) | ||||
|                 } else { | ||||
|                     Err(()) | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }; | ||||
|     gen_derive.into() | ||||
| } | ||||
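To make the intent of these derives concrete, here is a hedged usage sketch: the `CipherId` newtype and the route are illustrative only, not taken from the diff, and it assumes `rocket` 0.5, `uuid`, and this `macros` crate as dependencies of the consuming crate. Deriving `UuidFromParam` lets Rocket parse a route segment straight into the newtype, rejecting anything that is not a valid UUID before the handler runs:

```rust
// Illustrative only: a newtype ID using the UuidFromParam derive above.
use macros::UuidFromParam;

#[derive(UuidFromParam)]
struct CipherId(String);

// Rocket resolves <cipher_id> through the generated FromParam impl,
// so the handler only ever sees syntactically valid UUIDs.
#[rocket::get("/ciphers/<cipher_id>")]
fn get_cipher(cipher_id: CipherId) -> String {
    format!("cipher {}", cipher_id.0)
}

#[rocket::launch]
fn rocket() -> _ {
    rocket::build().mount("/", rocket::routes![get_cipher])
}
```

The `IdFromParam` derive works the same way, but accepts any alphanumeric-plus-dash identifier instead of requiring a parseable UUID.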
							
								
								
									
1  migrations/mysql/2023-09-10-133000_add_sso/down.sql  (Normal file)
							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
							
								
								
									
4  migrations/mysql/2023-09-10-133000_add_sso/up.sql  (Normal file)
							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
| 	state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|   	nonce               TEXT NOT NULL, | ||||
|   	redirect_uri 		TEXT NOT NULL, | ||||
|   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|   	nonce               TEXT NOT NULL, | ||||
|     verifier            TEXT, | ||||
|   	redirect_uri 		TEXT NOT NULL, | ||||
|   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
							
								
								
									
7  migrations/mysql/2024-03-06-170000_add_sso_users/up.sql  (Normal file)
							| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          VARCHAR(768) NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,2 @@ | ||||
| ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; | ||||
| ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||
							
								
								
									
5  migrations/mysql/2025-01-09-172300_add_manage/up.sql  (Normal file)
							| @@ -0,0 +1,5 @@ | ||||
| ALTER TABLE users_collections | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE; | ||||
|  | ||||
| ALTER TABLE collections_groups | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE; | ||||
1  migrations/postgresql/2023-09-10-133000_add_sso/down.sql  Normal file
							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
4  migrations/postgresql/2023-09-10-133000_add_sso/up.sql  Normal file
							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               TEXT NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               TEXT NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               TEXT NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     verifier            TEXT, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,3 @@ | ||||
| ALTER TABLE sso_users | ||||
|   DROP CONSTRAINT "sso_users_user_uuid_fkey", | ||||
|   ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||
| @@ -0,0 +1,5 @@ | ||||
| ALTER TABLE users_collections | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE; | ||||
|  | ||||
| ALTER TABLE collections_groups | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT FALSE; | ||||
1  migrations/sqlite/2023-09-10-133000_add_sso/down.sql  Normal file
							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
4  migrations/sqlite/2023-09-10-133000_add_sso/up.sql  Normal file
							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   verifier            TEXT, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
7  migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql  Normal file
							| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
|  | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE | ||||
| ); | ||||
5  migrations/sqlite/2025-01-09-172300_add_manage/up.sql  Normal file
							| @@ -0,0 +1,5 @@ | ||||
| ALTER TABLE users_collections | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT 0; -- FALSE | ||||
|  | ||||
| ALTER TABLE collections_groups | ||||
| ADD COLUMN manage BOOLEAN NOT NULL DEFAULT 0; -- FALSE | ||||
64  playwright/.env.template  Normal file
							| @@ -0,0 +1,64 @@ | ||||
| ################################# | ||||
| ### Conf to run dev instances ### | ||||
| ################################# | ||||
| ENV=dev | ||||
| DC_ENV_FILE=.env | ||||
| COMPOSE_IGNORE_ORPHANS=True | ||||
| DOCKER_BUILDKIT=1 | ||||
|  | ||||
| ################ | ||||
| # Users Config # | ||||
| ################ | ||||
| TEST_USER=test | ||||
| TEST_USER_PASSWORD=${TEST_USER} | ||||
| TEST_USER_MAIL=${TEST_USER}@yopmail.com | ||||
|  | ||||
| TEST_USER2=test2 | ||||
| TEST_USER2_PASSWORD=${TEST_USER2} | ||||
| TEST_USER2_MAIL=${TEST_USER2}@yopmail.com | ||||
|  | ||||
| TEST_USER3=test3 | ||||
| TEST_USER3_PASSWORD=${TEST_USER3} | ||||
| TEST_USER3_MAIL=${TEST_USER3}@yopmail.com | ||||
|  | ||||
| ################### | ||||
| # Keycloak Config # | ||||
| ################### | ||||
| KEYCLOAK_ADMIN=admin | ||||
| KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||
| KC_HTTP_HOST=127.0.0.1 | ||||
| KC_HTTP_PORT=8080 | ||||
|  | ||||
| # Script parameters (use Keycloak and Vaultwarden config too) | ||||
| TEST_REALM=test | ||||
| DUMMY_REALM=dummy | ||||
| DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||
|  | ||||
| ###################### | ||||
| # Vaultwarden Config # | ||||
| ###################### | ||||
| ROCKET_ADDRESS=0.0.0.0 | ||||
| ROCKET_PORT=8000 | ||||
| DOMAIN=http://localhost:${ROCKET_PORT} | ||||
| LOG_LEVEL=info,oidcwarden::sso=debug | ||||
| I_REALLY_WANT_VOLATILE_STORAGE=true | ||||
|  | ||||
| SSO_ENABLED=true | ||||
| SSO_ONLY=false | ||||
| SSO_CLIENT_ID=warden | ||||
| SSO_CLIENT_SECRET=warden | ||||
| SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||
|  | ||||
| SMTP_HOST=127.0.0.1 | ||||
| SMTP_PORT=1025 | ||||
| SMTP_SECURITY=off | ||||
| SMTP_TIMEOUT=5 | ||||
| SMTP_FROM=vaultwarden@test | ||||
| SMTP_FROM_NAME=Vaultwarden | ||||
|  | ||||
| ######################################################## | ||||
| # DUMMY values for docker-compose to stop bothering us # | ||||
| ######################################################## | ||||
| MARIADB_PORT=3305 | ||||
| MYSQL_PORT=3307 | ||||
| POSTGRES_PORT=5432 | ||||
6  playwright/.gitignore  vendored  Normal file
							| @@ -0,0 +1,6 @@ | ||||
| logs | ||||
| node_modules/ | ||||
| /test-results/ | ||||
| /playwright-report/ | ||||
| /playwright/.cache/ | ||||
| temp | ||||
177  playwright/README.md  Normal file
							| @@ -0,0 +1,177 @@ | ||||
| # Integration tests | ||||
|  | ||||
| This allows running integration tests using [Playwright](https://playwright.dev/). | ||||
|  | ||||
| It uses its own `test.env` with different ports so it does not collide with a running dev instance. | ||||
|  | ||||
| ## Install | ||||
|  | ||||
| This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||
| Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers. | ||||
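|  | ||||
| If you want to check the prerequisites first, something like this should be enough (a quick sanity check, assuming a recent Docker installation): | ||||
|  | ||||
| ```bash | ||||
| docker --version          # the Docker engine itself | ||||
| docker compose version    # the compose plugin | ||||
| ``` | ||||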
|  | ||||
| ### Running Playwright outside docker | ||||
|  | ||||
| It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change. | ||||
| You will additionally need `nodejs`; then run: | ||||
|  | ||||
| ```bash | ||||
| npm install | ||||
| npx playwright install-deps | ||||
| npx playwright install firefox | ||||
| ``` | ||||
|  | ||||
| ## Usage | ||||
|  | ||||
| To run all the tests: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright | ||||
| ``` | ||||
|  | ||||
| To force a rebuild of the Playwright image: | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright | ||||
| ``` | ||||
|  | ||||
| To access the UI, which makes it easier to run tests individually and debug them if needed (this will not work in docker): | ||||
|  | ||||
| ```bash | ||||
| npx playwright test --ui | ||||
| ``` | ||||
|  | ||||
| ### DB | ||||
|  | ||||
| Projects are configured to allow running the tests against a specific database only. | ||||
|  | ||||
| You can use: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite | ||||
| ``` | ||||
|  | ||||
| ### SSO | ||||
|  | ||||
| To run the SSO tests: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite | ||||
| ``` | ||||
|  | ||||
| ### Keep services running | ||||
|  | ||||
| If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests): | ||||
|  | ||||
| ```bash | ||||
| PW_KEEP_SERVICE_RUNNNING=true npx playwright test | ||||
| ``` | ||||
|  | ||||
| ### Running specific tests | ||||
|  | ||||
| To run a whole test file you can use: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login | ||||
| ``` | ||||
|  | ||||
| To run only a specific test (it might fail if it has dependencies): | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation" | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16 | ||||
| ``` | ||||
|  | ||||
| ## Writing scenario | ||||
|  | ||||
| When creating a new scenario, use the recorder to more easily identify elements | ||||
| (in general, try to rely on visible hints to identify elements rather than hidden IDs). | ||||
| This does not start the server; you will need to start it manually. | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||
| npx playwright codegen "http://127.0.0.1:8003" | ||||
| ``` | ||||
|  | ||||
| ## Override web-vault | ||||
|  | ||||
| It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit. | ||||
|  | ||||
| The simplest way is to uncomment and set `PW_WV_REPO_URL` and `PW_WV_COMMIT_HASH` in `test.env`. | ||||
| Ensure that the image is built with: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Vaultwarden | ||||
| ``` | ||||
|  | ||||
| You can check the result by running: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||
| ``` | ||||
|  | ||||
| # OpenID Connect test setup | ||||
|  | ||||
| Additionally, this `docker-compose` template allows you to run Vaultwarden, | ||||
| [Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC. | ||||
|  | ||||
| ## Setup | ||||
|  | ||||
| This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||
| First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`). | ||||
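|  | ||||
| A minimal sketch of that first step (assuming you are in the `playwright/` directory, where `.env.template` lives): | ||||
|  | ||||
| ```bash | ||||
| cp .env.template .env | ||||
| # then adjust your custom settings (e.g. SMTP_*) in .env before starting the stack | ||||
| ``` | ||||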
|  | ||||
| ## Usage | ||||
|  | ||||
| Then start the stack (the `profile` is required to run `Vaultwarden`): | ||||
|  | ||||
| ```bash | ||||
| > docker compose --profile vaultwarden --env-file .env up | ||||
| .... | ||||
| keycloakSetup_1  | Logging into http://127.0.0.1:8080 as user admin of realm master | ||||
| keycloakSetup_1  | Created new realm with id 'test' | ||||
| keycloakSetup_1  | 74af4933-e386-4e64-ba15-a7b61212c45e | ||||
| oidc_keycloakSetup_1 exited with code 0 | ||||
| ``` | ||||
|  | ||||
| Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates that the Keycloak realm, client and user were set up correctly | ||||
| (it is normal for this container to stop once the configuration is done). | ||||
|  | ||||
| Then you can access: | ||||
|  | ||||
| - `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`. | ||||
| - `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin` | ||||
| - `Maildev` on http://0.0.0.0:1080 | ||||
|  | ||||
| To proceed with an SSO login, enter the email; the SSO button should then be visible on the screen prompting for the `Master Password`. | ||||
| To use your computer's external IP (for example when testing with a phone) you will have to configure `KC_HTTP_HOST` and `DOMAIN`. | ||||
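|  | ||||
| As an illustration, a hypothetical override in `.env` for a machine reachable at `192.168.1.10` could look like this (the IP address is only a placeholder): | ||||
|  | ||||
| ```bash | ||||
| KC_HTTP_HOST=192.168.1.10 | ||||
| DOMAIN=http://192.168.1.10:8000 | ||||
| ``` | ||||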
|  | ||||
| ## Running only Keycloak | ||||
|  | ||||
| You can run just `Keycloak` with `--profile keycloak`: | ||||
|  | ||||
| ```bash | ||||
| > docker compose --profile keycloak --env-file .env up | ||||
| ``` | ||||
| When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases). | ||||
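|  | ||||
| One possible way to use such a build with a locally compiled Vaultwarden (a sketch, assuming Vaultwarden's `WEB_VAULT_FOLDER` setting and a release archive already extracted to `./web-vault`): | ||||
|  | ||||
| ```bash | ||||
| # point the local build at the extracted web-vault and enable SSO | ||||
| WEB_VAULT_FOLDER=./web-vault SSO_ENABLED=true cargo run | ||||
| ``` | ||||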
|  | ||||
| ## Rebuilding Vaultwarden | ||||
|  | ||||
| To force rebuilding the Vaultwarden image, you can run: | ||||
|  | ||||
| ```bash | ||||
| docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden | ||||
| ``` | ||||
|  | ||||
| ## Configuration | ||||
|  | ||||
| All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template). | ||||
| The content of the file will be loaded as environment variables in all containers. | ||||
|  | ||||
| - `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)). | ||||
| - All `Vaultwarden` configuration settings can be set (e.g. `SMTP_*`); see the sketch below. | ||||
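|  | ||||
| As an illustration, a few lines you might add to `.env` (values are placeholders, not recommendations): | ||||
|  | ||||
| ```bash | ||||
| # Vaultwarden settings are picked up directly | ||||
| SMTP_FROM=me@example.com | ||||
| SMTP_DEBUG=true | ||||
|  | ||||
| # Keycloak reads KEYCLOAK_ADMIN / KEYCLOAK_ADMIN_PASSWORD and any KC_* variable | ||||
| KC_HTTP_PORT=8080 | ||||
| ``` | ||||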
|  | ||||
| ## Cleanup | ||||
|  | ||||
| Use `docker compose --profile vaultwarden down`. | ||||
40  playwright/compose/keycloak/Dockerfile  Normal file
							| @@ -0,0 +1,40 @@ | ||||
| FROM docker.io/library/debian:bookworm-slim AS build | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
| ARG KEYCLOAK_VERSION | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl wget \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
| RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz | ||||
|  | ||||
| FROM docker.io/library/debian:bookworm-slim | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
| ARG KEYCLOAK_VERSION | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl wget \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| ARG JAVA_URL | ||||
| ARG JAVA_VERSION | ||||
|  | ||||
| ENV JAVA_VERSION=${JAVA_VERSION} | ||||
|  | ||||
| RUN mkdir -p /opt/openjdk && cd /opt/openjdk \ | ||||
|     && wget -c "${JAVA_URL}"  -O - | tar -xz | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
| COPY setup.sh /setup.sh | ||||
| COPY --from=build /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin | ||||
|  | ||||
| CMD "/setup.sh" | ||||
36  playwright/compose/keycloak/setup.sh  Executable file
							| @@ -0,0 +1,36 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| export PATH=/opt/keycloak/bin:/opt/openjdk/jdk-${JAVA_VERSION}/bin:$PATH | ||||
| export JAVA_HOME=/opt/openjdk/jdk-${JAVA_VERSION} | ||||
|  | ||||
| STATUS_CODE=0 | ||||
| while [[ "$STATUS_CODE" != "404" ]] ; do | ||||
|     echo "Will retry in 2 seconds" | ||||
|     sleep 2 | ||||
|  | ||||
|     STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}"  "$DUMMY_AUTHORITY") | ||||
|  | ||||
|     if [[ "$STATUS_CODE" = "200" ]]; then | ||||
|         echo "Setup should already be done. Will not run." | ||||
|         exit 0 | ||||
|     fi | ||||
| done | ||||
|  | ||||
| set -e | ||||
|  | ||||
| kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli | ||||
|  | ||||
| kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||
| kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i | ||||
|  | ||||
| TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n | ||||
|  | ||||
| TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n | ||||
|  | ||||
| TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n | ||||
|  | ||||
| # Dummy realm to mark end of setup | ||||
| kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||
40  playwright/compose/playwright/Dockerfile  Normal file
							| @@ -0,0 +1,40 @@ | ||||
| FROM docker.io/library/debian:bookworm-slim | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl \ | ||||
|     && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \ | ||||
|     && chmod a+r /etc/apt/keyrings/docker.asc \ | ||||
|     && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" | tee /etc/apt/sources.list.d/docker.list \ | ||||
|     && apt-get update \ | ||||
|     && apt-get install -y --no-install-recommends \ | ||||
|         containerd.io \ | ||||
|         docker-buildx-plugin \ | ||||
|         docker-ce \ | ||||
|         docker-ce-cli \ | ||||
|         docker-compose-plugin \ | ||||
|         git \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libpq5 \ | ||||
|         nodejs \ | ||||
|         npm \ | ||||
|         openssl \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| RUN mkdir /playwright | ||||
| WORKDIR /playwright | ||||
|  | ||||
| COPY package.json . | ||||
| RUN npm install && npx playwright install-deps && npx playwright install firefox | ||||
|  | ||||
| COPY docker-compose.yml test.env ./ | ||||
| COPY compose ./compose | ||||
|  | ||||
| COPY *.ts test.env ./ | ||||
| COPY tests ./tests | ||||
|  | ||||
| ENTRYPOINT ["/usr/bin/npx", "playwright"] | ||||
| CMD ["test"] | ||||
40  playwright/compose/warden/Dockerfile  Normal file
							| @@ -0,0 +1,40 @@ | ||||
| FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt | ||||
|  | ||||
| FROM node:22-trixie AS build | ||||
|  | ||||
| ARG REPO_URL | ||||
| ARG COMMIT_HASH | ||||
|  | ||||
| ENV REPO_URL=$REPO_URL | ||||
| ENV COMMIT_HASH=$COMMIT_HASH | ||||
|  | ||||
| COPY --from=prebuilt /web-vault /web-vault | ||||
|  | ||||
| COPY build.sh /build.sh | ||||
| RUN /build.sh | ||||
|  | ||||
| ######################## RUNTIME IMAGE  ######################## | ||||
| FROM docker.io/library/debian:trixie-slim | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
|  | ||||
| # Create data folder and Install needed libraries | ||||
| RUN mkdir /data && \ | ||||
|     apt-get update && apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| # Copies the files from the context (Rocket.toml file and web-vault) | ||||
| # and the binary from the "build" stage to the current stage | ||||
| WORKDIR / | ||||
|  | ||||
| COPY --from=prebuilt /start.sh . | ||||
| COPY --from=prebuilt /vaultwarden . | ||||
| COPY --from=build /web-vault ./web-vault | ||||
|  | ||||
| ENTRYPOINT ["/start.sh"] | ||||
23  playwright/compose/warden/build.sh  Executable file
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| echo $REPO_URL | ||||
| echo $COMMIT_HASH | ||||
|  | ||||
| if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then | ||||
|     rm -rf /web-vault | ||||
|  | ||||
|     mkdir bw_web_builds; | ||||
|     cd bw_web_builds; | ||||
|  | ||||
|     git -c init.defaultBranch=main init | ||||
|     git remote add origin "$REPO_URL" | ||||
|     git fetch --depth 1 origin "$COMMIT_HASH" | ||||
|     git -c advice.detachedHead=false checkout FETCH_HEAD | ||||
|  | ||||
|     export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2) | ||||
|     ./scripts/checkout_web_vault.sh | ||||
|     ./scripts/build_web_vault.sh | ||||
|     printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json | ||||
|  | ||||
|     mv ./web-vault/apps/web/build /web-vault | ||||
| fi | ||||
124  playwright/docker-compose.yml  Normal file
							| @@ -0,0 +1,124 @@ | ||||
| services: | ||||
|   VaultwardenPrebuild: | ||||
|     profiles: ["playwright", "vaultwarden"] | ||||
|     container_name: playwright_oidc_vaultwarden_prebuilt | ||||
|     image: playwright_oidc_vaultwarden_prebuilt | ||||
|     build: | ||||
|       context: .. | ||||
|       dockerfile: Dockerfile | ||||
|     entrypoint: /bin/bash | ||||
|     restart: "no" | ||||
|  | ||||
|   Vaultwarden: | ||||
|     profiles: ["playwright", "vaultwarden"] | ||||
|     container_name: playwright_oidc_vaultwarden-${ENV:-dev} | ||||
|     image: playwright_oidc_vaultwarden-${ENV:-dev} | ||||
|     network_mode: "host" | ||||
|     build: | ||||
|       context: compose/warden | ||||
|       dockerfile: Dockerfile | ||||
|       args: | ||||
|         REPO_URL: ${PW_WV_REPO_URL:-} | ||||
|         COMMIT_HASH: ${PW_WV_COMMIT_HASH:-} | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
|     environment: | ||||
|       - DATABASE_URL | ||||
|       - I_REALLY_WANT_VOLATILE_STORAGE | ||||
|       - LOG_LEVEL | ||||
|       - LOGIN_RATELIMIT_MAX_BURST | ||||
|       - SMTP_HOST | ||||
|       - SMTP_FROM | ||||
|       - SMTP_DEBUG | ||||
|       - SSO_DEBUG_TOKENS | ||||
|       - SSO_FRONTEND | ||||
|       - SSO_ENABLED | ||||
|       - SSO_ONLY | ||||
|     restart: "no" | ||||
|     depends_on: | ||||
|       - VaultwardenPrebuild | ||||
|  | ||||
|   Playwright: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_oidc_playwright | ||||
|     image: playwright_oidc_playwright | ||||
|     network_mode: "host" | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: compose/playwright/Dockerfile | ||||
|     environment: | ||||
|       - PW_WV_REPO_URL | ||||
|       - PW_WV_COMMIT_HASH | ||||
|     restart: "no" | ||||
|     volumes: | ||||
|       - /var/run/docker.sock:/var/run/docker.sock | ||||
|       - ..:/project | ||||
|  | ||||
|   Mariadb: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_mariadb | ||||
|     image: mariadb:11.2.4 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] | ||||
|       start_period: 10s | ||||
|       interval: 10s | ||||
|     ports: | ||||
|       - ${MARIADB_PORT}:3306 | ||||
|  | ||||
|   Mysql: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_mysql | ||||
|     image: mysql:8.4.1 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] | ||||
|       start_period: 10s | ||||
|       interval: 10s | ||||
|     ports: | ||||
|       - ${MYSQL_PORT}:3306 | ||||
|  | ||||
|   Postgres: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_postgres | ||||
|     image: postgres:16.3 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"] | ||||
|       start_period: 20s | ||||
|       interval: 30s | ||||
|     ports: | ||||
|       - ${POSTGRES_PORT}:5432 | ||||
|  | ||||
|   Maildev: | ||||
|     profiles: ["vaultwarden", "maildev"] | ||||
|     container_name: maildev | ||||
|     image: timshel/maildev:3.0.4 | ||||
|     ports: | ||||
|       - ${SMTP_PORT}:1025 | ||||
|       - 1080:1080 | ||||
|  | ||||
|   Keycloak: | ||||
|     profiles: ["keycloak", "vaultwarden"] | ||||
|     container_name: keycloak-${ENV:-dev} | ||||
|     image: quay.io/keycloak/keycloak:25.0.4 | ||||
|     network_mode: "host" | ||||
|     command: | ||||
|       - start-dev | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
|  | ||||
|   KeycloakSetup: | ||||
|     profiles: ["keycloak", "vaultwarden"] | ||||
|     container_name: keycloakSetup-${ENV:-dev} | ||||
|     image: keycloak_setup-${ENV:-dev} | ||||
|     build: | ||||
|       context: compose/keycloak | ||||
|       dockerfile: Dockerfile | ||||
|       args: | ||||
|         KEYCLOAK_VERSION: 25.0.4 | ||||
|         JAVA_URL: https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | ||||
|         JAVA_VERSION: 21.0.2 | ||||
|     network_mode: "host" | ||||
|     depends_on: | ||||
|       - Keycloak | ||||
|     restart: "no" | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
22  playwright/global-setup.ts  Normal file
							| @@ -0,0 +1,22 @@ | ||||
| import { firefox, type FullConfig } from '@playwright/test'; | ||||
| import { execSync } from 'node:child_process'; | ||||
| import fs from 'fs'; | ||||
|  | ||||
| const utils = require('./global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| async function globalSetup(config: FullConfig) { | ||||
|     // Are we running in docker and the project is mounted ? | ||||
|     const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : "."); | ||||
|     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, { | ||||
|         env: { ...process.env }, | ||||
|         stdio: "inherit" | ||||
|     }); | ||||
|     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, { | ||||
|         env: { ...process.env }, | ||||
|         stdio: "inherit" | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export default globalSetup; | ||||
246  playwright/global-utils.ts  Normal file
							| @@ -0,0 +1,246 @@ | ||||
| import { expect, type Browser, type Page, type TestInfo } from '@playwright/test'; | ||||
| import { EventEmitter } from "events"; | ||||
| import { type Mail, MailServer } from 'maildev'; | ||||
| import { execSync } from 'node:child_process'; | ||||
|  | ||||
| import dotenv from 'dotenv'; | ||||
| import dotenvExpand from 'dotenv-expand'; | ||||
|  | ||||
| const fs = require("fs"); | ||||
| const { spawn } = require('node:child_process'); | ||||
|  | ||||
| export function loadEnv(){ | ||||
|     var myEnv = dotenv.config({ path: 'test.env' }); | ||||
|     dotenvExpand.expand(myEnv); | ||||
|  | ||||
|     return { | ||||
|         user1: { | ||||
|             email: process.env.TEST_USER_MAIL, | ||||
|             name: process.env.TEST_USER, | ||||
|             password: process.env.TEST_USER_PASSWORD, | ||||
|         }, | ||||
|         user2: { | ||||
|             email: process.env.TEST_USER2_MAIL, | ||||
|             name: process.env.TEST_USER2, | ||||
|             password: process.env.TEST_USER2_PASSWORD, | ||||
|         }, | ||||
|         user3: { | ||||
|             email: process.env.TEST_USER3_MAIL, | ||||
|             name: process.env.TEST_USER3, | ||||
|             password: process.env.TEST_USER3_PASSWORD, | ||||
|         }, | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function waitFor(url: String, browser: Browser) { | ||||
|     var ready = false; | ||||
|     var context; | ||||
|  | ||||
|     do { | ||||
|         try { | ||||
|             context = await browser.newContext(); | ||||
|             const page = await context.newPage(); | ||||
|             await page.waitForTimeout(500); | ||||
|             const result = await page.goto(url); | ||||
|             ready = result.status() === 200; | ||||
|         } catch(e) { | ||||
|             if( !e.message.includes("CONNECTION_REFUSED") ){ | ||||
|                 throw e; | ||||
|             } | ||||
|         } finally { | ||||
|             await context.close(); | ||||
|         } | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| export function startComposeService(serviceName: String){ | ||||
|     console.log(`Starting ${serviceName}`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env  up -d ${serviceName}`); | ||||
| } | ||||
|  | ||||
| export function stopComposeService(serviceName: String){ | ||||
|     console.log(`Stopping ${serviceName}`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env  stop ${serviceName}`); | ||||
| } | ||||
|  | ||||
| function wipeSqlite(){ | ||||
|     console.log(`Delete Vaultwarden container to wipe sqlite`); | ||||
|     execSync(`docker compose --env-file test.env stop Vaultwarden`); | ||||
|     execSync(`docker compose --env-file test.env rm -f Vaultwarden`); | ||||
| } | ||||
|  | ||||
| async function wipeMariaDB(){ | ||||
|     var mysql = require('mysql2/promise'); | ||||
|     var ready = false; | ||||
|     var connection; | ||||
|  | ||||
|     do { | ||||
|         try { | ||||
|             connection = await mysql.createConnection({ | ||||
|                 user: process.env.MARIADB_USER, | ||||
|                 host: "127.0.0.1", | ||||
|                 database: process.env.MARIADB_DATABASE, | ||||
|                 password: process.env.MARIADB_PASSWORD, | ||||
|                 port: process.env.MARIADB_PORT, | ||||
|             }); | ||||
|  | ||||
|             await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`); | ||||
|             await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`); | ||||
|             console.log('Successfully wiped mariadb'); | ||||
|             ready = true; | ||||
|         } catch (err) { | ||||
|             console.log(`Error when wiping mariadb: ${err}`); | ||||
|         } finally { | ||||
|             if( connection ){ | ||||
|                 connection.end(); | ||||
|             } | ||||
|         } | ||||
|         await new Promise(r => setTimeout(r, 1000)); | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| async function wipeMysqlDB(){ | ||||
|     var mysql = require('mysql2/promise'); | ||||
|     var ready = false; | ||||
|     var connection; | ||||
|  | ||||
|     do{ | ||||
|         try { | ||||
|             connection = await mysql.createConnection({ | ||||
|                 user: process.env.MYSQL_USER, | ||||
|                 host: "127.0.0.1", | ||||
|                 database: process.env.MYSQL_DATABASE, | ||||
|                 password: process.env.MYSQL_PASSWORD, | ||||
|                 port: process.env.MYSQL_PORT, | ||||
|             }); | ||||
|  | ||||
|             await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`); | ||||
|             await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`); | ||||
|             console.log('Successfully wiped mysql'); | ||||
|             ready = true; | ||||
|         } catch (err) { | ||||
|             console.log(`Error when wiping mysql: ${err}`); | ||||
|         } finally { | ||||
|             if( connection ){ | ||||
|                 connection.end(); | ||||
|             } | ||||
|         } | ||||
|         await new Promise(r => setTimeout(r, 1000)); | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| async function wipePostgres(){ | ||||
|     const { Client } = require('pg'); | ||||
|  | ||||
|     const client = new Client({ | ||||
|         user: process.env.POSTGRES_USER, | ||||
|         host: "127.0.0.1", | ||||
|         database: "postgres", | ||||
|         password: process.env.POSTGRES_PASSWORD, | ||||
|         port: process.env.POSTGRES_PORT, | ||||
|     }); | ||||
|  | ||||
|     try { | ||||
|         await client.connect(); | ||||
|         await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`); | ||||
|         await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`); | ||||
|         console.log('Successfully wiped postgres'); | ||||
|     } catch (err) { | ||||
|         console.log(`Error when wiping postgres: ${err}`); | ||||
|     } finally { | ||||
|         client.end(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| function dbConfig(testInfo: TestInfo){ | ||||
|     switch(testInfo.project.name) { | ||||
|         case "postgres": | ||||
|         case "sso-postgres": | ||||
|             return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` }; | ||||
|         case "mariadb": | ||||
|         case "sso-mariadb": | ||||
|             return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` }; | ||||
|         case "mysql": | ||||
|         case "sso-mysql": | ||||
|             return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}`}; | ||||
|         case "sqlite": | ||||
|         case "sso-sqlite": | ||||
|             return { I_REALLY_WANT_VOLATILE_STORAGE: true }; | ||||
|         default: | ||||
|             throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| /** | ||||
|  *  All parameters passed in `env` need to be added to the docker-compose.yml | ||||
|  **/ | ||||
| export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: Boolean = true) { | ||||
|     if( resetDB ){ | ||||
|         switch(testInfo.project.name) { | ||||
|             case "postgres": | ||||
|             case "sso-postgres": | ||||
|                 await wipePostgres(); | ||||
|                 break; | ||||
|             case "mariadb": | ||||
|             case "sso-mariadb": | ||||
|                 await wipeMariaDB(); | ||||
|                 break; | ||||
|             case "mysql": | ||||
|             case "sso-mysql": | ||||
|                 await wipeMysqlDB(); | ||||
|                 break; | ||||
|             case "sqlite": | ||||
|             case "sso-sqlite": | ||||
|                 wipeSqlite(); | ||||
|                 break; | ||||
|             default: | ||||
|                 throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     console.log(`Starting Vaultwarden`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, { | ||||
|         env: { ...env, ...dbConfig(testInfo) }, | ||||
|     }); | ||||
|     await waitFor("/", browser); | ||||
|     console.log(`Vaultwarden running on: ${process.env.DOMAIN}`); | ||||
| } | ||||
|  | ||||
| export async function stopVault(force: boolean = false) { | ||||
|     if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||
|         console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`); | ||||
|     } else { | ||||
|         console.log(`Vaultwarden stopping`); | ||||
|         execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: Boolean = true) { | ||||
|     stopVault(true); | ||||
|     return startVault(page.context().browser(), testInfo, env, resetDB); | ||||
| } | ||||
|  | ||||
| export async function checkNotification(page: Page, hasText: string) { | ||||
|     await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible(); | ||||
|     await page.locator('bit-toast').filter({ hasText }).getByRole('button').click(); | ||||
|     await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0); | ||||
| } | ||||
|  | ||||
| export async function cleanLanding(page: Page) { | ||||
|     await page.goto('/', { waitUntil: 'domcontentloaded' }); | ||||
|     await expect(page.getByRole('button').nth(0)).toBeVisible(); | ||||
|  | ||||
|     const logged = await page.getByRole('button', { name: 'Log out' }).count(); | ||||
|     if( logged > 0 ){ | ||||
|         await page.getByRole('button', { name: 'Log out' }).click(); | ||||
|         await page.getByRole('button', { name: 'Log out' }).click(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function logout(test: Test, page: Page, user: { name: string }) { | ||||
|     await test.step('logout', async () => { | ||||
|         await page.getByRole('button', { name: user.name, exact: true }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Log out' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
2594  playwright/package-lock.json  generated  Normal file  (File diff suppressed because it is too large)
21  playwright/package.json  Normal file
							| @@ -0,0 +1,21 @@ | ||||
| { | ||||
|     "name": "scenarios", | ||||
|     "version": "1.0.0", | ||||
|     "description": "", | ||||
|     "main": "index.js", | ||||
|     "scripts": {}, | ||||
|     "keywords": [], | ||||
|     "author": "", | ||||
|     "license": "ISC", | ||||
|     "devDependencies": { | ||||
|         "@playwright/test": "^1.54.2", | ||||
|         "dotenv": "^16.6.1", | ||||
|         "dotenv-expand": "^12.0.2", | ||||
|         "maildev": "npm:@timshel_npm/maildev@^3.2.1" | ||||
|     }, | ||||
|     "dependencies": { | ||||
|         "mysql2": "^3.14.3", | ||||
|         "otpauth": "^9.4.0", | ||||
|         "pg": "^8.16.3" | ||||
|     } | ||||
| } | ||||
143  playwright/playwright.config.ts  Normal file
							| @@ -0,0 +1,143 @@ | ||||
| import { defineConfig, devices } from '@playwright/test'; | ||||
| import { exec } from 'node:child_process'; | ||||
|  | ||||
| const utils = require('./global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| /** | ||||
|  * See https://playwright.dev/docs/test-configuration. | ||||
|  */ | ||||
| export default defineConfig({ | ||||
|     testDir: './.', | ||||
|     /* Run tests in files in parallel */ | ||||
|     fullyParallel: false, | ||||
|  | ||||
|     /* Fail the build on CI if you accidentally left test.only in the source code. */ | ||||
|     forbidOnly: !!process.env.CI, | ||||
|  | ||||
|     retries: 0, | ||||
|     workers: 1, | ||||
|  | ||||
|     /* Reporter to use. See https://playwright.dev/docs/test-reporters */ | ||||
|     reporter: 'html', | ||||
|  | ||||
|     /* Long global timeout for complex tests | ||||
|      * But short action/nav/expect timeouts to fail on specific step (raise locally if not enough). | ||||
|      */ | ||||
|     timeout: 120 * 1000, | ||||
|     actionTimeout: 20 * 1000, | ||||
|     navigationTimeout: 20 * 1000, | ||||
|     expect: { timeout: 20 * 1000 }, | ||||
|  | ||||
|     /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ | ||||
|     use: { | ||||
|         /* Base URL to use in actions like `await page.goto('/')`. */ | ||||
|         baseURL: process.env.DOMAIN, | ||||
|         browserName: 'firefox', | ||||
|         locale: 'en-GB', | ||||
|         timezoneId: 'Europe/London', | ||||
|  | ||||
|         /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */ | ||||
|         trace: 'on', | ||||
|         viewport: { | ||||
|             width: 1080, | ||||
|             height: 720, | ||||
|         }, | ||||
|         video: "on", | ||||
|     }, | ||||
|  | ||||
|     /* Configure projects for major browsers */ | ||||
|     projects: [ | ||||
|         { | ||||
|             name: 'mariadb-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Mariadb" }, | ||||
|             teardown: 'mariadb-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Mysql" }, | ||||
|             teardown: 'mysql-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Postgres" }, | ||||
|             teardown: 'postgres-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-setup', | ||||
|             testMatch: 'tests/setups/sso-setup.ts', | ||||
|             teardown: 'sso-teardown', | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'mariadb', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['mariadb-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['mysql-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['postgres-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sqlite', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'sso-mariadb', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'mariadb-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-mysql', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'mysql-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-postgres', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'postgres-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-sqlite', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup'], | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'mariadb-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Mariadb" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Mysql" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Postgres" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-teardown', | ||||
|             testMatch: 'tests/setups/sso-teardown.ts', | ||||
|         }, | ||||
|     ], | ||||
|  | ||||
|     globalSetup: require.resolve('./global-setup'), | ||||
| }); | ||||
97  playwright/test.env  Normal file
							| @@ -0,0 +1,97 @@ | ||||
| ################################################################## | ||||
| ### Shared Playwright conf test file Vaultwarden and Databases ### | ||||
| ################################################################## | ||||
|  | ||||
| ENV=test | ||||
| DC_ENV_FILE=test.env | ||||
| COMPOSE_IGNORE_ORPHANS=True | ||||
| DOCKER_BUILDKIT=1 | ||||
|  | ||||
| ##################### | ||||
| # Playwright Config # | ||||
| ##################### | ||||
| PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false} | ||||
| PW_SMTP_FROM=vaultwarden@playwright.test | ||||
|  | ||||
| ##################### | ||||
| # Maildev Config 	# | ||||
| ##################### | ||||
| MAILDEV_HTTP_PORT=1081 | ||||
| MAILDEV_SMTP_PORT=1026 | ||||
| MAILDEV_HOST=127.0.0.1 | ||||
|  | ||||
| ################ | ||||
| # Users Config # | ||||
| ################ | ||||
| TEST_USER=test | ||||
| TEST_USER_PASSWORD=Master Password | ||||
| TEST_USER_MAIL=${TEST_USER}@example.com | ||||
|  | ||||
| TEST_USER2=test2 | ||||
| TEST_USER2_PASSWORD=Master Password | ||||
| TEST_USER2_MAIL=${TEST_USER2}@example.com | ||||
|  | ||||
| TEST_USER3=test3 | ||||
| TEST_USER3_PASSWORD=Master Password | ||||
| TEST_USER3_MAIL=${TEST_USER3}@example.com | ||||
|  | ||||
| ################### | ||||
| # Keycloak Config # | ||||
| ################### | ||||
| KEYCLOAK_ADMIN=admin | ||||
| KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||
| KC_HTTP_HOST=127.0.0.1 | ||||
| KC_HTTP_PORT=8081 | ||||
|  | ||||
| # Script parameters (use Keycloak and Vaultwarden config too) | ||||
| TEST_REALM=test | ||||
| DUMMY_REALM=dummy | ||||
| DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||
|  | ||||
| ###################### | ||||
| # Vaultwarden Config # | ||||
| ###################### | ||||
| ROCKET_PORT=8003 | ||||
| DOMAIN=http://localhost:${ROCKET_PORT} | ||||
| LOG_LEVEL=info,oidcwarden::sso=debug | ||||
| LOGIN_RATELIMIT_MAX_BURST=100 | ||||
|  | ||||
| SMTP_SECURITY=off | ||||
| SMTP_PORT=${MAILDEV_SMTP_PORT} | ||||
| SMTP_FROM_NAME=Vaultwarden | ||||
| SMTP_TIMEOUT=5 | ||||
|  | ||||
| SSO_CLIENT_ID=warden | ||||
| SSO_CLIENT_SECRET=warden | ||||
| SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||
| SSO_DEBUG_TOKENS=true | ||||
|  | ||||
| # Custom web-vault build | ||||
| # PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git | ||||
| # PW_WV_COMMIT_HASH=a5f5390895516bce2f48b7baadb6dc399e5fe75a | ||||
|  | ||||
| ########################### | ||||
| # Docker MariaDB container# | ||||
| ########################### | ||||
| MARIADB_PORT=3307 | ||||
| MARIADB_ROOT_PASSWORD=warden | ||||
| MARIADB_USER=warden | ||||
| MARIADB_PASSWORD=warden | ||||
| MARIADB_DATABASE=warden | ||||
|  | ||||
| ########################### | ||||
| # Docker MySQL container# | ||||
| ########################### | ||||
| MYSQL_PORT=3309 | ||||
| MYSQL_ROOT_PASSWORD=warden | ||||
| MYSQL_USER=warden | ||||
| MYSQL_PASSWORD=warden | ||||
| MYSQL_DATABASE=warden | ||||
|  | ||||
| ############################ | ||||
| # Docker Postgres container# | ||||
| ############################ | ||||
| POSTGRES_PORT=5433 | ||||
| POSTGRES_USER=warden | ||||
| POSTGRES_PASSWORD=warden | ||||
| POSTGRES_DB=warden | ||||
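The spec files below read these values through utils.loadEnv() from global-utils, which is not part of this excerpt. As a rough sketch of the idea only (assuming dotenv and dotenv-expand resolve the ${TEST_USER}-style references, and that the returned object mirrors how the specs use users.user1/2/3), a loader could look like this; the real helper may differ:

    import * as dotenv from 'dotenv';
    import * as dotenvExpand from 'dotenv-expand';

    // Sketch only: load test.env, expand ${VAR} references, and expose the
    // three test users in the shape the specs consume (name/email/password).
    export function loadEnv() {
        dotenvExpand.expand(dotenv.config({ path: 'test.env' }));
        const user = (prefix: string) => ({
            name: process.env[prefix],
            email: process.env[`${prefix}_MAIL`],
            password: process.env[`${prefix}_PASSWORD`],
        });
        return { user1: user('TEST_USER'), user2: user('TEST_USER2'), user3: user('TEST_USER3') };
    }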
							
								
								
									
37  playwright/tests/collection.spec.ts  Normal file
							| @@ -0,0 +1,37 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import { createAccount } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Create', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1); | ||||
|  | ||||
|     await test.step('Create Org', async () => { | ||||
|         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||
|         await page.getByLabel('Organisation name (required)').fill('Test'); | ||||
|         await page.getByRole('button', { name: 'Submit' }).click(); | ||||
|         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Organisation created'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Create Collection', async () => { | ||||
|         await page.getByRole('link', { name: 'Collections' }).click(); | ||||
|         await page.getByRole('button', { name: 'New' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Collection' }).click(); | ||||
|         await page.getByLabel('Name (required)').fill('RandomCollec'); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'Created collection RandomCollec'); | ||||
|         await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
							
								
								
									
100  playwright/tests/login.smtp.spec.ts  Normal file
							| @@ -0,0 +1,100 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| const utils = require('../global-utils'); | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
| import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailserver; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailserver = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailserver.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     if( mailserver ){ | ||||
|         await mailserver.close(); | ||||
|     } | ||||
| }); | ||||
|  | ||||
| test('Account creation', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await createAccount(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Login', async ({ context, page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     await test.step('verify email', async () => { | ||||
|         await page.getByText('Verify your account\'s email').click(); | ||||
|         await expect(page.getByText('Verify your account\'s email')).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Send email' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Check your email inbox for a verification link'); | ||||
|  | ||||
|         const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email"); | ||||
|         expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM); | ||||
|  | ||||
|         const page2 = await context.newPage(); | ||||
|         await page2.setContent(verify.html); | ||||
|         const link = await page2.getByTestId("verify").getAttribute("href"); | ||||
|         await page2.close(); | ||||
|  | ||||
|         await page.goto(link); | ||||
|         await utils.checkNotification(page, 'Account email verified'); | ||||
|     }); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Activate 2fa', async ({ page }) => { | ||||
|     const emails = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     await activateEmail(test, page, users.user1, emails); | ||||
|  | ||||
|     emails.close(); | ||||
| }); | ||||
|  | ||||
| test('2fa', async ({ page }) => { | ||||
|     const emails = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await test.step('login', async () => { | ||||
|         await page.goto('/'); | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByLabel('Master password').fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|         const code = await retrieveEmailCode(test, page, emails); | ||||
|         await page.getByLabel(/Verification code/).fill(code); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         await expect(page).toHaveTitle(/Vaults/); | ||||
|     }) | ||||
|  | ||||
|     await disableEmail(test, page, users.user1); | ||||
|  | ||||
|     emails.close(); | ||||
| }); | ||||
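The mailserver.buffer(address) and MailBuffer.expect(predicate) calls above come from the maildev typings bundled with this test harness rather than from the upstream maildev package, so the exact API is an assumption here. Conceptually, a buffer collects mails delivered to one recipient and expect resolves once one of them matches the predicate, consuming it. A minimal sketch of that idea, with all names hypothetical:

    // Sketch only: per-recipient mail buffer with a promise-based matcher.
    type Mail = { subject: string; html: string; from: { address?: string }[] };
    type Waiter = { pred: (m: Mail) => boolean; resolve: (m: Mail) => void };

    class MailBufferSketch {
        private mails: Mail[] = [];
        private waiters: Waiter[] = [];

        // Called by the mail server whenever a mail for this recipient arrives.
        push(mail: Mail) {
            const i = this.waiters.findIndex((w) => w.pred(mail));
            if (i >= 0) {
                this.waiters.splice(i, 1)[0].resolve(mail);
            } else {
                this.mails.push(mail);
            }
        }

        // Resolves with the first mail matching pred, waiting for it if needed.
        expect(pred: (m: Mail) => boolean): Promise<Mail> {
            const hit = this.mails.find(pred);
            if (hit) {
                this.mails = this.mails.filter((m) => m !== hit);
                return Promise.resolve(hit);
            }
            return new Promise((resolve) => this.waiters.push({ pred, resolve }));
        }
    }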
							
								
								
									
51  playwright/tests/login.spec.ts  Normal file
							| @@ -0,0 +1,51 @@ | ||||
| import { test, expect, type Page, type TestInfo } from '@playwright/test'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
| import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
| let totp; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, {}); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Account creation', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Master password login', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Authenticator 2fa', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     let totp = await activateTOTP(test, page, users.user1); | ||||
|  | ||||
|     await utils.logout(test, page, users.user1); | ||||
|  | ||||
|     await test.step('login', async () => { | ||||
|         let timestamp = Date.now(); // Needed to use the next token | ||||
|         timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByLabel('Master password').fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|         await page.getByLabel(/Verification code/).fill(totp.generate({timestamp})); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     }); | ||||
|  | ||||
|     await disableTOTP(test, page, users.user1); | ||||
| }); | ||||
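The timestamp arithmetic in the login step above is there because the code generated during activation has already been accepted by the server; jumping the clock into the next 30-second window guarantees a code that has not been used yet. A worked example with hypothetical numbers:

    // Hypothetical values: Date.now() = 1_700_000_012_345 ms, totp.period = 30 s.
    const now = 1_700_000_012_345;
    const period = 30;
    const intoWindow = Math.floor(now / 1000) % period;    // 2 seconds into the current window
    const next = now + (period - intoWindow + 1) * 1000;   // jump 29_000 ms, past the boundary
    // totp.generate({ timestamp: next }) now falls in the following window,
    // so it produces a fresh code the server has not seen.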
							
								
								
									
115  playwright/tests/organization.smtp.spec.ts  Normal file
							| @@ -0,0 +1,115 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from '../global-utils'; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailServer = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailServer.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
|  | ||||
|     mail1Buffer = mailServer.buffer(users.user1.email); | ||||
|     mail2Buffer = mailServer.buffer(users.user2.email); | ||||
|     mail3Buffer = mailServer.buffer(users.user3.email); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}, testInfo: TestInfo) => { | ||||
|     utils.stopVault(testInfo); | ||||
|     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||
| }); | ||||
|  | ||||
| test('Create user3', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user3, mail3Buffer); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1, mail1Buffer); | ||||
|  | ||||
|     await orgs.create(test, page, 'Test'); | ||||
|     await orgs.members(test, page, 'Test'); | ||||
|     await orgs.invite(test, page, 'Test', users.user2.email); | ||||
|     await orgs.invite(test, page, 'Test', users.user3.email, { | ||||
|         navigate: false, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with new account', async ({ page }) => { | ||||
|     const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||
|  | ||||
|     await test.step('Create account', async () => { | ||||
|         await page.setContent(invited.html); | ||||
|         const link = await page.getByTestId('invite').getAttribute('href'); | ||||
|         await page.goto(link); | ||||
|         await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/); | ||||
|  | ||||
|         //await page.getByLabel('Name').fill(users.user2.name); | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|         await utils.checkNotification(page, 'Your new account has been created'); | ||||
|  | ||||
|         // Redirected to the vault | ||||
|         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||
|         await utils.checkNotification(page, 'You have been logged in!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail2Buffer.expect((m) => m.subject === 'Welcome'); | ||||
|         await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||
|         await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with existing account', async ({ page }) => { | ||||
|     const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||
|  | ||||
|     await page.setContent(invited.html); | ||||
|     const link = await page.getByTestId('invite').getAttribute('href'); | ||||
|  | ||||
|     await page.goto(link); | ||||
|  | ||||
|     // We should be on login page with email prefilled | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|     // Unlock page | ||||
|     await page.getByLabel('Master password').fill(users.user3.password); | ||||
|     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|     // We are now in the default vault page | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     await utils.checkNotification(page, 'Invitation accepted'); | ||||
|  | ||||
|     await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||
|     await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||
| }); | ||||
|  | ||||
| test('Confirm invited user', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1, mail1Buffer); | ||||
|  | ||||
|     await orgs.members(test, page, 'Test'); | ||||
|     await orgs.confirm(test, page, 'Test', users.user2.email); | ||||
|  | ||||
|     await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed')); | ||||
| }); | ||||
|  | ||||
| test('Organization is visible', async ({ page }) => { | ||||
|     await logUser(test, page, users.user2, mail2Buffer); | ||||
|     await page.getByRole('button', { name: 'vault: Test', exact: true }).click(); | ||||
|     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
| }); | ||||
							
								
								
									
54  playwright/tests/organization.spec.ts  Normal file
							| @@ -0,0 +1,54 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Invite', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user3); | ||||
|     await createAccount(test, page, users.user1); | ||||
|  | ||||
|     await orgs.create(test, page, 'New organisation'); | ||||
|     await orgs.members(test, page, 'New organisation'); | ||||
|  | ||||
|     await test.step('missing user2', async () => { | ||||
|         await orgs.invite(test, page, 'New organisation', users.user2.email); | ||||
|         await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/); | ||||
|     }); | ||||
|  | ||||
|     await test.step('existing user3', async () => { | ||||
|         await orgs.invite(test, page, 'New organisation', users.user3.email); | ||||
|         await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/); | ||||
|         await orgs.confirm(test, page, 'New organisation', users.user3.email); | ||||
|     }); | ||||
|  | ||||
|     await test.step('confirm user2', async () => { | ||||
|         await createAccount(test, page, users.user2); | ||||
|         await logUser(test, page, users.user1); | ||||
|         await orgs.members(test, page, 'New organisation'); | ||||
|         await orgs.confirm(test, page, 'New organisation', users.user2.email); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Org visible user2', async () => { | ||||
|         await logUser(test, page, users.user2); | ||||
|         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||
|         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Org visible user3', async () => { | ||||
|         await logUser(test, page, users.user3); | ||||
|         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||
|         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
							
								
								
									
92  playwright/tests/setups/2fa.ts  Normal file
							| @@ -0,0 +1,92 @@ | ||||
| import { expect, type Page, Test } from '@playwright/test'; | ||||
| import { type MailBuffer } from 'maildev'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> { | ||||
|     return await test.step('Activate TOTP 2FA', async () => { | ||||
|         await page.getByRole('button', { name: user.name }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         const secret = await page.getByLabel('Key').innerText(); | ||||
|         let totp = new OTPAuth.TOTP({ secret, period: 30 }); | ||||
|  | ||||
|         await page.getByLabel(/Verification code/).fill(totp.generate()); | ||||
|         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||
|         await page.getByRole('heading', { name: 'Turned on', exact: true }); | ||||
|         await page.getByLabel('Close').click(); | ||||
|  | ||||
|         return totp; | ||||
|     }) | ||||
| } | ||||
|  | ||||
| export async function disableTOTP(test: Test, page: Page, user: { password: string }) { | ||||
|     await test.step('Disable TOTP 2FA', async () => { | ||||
|         await page.getByRole('button', { name: 'Test' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||
|         await page.getByRole('button', { name: 'Yes' }).click(); | ||||
|         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) { | ||||
|     await test.step('Activate Email 2FA', async () => { | ||||
|         await page.getByRole('button', { name: user.name }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Send email' }).click(); | ||||
|     }); | ||||
|  | ||||
|     let code = await retrieveEmailCode(test, page, mailBuffer); | ||||
|  | ||||
|     await test.step('input code', async () => { | ||||
|         await page.getByLabel('2. Enter the resulting 6').fill(code); | ||||
|         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||
|         await page.getByRole('heading', { name: 'Turned on', exact: true }); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> { | ||||
|     return await test.step('retrieve code', async () => { | ||||
|         const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code")); | ||||
|         const page2 = await page.context().newPage(); | ||||
|         await page2.setContent(codeMail.html); | ||||
|         const code = await page2.getByTestId("2fa").innerText(); | ||||
|         await page2.close(); | ||||
|         return code; | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function disableEmail(test: Test, page: Page, user: { password: string }) { | ||||
|     await test.step('Disable Email 2FA', async () => { | ||||
|         await page.getByRole('button', { name: 'Test' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||
|         await page.getByRole('button', { name: 'Yes' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||
|     }); | ||||
| } | ||||
							
								
								
									
7  playwright/tests/setups/db-setup.ts  Normal file
							| @@ -0,0 +1,7 @@ | ||||
| import { test } from './db-test'; | ||||
|  | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| test('DB start', async ({ serviceName }) => { | ||||
|     utils.startComposeService(serviceName); | ||||
| }); | ||||
							
								
								
									
11  playwright/tests/setups/db-teardown.ts  Normal file
							| @@ -0,0 +1,11 @@ | ||||
| import { test } from './db-test'; | ||||
|  | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test('DB teardown ?', async ({ serviceName }) => { | ||||
|     if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) { | ||||
|         utils.stopComposeService(serviceName); | ||||
|     } | ||||
| }); | ||||
							
								
								
									
9  playwright/tests/setups/db-test.ts  Normal file
							| @@ -0,0 +1,9 @@ | ||||
| import { test as base } from '@playwright/test'; | ||||
|  | ||||
| export type TestOptions = { | ||||
|   serviceName: string; | ||||
| }; | ||||
|  | ||||
| export const test = base.extend<TestOptions>({ | ||||
|   serviceName: ['', { option: true }], | ||||
| }); | ||||
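This is Playwright's option-fixture pattern: serviceName is declared here as a project-scoped option, the database setup and teardown projects in playwright.config.ts set it through use (the teardown entries are visible at the top of this diff), and db-setup.ts / db-teardown.ts receive it as an ordinary fixture argument. An illustrative config fragment showing the wiring; the project name below is an example, the full config in this PR defines the real set:

    import { defineConfig } from '@playwright/test';
    import { type TestOptions } from './tests/setups/db-test';

    export default defineConfig<TestOptions>({
        projects: [
            {
                name: 'postgres-setup',
                testMatch: 'tests/setups/db-setup.ts',
                use: { serviceName: 'Postgres' },
            },
            // ...main test projects, then the matching teardown projects shown earlier.
        ],
    });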
							
								
								
									
77  playwright/tests/setups/orgs.ts  Normal file
							| @@ -0,0 +1,77 @@ | ||||
| import { expect, type Page } from '@playwright/test'; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function create(test, page: Page, name: string) { | ||||
|     await test.step('Create Org', async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Password Manager' }).first().click(); | ||||
|         await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||
|         await page.getByLabel('Organisation name (required)').fill(name); | ||||
|         await page.getByRole('button', { name: 'Submit' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Organisation created'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function policies(test, page: Page, name: string) { | ||||
|     await test.step(`Navigate to ${name} policies`, async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||
|         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||
|         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||
|         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Toggle collapse Settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Policies' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function members(test, page: Page, name: string) { | ||||
|     await test.step(`Navigate to ${name} members`, async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||
|         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||
|         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||
|         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||
|         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await expect(page.getByRole('cell', { name: 'All' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function invite(test, page: Page, name: string, email: string) { | ||||
|     await test.step(`Invite ${email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Invite member' }).click(); | ||||
|         await page.getByLabel('Email (required)').fill(email); | ||||
|         await page.getByRole('tab', { name: 'Collections' }).click(); | ||||
|         await page.getByRole('combobox', { name: 'Permission' }).click(); | ||||
|         await page.getByText('Edit items', { exact: true }).click(); | ||||
|         await page.getByLabel('Select collections').click(); | ||||
|         await page.getByText('Default collection').click(); | ||||
|         await page.getByRole('cell', { name: 'Collection', exact: true }).click(); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'User(s) invited'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function confirm(test, page: Page, name: string, user_email: string) { | ||||
|     await test.step(`Confirm ${user_email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||
|         await page.getByRole('menuitem', { name: 'Confirm' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Confirm' }).click(); | ||||
|         await utils.checkNotification(page, 'confirmed'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function revoke(test, page: Page, name: string, user_email: string) { | ||||
|     await test.step(`Revoke ${user_email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||
|         await page.getByRole('menuitem', { name: 'Revoke access' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Revoke access' }).click(); | ||||
|         await utils.checkNotification(page, 'Revoked organisation access'); | ||||
|     }); | ||||
| } | ||||
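Every helper above finishes by asserting a toast through utils.checkNotification, which lives in global-utils and is not part of this excerpt. A sketch of what such a check could look like, purely an assumption about the real helper (which may locate or dismiss the toast differently):

    import { expect, type Page } from '@playwright/test';

    // Sketch only: wait for a toast containing `content`, then click it away so
    // it cannot mask the next notification a test waits for.
    export async function checkNotification(page: Page, content: string) {
        const toast = page.getByText(content).first();
        await expect(toast).toBeVisible();
        await toast.click();
    }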
							
								
								
									
18  playwright/tests/setups/sso-setup.ts  Normal file
							| @@ -0,0 +1,18 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| const { exec } = require('node:child_process'); | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async () => { | ||||
|     console.log("Starting Keycloak"); | ||||
|     exec(`docker compose --profile keycloak --env-file test.env up`); | ||||
| }); | ||||
|  | ||||
| test('Keycloak is up', async ({ page }) => { | ||||
|     await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser()); | ||||
|     // Dummy authority is created at the end of the setup | ||||
|     await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser()); | ||||
|     console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`); | ||||
| }); | ||||
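Because exec() returns immediately, the test above relies on utils.waitFor to poll the realm URLs until Keycloak actually answers. That helper is defined in global-utils, outside this excerpt; a minimal sketch of the idea, assuming a simple retry loop over page.goto (retry count and delay are arbitrary):

    import { type Browser } from '@playwright/test';

    // Sketch only: open a throwaway page and retry until the URL responds with 2xx.
    export async function waitFor(url: string, browser: Browser, retries = 30) {
        const page = await browser.newPage();
        try {
            for (let i = 0; i < retries; i++) {
                try {
                    const res = await page.goto(url);
                    if (res?.ok()) return;
                } catch {
                    // service not reachable yet; fall through to the wait below
                }
                await page.waitForTimeout(2000);
            }
            throw new Error(`Timed out waiting for ${url}`);
        } finally {
            await page.close();
        }
    }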
							
								
								
									
15  playwright/tests/setups/sso-teardown.ts  Normal file
							| @@ -0,0 +1,15 @@ | ||||
| import { test, type FullConfig } from '@playwright/test'; | ||||
|  | ||||
| const { execSync } = require('node:child_process'); | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test('Keycloak teardown', async () => { | ||||
|     if( process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||
|         console.log("Keep Keycloak running"); | ||||
|     } else { | ||||
|         console.log("Keycloak stopping"); | ||||
|         execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`); | ||||
|     } | ||||
| }); | ||||
							
								
								
									
129  playwright/tests/setups/sso.ts  Normal file
							| @@ -0,0 +1,129 @@ | ||||
| import { expect, type Page, Test } from '@playwright/test'; | ||||
| import { type MailBuffer, MailServer } from 'maildev'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
| import { retrieveEmailCode } from './2fa'; | ||||
|  | ||||
| /** | ||||
|  * If a MailBuffer is passed, it will be used to consume the expected emails | ||||
|  */ | ||||
| export async function logNewUser( | ||||
|     test: Test, | ||||
|     page: Page, | ||||
|     user: { email: string, name: string, password: string }, | ||||
|     options: { mailBuffer?: MailBuffer } = {} | ||||
| ) { | ||||
|     await test.step(`Create user ${user.name}`, async () => { | ||||
|         await page.context().clearCookies(); | ||||
|  | ||||
|         await test.step('Landing page', async () => { | ||||
|             await utils.cleanLanding(page); | ||||
|  | ||||
|             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||
|             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Keycloak login', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|             await page.getByLabel(/Username/).fill(user.name); | ||||
|             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Create Vault account', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||
|             await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||
|             await page.getByLabel('Confirm new master password (').fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Default vault page', async () => { | ||||
|             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         }); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Account successfully created!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|  | ||||
|         if( options.mailBuffer ){ | ||||
|             let mailBuffer = options.mailBuffer; | ||||
|             await test.step('Check emails', async () => { | ||||
|                 await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||
|                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|             }); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * If a MailBuffer is passed, it will be used to consume the expected emails | ||||
|  */ | ||||
| export async function logUser( | ||||
|     test: Test, | ||||
|     page: Page, | ||||
|     user: { email: string, name: string, password: string }, | ||||
|     options: { | ||||
|         mailBuffer ?: MailBuffer, | ||||
|         totp?: OTPAuth.TOTP, | ||||
|         mail2fa?: boolean, | ||||
|     } = {} | ||||
| ) { | ||||
|     let mailBuffer = options.mailBuffer; | ||||
|  | ||||
|     await test.step(`Log user ${user.email}`, async () => { | ||||
|         await page.context().clearCookies(); | ||||
|  | ||||
|         await test.step('Landing page', async () => { | ||||
|             await utils.cleanLanding(page); | ||||
|  | ||||
|             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||
|             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Keycloak login', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|             await page.getByLabel(/Username/).fill(user.name); | ||||
|             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|         }); | ||||
|  | ||||
|         if( options.totp || options.mail2fa ){ | ||||
|             let code; | ||||
|  | ||||
|             await test.step('2FA check', async () => { | ||||
|                 await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|  | ||||
|                 if( options.totp ) { | ||||
|                     const totp = options.totp; | ||||
|                     let timestamp = Date.now(); // Needed to use the next token | ||||
|                     timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||
|                     code = totp.generate({timestamp}); | ||||
|                 } else if( options.mail2fa ){ | ||||
|                     code = await retrieveEmailCode(test, page, mailBuffer); | ||||
|                 } | ||||
|  | ||||
|                 await page.getByLabel(/Verification code/).fill(code); | ||||
|                 await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|             }); | ||||
|         } | ||||
|  | ||||
|         await test.step('Unlock vault', async () => { | ||||
|             await expect(page).toHaveTitle('Vaultwarden Web'); | ||||
|             await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible(); | ||||
|             await page.getByLabel('Master password').fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Default vault page', async () => { | ||||
|             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         }); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await test.step('Check email', async () => { | ||||
|                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|             }); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
							
								
								
									
55  playwright/tests/setups/user.ts  Normal file
							| @@ -0,0 +1,55 @@ | ||||
| import { expect, type Page } from '@playwright/test'; | ||||
|  | ||||
| import { type MailBuffer } from 'maildev'; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) { | ||||
|     await test.step(`Create user ${user.name}`, async () => { | ||||
|         await utils.cleanLanding(page); | ||||
|  | ||||
|         await page.getByRole('link', { name: 'Create account' }).click(); | ||||
|  | ||||
|         // Back to Vault create account | ||||
|         await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/); | ||||
|         await page.getByLabel(/Email address/).fill(user.email); | ||||
|         await page.getByLabel('Name').fill(user.name); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         // Vault finish Creation | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Your new account has been created') | ||||
|  | ||||
|         // We are now in the default vault page | ||||
|         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||
|         await utils.checkNotification(page, 'You have been logged in!'); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||
|             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) { | ||||
|     await test.step(`Log user ${user.email}`, async () => { | ||||
|         await utils.cleanLanding(page); | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(user.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         // Unlock page | ||||
|         await page.getByLabel('Master password').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         // We are now in the default vault page | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
							
								
								
									
53  playwright/tests/sso_login.smtp.spec.ts  Normal file
							| @@ -0,0 +1,53 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
| import { activateEmail, disableEmail } from './setups/2fa'; | ||||
| import * as utils from "../global-utils"; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailserver; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailserver = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailserver.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: false, | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     if( mailserver ){ | ||||
|         await mailserver.close(); | ||||
|     } | ||||
| }); | ||||
|  | ||||
| test('Create and activate 2FA', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logNewUser(test, page, users.user1, {mailBuffer: mailBuffer}); | ||||
|  | ||||
|     await activateEmail(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Log and disable', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1, {mailBuffer: mailBuffer, mail2fa: true}); | ||||
|  | ||||
|     await disableEmail(test, page, users.user1); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
							
								
								
									
85  playwright/tests/sso_login.spec.ts  Normal file
							| @@ -0,0 +1,85 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
| import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||
| import * as utils from "../global-utils"; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: false | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Account creation using SSO', async ({ page }) => { | ||||
|     // Landing page | ||||
|     await logNewUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('SSO login', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Non SSO login', async ({ page }) => { | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|     await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||
|     await page.getByRole('button', { name: 'Other' }).click(); | ||||
|  | ||||
|     // Unlock page | ||||
|     await page.getByLabel('Master password').fill(users.user1.password); | ||||
|     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|     // We are now in the default vault page | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
| }); | ||||
|  | ||||
| test('SSO login with TOTP 2fa', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     let totp = await activateTOTP(test, page, users.user1); | ||||
|  | ||||
|     await logUser(test, page, users.user1, { totp }); | ||||
|  | ||||
|     await disableTOTP(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => { | ||||
|     await utils.restartVault(page, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true | ||||
|     }, false); | ||||
|  | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|  | ||||
|     // Check that SSO login is available | ||||
|     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1); | ||||
|  | ||||
|     // No Continue/Other | ||||
|     await expect(page.getByRole('button', { name: 'Other' })).toHaveCount(0); | ||||
| }); | ||||
|  | ||||
|  | ||||
| test('No SSO login', async ({ page }, testInfo: TestInfo) => { | ||||
|     await utils.restartVault(page, testInfo, { | ||||
|         SSO_ENABLED: false | ||||
|     }, false); | ||||
|  | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|  | ||||
|     // No SSO button (rely on a correct selector checked in previous test) | ||||
|     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0); | ||||
|  | ||||
|     // Can continue to Master password | ||||
|     await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|     await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|     await expect(page.getByRole('button', { name: 'Log in with master password' })).toHaveCount(1); | ||||
| }); | ||||
							
								
								
									
121  playwright/tests/sso_organization.smtp.spec.ts  Normal file
							| @@ -0,0 +1,121 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailServer = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailServer.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true, | ||||
|     }); | ||||
|  | ||||
|     mail1Buffer = mailServer.buffer(users.user1.email); | ||||
|     mail2Buffer = mailServer.buffer(users.user2.email); | ||||
|     mail3Buffer = mailServer.buffer(users.user3.email); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||
| }); | ||||
|  | ||||
| test('Create user3', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer }); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer }); | ||||
|  | ||||
|     await orgs.create(test, page, '/Test'); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await orgs.invite(test, page, '/Test', users.user2.email); | ||||
|     await orgs.invite(test, page, '/Test', users.user3.email); | ||||
| }); | ||||
|  | ||||
| test('invited with new account', async ({ page }) => { | ||||
|     const link = await test.step('Extract email link', async () => { | ||||
|         const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test"); | ||||
|         await page.setContent(invited.html); | ||||
|         return await page.getByTestId("invite").getAttribute("href"); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Redirect to Keycloak', async () => { | ||||
|         await page.goto(link); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Keycloak login', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|         await page.getByLabel(/Username/).fill(users.user2.name); | ||||
|         await page.getByLabel('Password', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Create Vault account', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Default vault page', async () => { | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Account successfully created!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail2Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with existing account', async ({ page }) => { | ||||
|     const link = await test.step('Extract email link', async () => { | ||||
|         const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test"); | ||||
|         await page.setContent(invited.html); | ||||
|         return await page.getByTestId("invite").getAttribute("href"); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Redirect to Keycloak', async () => { | ||||
|         await page.goto(link); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Keycloak login', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|         await page.getByLabel(/Username/).fill(users.user3.name); | ||||
|         await page.getByLabel('Password', { exact: true }).fill(users.user3.password); | ||||
|         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Unlock vault', async () => { | ||||
|         await expect(page).toHaveTitle('Vaultwarden Web'); | ||||
|         await page.getByLabel('Master password').fill(users.user3.password); | ||||
|         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Default vault page', async () => { | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail3Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||
|     }); | ||||
| }); | ||||
76  playwright/tests/sso_organization.spec.ts  Normal file
							| @@ -0,0 +1,76 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
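| // SSO-only instance without SMTP settings: this spec covers the organization flow without invitation mails | ||||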
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
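| // user3 signs up via SSO first so the next test can invite and confirm an existing account | ||||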
| test('Create user3', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user3); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user1); | ||||
|  | ||||
|     await orgs.create(test, page, '/Test'); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await orgs.invite(test, page, '/Test', users.user2.email); | ||||
|     await orgs.invite(test, page, '/Test', users.user3.email); | ||||
|     await orgs.confirm(test, page, '/Test', users.user3.email); | ||||
| }); | ||||
|  | ||||
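| // user2 creates their account via SSO; the org membership still needs confirmation by the owner | ||||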
| test('Create invited account', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user2); | ||||
| }); | ||||
|  | ||||
| test('Confirm invited user', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/); | ||||
|     await orgs.confirm(test, page, '/Test', users.user2.email); | ||||
| }); | ||||
|  | ||||
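| // Once confirmed, user2 should see the /Test organization vault and its default collection | ||||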
| test('Organization is visible', async ({ page }) => { | ||||
|     await logUser(test, page, users.user2); | ||||
|     await page.getByLabel('vault: /Test').click(); | ||||
|     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
| }); | ||||
|  | ||||
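| // The owner enables a master password policy; the next SSO login should then force a master password update | ||||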
| test('Enforce password policy', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|     await orgs.policies(test, page, '/Test'); | ||||
|  | ||||
|     await test.step(`Set master password policy`, async () => { | ||||
|         await page.getByRole('button', { name: 'Master password requirements' }).click(); | ||||
|         await page.getByRole('checkbox', { name: 'Turn on' }).check(); | ||||
|         await page.getByRole('checkbox', { name: 'Require existing members to' }).check(); | ||||
|         await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42'); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'Edited policy Master password requirements.'); | ||||
|     }); | ||||
|  | ||||
|     await utils.logout(test, page, users.user1); | ||||
|  | ||||
|     await test.step(`Unlock trigger policy`, async () => { | ||||
|         await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Use single sign-on' }).click(); | ||||
|  | ||||
|         await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
Some files were not shown because too many files have changed in this diff.