Mirror of https://github.com/dani-garcia/vaultwarden.git, synced 2025-10-31 10:18:19 +02:00

Compare commits: 1.33.2 ... 5a8736e116 (93 commits)
| SHA1 |
|---|
| 5a8736e116 |
| f76362ff89 |
| 6db5b7115d |
| 3510351f4d |
| 7161f612a1 |
| 5ee908517f |
| 55577fa4eb |
| 843c063649 |
| 550b670dba |
| de808c5ad9 |
| 1f73630136 |
| 77008a91e9 |
| 7f386d38ae |
| 8e7eeab293 |
| e35c6f8705 |
| ae7b725c0f |
| 2a5489a4b2 |
| 8fd0ee4211 |
| 4a5516e150 |
| 7fc94516ce |
| 5ea0779d6b |
| a133d4e90c |
| 49eff787de |
| cff6c2b3af |
| a0c76284fd |
| 318653b0e5 |
| 5d84f17600 |
| 0db4b00007 |
| a0198d8d7c |
| dfad931dca |
| 25865efd79 |
| bcf627930e |
| ce70cd2cf4 |
| 2ac589d4b4 |
| b2e2aef7de |
| 0755bb19c0 |
| fee0c1c711 |
| f58539f0b4 |
| e718afb441 |
| 55945ad793 |
| 4fd22d8e3b |
| d6a8fb8e48 |
| 3b48e6e903 |
| 6b9333b33e |
| a545636ee5 |
| f125d5f1a1 |
| ad75ce281e |
| 9059437c35 |
| c84db0daca |
| 72adc239f5 |
| 34ebeeca76 |
| 0469d9ba4c |
| eaa6ad06ed |
| 0d3f283c37 |
| 51a1d641c5 |
| 90f7e5ff80 |
| 200999c94e |
| d363e647e9 |
| 53f58b14d5 |
| ef7835d1b0 |
| 3a44dc963b |
| a039e227c7 |
| 602b18fdd6 |
| bf04c64759 |
| 2f1d86b7f1 |
| ff97bcfdda |
| 73f2441d1a |
| ad8484a2d5 |
| 9813e480c0 |
| bfe172702a |
| df42b6d6b0 |
| 2697fe8aba |
| 674e444d67 |
| 0d16da440d |
| 66cf179bca |
| 025bb90f8f |
| d5039d9c17 |
| e7c796a660 |
| bbbd2f6d15 |
| a2d7895586 |
| 8a0cb1137e |
| f960bf59bb |
| 3a1f1bae00 |
| 8dfe805954 |
| 07b869b3ef |
| 2a18665288 |
| 71952a4ab5 |
| 994d157064 |
| 1dae6093c9 |
| 6edceb5f7a |
| 359a4a088a |
| 3baffeee9a |
| d5c353427d |
							
								
								
									
.env.template (114 changes)
							| @@ -15,6 +15,14 @@ | |||||||
| #################### | #################### | ||||||
|  |  | ||||||
| ## Main data folder | ## Main data folder | ||||||
|  | ## This can be a path to local folder or a path to an external location | ||||||
|  | ## depending on features enabled at build time. Possible external locations: | ||||||
|  | ## | ||||||
|  | ## - AWS S3 Bucket (via `s3` feature): s3://bucket-name/path/to/folder | ||||||
|  | ## | ||||||
|  | ## When using an external location, make sure to set TMP_FOLDER, | ||||||
|  | ## TEMPLATES_FOLDER, and DATABASE_URL to local paths and/or a remote database | ||||||
|  | ## location. | ||||||
| # DATA_FOLDER=data | # DATA_FOLDER=data | ||||||
|  |  | ||||||
| ## Individual folders, these override %DATA_FOLDER% | ## Individual folders, these override %DATA_FOLDER% | ||||||
| @@ -22,10 +30,13 @@ | |||||||
| # ICON_CACHE_FOLDER=data/icon_cache | # ICON_CACHE_FOLDER=data/icon_cache | ||||||
| # ATTACHMENTS_FOLDER=data/attachments | # ATTACHMENTS_FOLDER=data/attachments | ||||||
| # SENDS_FOLDER=data/sends | # SENDS_FOLDER=data/sends | ||||||
|  |  | ||||||
|  | ## Temporary folder used for storing temporary file uploads | ||||||
|  | ## Must be a local path. | ||||||
| # TMP_FOLDER=data/tmp | # TMP_FOLDER=data/tmp | ||||||
|  |  | ||||||
| ## Templates data folder, by default uses embedded templates | ## HTML template overrides data folder | ||||||
| ## Check source code to see the format | ## Must be a local path. | ||||||
| # TEMPLATES_FOLDER=data/templates | # TEMPLATES_FOLDER=data/templates | ||||||
| ## Automatically reload the templates for every request, slow, use only for development | ## Automatically reload the templates for every request, slow, use only for development | ||||||
| # RELOAD_TEMPLATES=false | # RELOAD_TEMPLATES=false | ||||||
| @@ -39,7 +50,9 @@ | |||||||
| ######################### | ######################### | ||||||
|  |  | ||||||
| ## Database URL | ## Database URL | ||||||
| ## When using SQLite, this is the path to the DB file, default to %DATA_FOLDER%/db.sqlite3 | ## When using SQLite, this is the path to the DB file, and it defaults to | ||||||
|  | ## %DATA_FOLDER%/db.sqlite3. If DATA_FOLDER is set to an external location, this | ||||||
|  | ## must be set to a local sqlite3 file path. | ||||||
| # DATABASE_URL=data/db.sqlite3 | # DATABASE_URL=data/db.sqlite3 | ||||||
| ## When using MySQL, specify an appropriate connection URI. | ## When using MySQL, specify an appropriate connection URI. | ||||||
| ## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html | ## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html | ||||||
| @@ -67,8 +80,16 @@ | |||||||
| ## Timeout when acquiring database connection | ## Timeout when acquiring database connection | ||||||
| # DATABASE_TIMEOUT=30 | # DATABASE_TIMEOUT=30 | ||||||
|  |  | ||||||
|  | ## Database idle timeout | ||||||
|  | ## Timeout in seconds before idle connections to the database are closed. | ||||||
|  | # DATABASE_IDLE_TIMEOUT=600 | ||||||
|  |  | ||||||
|  | ## Database min connections | ||||||
|  | ## Define the minimum size of the connection pool used for connecting to the database. | ||||||
|  | # DATABASE_MIN_CONNS=2 | ||||||
|  |  | ||||||
| ## Database max connections | ## Database max connections | ||||||
| ## Define the size of the connection pool used for connecting to the database. | ## Define the maximum size of the connection pool used for connecting to the database. | ||||||
| # DATABASE_MAX_CONNS=10 | # DATABASE_MAX_CONNS=10 | ||||||
|  |  | ||||||
| ## Database connection initialization | ## Database connection initialization | ||||||
| @@ -117,7 +138,7 @@ | |||||||
| ## and are always in terms of UTC time (regardless of your local time zone settings). | ## and are always in terms of UTC time (regardless of your local time zone settings). | ||||||
| ## | ## | ||||||
| ## The schedule format is a bit different from crontab as crontab does not contains seconds. | ## The schedule format is a bit different from crontab as crontab does not contains seconds. | ||||||
| ## You can test the the format here: https://crontab.guru, but remove the first digit! | ## You can test the format here: https://crontab.guru, but remove the first digit! | ||||||
| ## SEC  MIN   HOUR   DAY OF MONTH    MONTH   DAY OF WEEK | ## SEC  MIN   HOUR   DAY OF MONTH    MONTH   DAY OF WEEK | ||||||
| ## "0   30   9,12,15     1,15       May-Aug  Mon,Wed,Fri" | ## "0   30   9,12,15     1,15       May-Aug  Mon,Wed,Fri" | ||||||
| ## "0   30     *          *            *          *     " | ## "0   30     *          *            *          *     " | ||||||
| @@ -161,6 +182,10 @@ | |||||||
| ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. | ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. | ||||||
| ## Defaults to every minute. Set blank to disable this job. | ## Defaults to every minute. Set blank to disable this job. | ||||||
| # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" | # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" | ||||||
|  | # | ||||||
|  | ## Cron schedule of the job that cleans sso nonce from incomplete flow | ||||||
|  | ## Defaults to daily (20 minutes after midnight). Set blank to disable this job. | ||||||
|  | # PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *" | ||||||
|  |  | ||||||
| ######################## | ######################## | ||||||
| ### General settings ### | ### General settings ### | ||||||
| @@ -229,7 +254,8 @@ | |||||||
| # SIGNUPS_ALLOWED=true | # SIGNUPS_ALLOWED=true | ||||||
|  |  | ||||||
| ## Controls if new users need to verify their email address upon registration | ## Controls if new users need to verify their email address upon registration | ||||||
| ## Note that setting this option to true prevents logins until the email address has been verified! | ## On new client versions, this will require the user to verify their email at signup time. | ||||||
|  | ## On older clients, it will require the user to verify their email before they can log in. | ||||||
| ## The welcome email will include a verification link, and login attempts will periodically | ## The welcome email will include a verification link, and login attempts will periodically | ||||||
| ## trigger another verification email to be sent. | ## trigger another verification email to be sent. | ||||||
| # SIGNUPS_VERIFY=false | # SIGNUPS_VERIFY=false | ||||||
| @@ -259,7 +285,7 @@ | |||||||
| ## A comma-separated list means only those users can create orgs: | ## A comma-separated list means only those users can create orgs: | ||||||
| # ORG_CREATION_USERS=admin1@example.com,admin2@example.com | # ORG_CREATION_USERS=admin1@example.com,admin2@example.com | ||||||
|  |  | ||||||
| ## Invitations org admins to invite users, even when signups are disabled | ## Allows org admins to invite users, even when signups are disabled | ||||||
| # INVITATIONS_ALLOWED=true | # INVITATIONS_ALLOWED=true | ||||||
| ## Name shown in the invitation emails that don't come from a specific organization | ## Name shown in the invitation emails that don't come from a specific organization | ||||||
| # INVITATION_ORG_NAME=Vaultwarden | # INVITATION_ORG_NAME=Vaultwarden | ||||||
| @@ -327,32 +353,33 @@ | |||||||
|  |  | ||||||
| ## Icon download timeout | ## Icon download timeout | ||||||
| ## Configure the timeout value when downloading the favicons. | ## Configure the timeout value when downloading the favicons. | ||||||
| ## The default is 10 seconds, but this could be to low on slower network connections | ## The default is 10 seconds, but this could be too low on slower network connections | ||||||
| # ICON_DOWNLOAD_TIMEOUT=10 | # ICON_DOWNLOAD_TIMEOUT=10 | ||||||
|  |  | ||||||
| ## Block HTTP domains/IPs by Regex | ## Block HTTP domains/IPs by Regex | ||||||
| ## Any domains or IPs that match this regex won't be fetched by the internal HTTP client. | ## Any domains or IPs that match this regex won't be fetched by the internal HTTP client. | ||||||
| ## Useful to hide other servers in the local network. Check the WIKI for more details | ## Useful to hide other servers in the local network. Check the WIKI for more details | ||||||
| ## NOTE: Always enclose this regex withing single quotes! | ## NOTE: Always enclose this regex within single quotes! | ||||||
| # HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$' | # HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$' | ||||||
|  |  | ||||||
| ## Enabling this will cause the internal HTTP client to refuse to connect to any non global IP address. | ## Enabling this will cause the internal HTTP client to refuse to connect to any non-global IP address. | ||||||
| ## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block | ## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block | ||||||
| # HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true | # HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true | ||||||
|  |  | ||||||
| ## Client Settings | ## Client Settings | ||||||
| ## Enable experimental feature flags for clients. | ## Enable experimental feature flags for clients. | ||||||
| ## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3". | ## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3". | ||||||
|  | ## Note that clients cache the /api/config endpoint for about 1 hour and it could take some time before they are enabled or disabled! | ||||||
| ## | ## | ||||||
| ## The following flags are available: | ## The following flags are available: | ||||||
| ## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials. |  | ||||||
| ## - "autofill-v2": Use the new autofill implementation. |  | ||||||
| ## - "browser-fileless-import": Directly import credentials from other providers without a file. |  | ||||||
| ## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension) |  | ||||||
| ## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor. |  | ||||||
| ## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension. | ## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension. | ||||||
| ## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0) | ## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension. | ||||||
| ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0) | ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0) | ||||||
|  | ## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0) | ||||||
|  | ## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0) | ||||||
|  | ## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0) | ||||||
|  | ## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0) | ||||||
|  | ## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0) | ||||||
| # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials | # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials | ||||||
|  |  | ||||||
| ## Require new device emails. When a user logs in an email is required to be sent. | ## Require new device emails. When a user logs in an email is required to be sent. | ||||||
| @@ -444,6 +471,55 @@ | |||||||
| ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. | ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. | ||||||
| # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false | # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false | ||||||
|  |  | ||||||
|  | ##################################### | ||||||
|  | ### SSO settings (OpenID Connect) ### | ||||||
|  | ##################################### | ||||||
|  |  | ||||||
|  | ## Controls whether users can login using an OpenID Connect identity provider | ||||||
|  | # SSO_ENABLED=false | ||||||
|  |  | ||||||
|  | ## Prevent users from logging in directly without going through SSO | ||||||
|  | # SSO_ONLY=false | ||||||
|  |  | ||||||
|  | ## On SSO Signup if a user with a matching email already exists make the association | ||||||
|  | # SSO_SIGNUPS_MATCH_EMAIL=true | ||||||
|  |  | ||||||
|  | ## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` open potential account takeover. | ||||||
|  | # SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false | ||||||
|  |  | ||||||
|  | ## Base URL of the OIDC server (auto-discovery is used) | ||||||
|  | ##  - Should not include the `/.well-known/openid-configuration` part and no trailing `/` | ||||||
|  | ##  - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse | ||||||
|  | # SSO_AUTHORITY=https://auth.example.com | ||||||
|  |  | ||||||
|  | ## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit). | ||||||
|  | # SSO_SCOPES="email profile" | ||||||
|  |  | ||||||
|  | ## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth). | ||||||
|  | # SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent" | ||||||
|  |  | ||||||
|  | ## Activate PKCE for the Auth Code flow. | ||||||
|  | # SSO_PKCE=true | ||||||
|  |  | ||||||
|  | ## Regex for additional trusted Id token audience (by default only the client_id is trusted). | ||||||
|  | # SSO_AUDIENCE_TRUSTED='^$' | ||||||
|  |  | ||||||
|  | ## Set your Client ID and Client Key | ||||||
|  | # SSO_CLIENT_ID=11111 | ||||||
|  | # SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA | ||||||
|  |  | ||||||
|  | ## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment. | ||||||
|  | # SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' | ||||||
|  |  | ||||||
|  | ## Use sso only for authentication not the session lifecycle | ||||||
|  | # SSO_AUTH_ONLY_NOT_SESSION=false | ||||||
|  |  | ||||||
|  | ## Client cache for discovery endpoint. Duration in seconds (0 to disable). | ||||||
|  | # SSO_CLIENT_CACHE_EXPIRATION=0 | ||||||
|  |  | ||||||
|  | ## Log all the tokens, LOG_LEVEL=debug is required | ||||||
|  | # SSO_DEBUG_TOKENS=false | ||||||
|  |  | ||||||
| ######################## | ######################## | ||||||
| ### MFA/2FA settings ### | ### MFA/2FA settings ### | ||||||
| ######################## | ######################## | ||||||
| @@ -486,7 +562,7 @@ | |||||||
| ## Maximum attempts before an email token is reset and a new email will need to be sent. | ## Maximum attempts before an email token is reset and a new email will need to be sent. | ||||||
| # EMAIL_ATTEMPTS_LIMIT=3 | # EMAIL_ATTEMPTS_LIMIT=3 | ||||||
| ## | ## | ||||||
| ## Setup email 2FA regardless of any organization policy | ## Setup email 2FA on registration regardless of any organization policy | ||||||
| # EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false | # EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false | ||||||
| ## Automatically setup email 2FA as fallback provider when needed | ## Automatically setup email 2FA as fallback provider when needed | ||||||
| # EMAIL_2FA_AUTO_FALLBACK=false | # EMAIL_2FA_AUTO_FALLBACK=false | ||||||
| @@ -503,7 +579,7 @@ | |||||||
| ## | ## | ||||||
| ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238), | ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238), | ||||||
| ## we allow by default the TOTP code which was valid one step back and one in the future. | ## we allow by default the TOTP code which was valid one step back and one in the future. | ||||||
| ## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes. | ## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes. | ||||||
| ## You can disable this, so that only the current TOTP Code is allowed. | ## You can disable this, so that only the current TOTP Code is allowed. | ||||||
| ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid. | ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid. | ||||||
| ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid. | ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid. | ||||||
| @@ -543,7 +619,7 @@ | |||||||
| # SMTP_AUTH_MECHANISM= | # SMTP_AUTH_MECHANISM= | ||||||
|  |  | ||||||
| ## Server name sent during the SMTP HELO | ## Server name sent during the SMTP HELO | ||||||
| ## By default this value should be is on the machine's hostname, | ## By default this value should be the machine's hostname, | ||||||
| ## but might need to be changed in case it trips some anti-spam filters | ## but might need to be changed in case it trips some anti-spam filters | ||||||
| # HELO_NAME= | # HELO_NAME= | ||||||
|  |  | ||||||
|   | |||||||
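
The hunks above add support for an external data folder and a new OpenID Connect SSO section. Below is a minimal sketch of how the new settings might be combined in a deployment's `.env`, assuming a hypothetical S3 bucket, local volume paths, and an identity provider at `auth.example.com` (all placeholders); the folders that must stay local are pointed at a local directory, as the comments in the template require:

```bash
# A sketch only; the bucket name, domain, and paths are placeholders.
# The s3:// data folder requires a build with the `s3` feature enabled.
DATA_FOLDER=s3://my-bucket/vaultwarden
# These three must stay local when DATA_FOLDER points at an external location:
TMP_FOLDER=/var/lib/vaultwarden/tmp
TEMPLATES_FOLDER=/var/lib/vaultwarden/templates
DATABASE_URL=/var/lib/vaultwarden/db.sqlite3

# Minimal OpenID Connect login against a hypothetical provider:
SSO_ENABLED=true
SSO_AUTHORITY=https://auth.example.com
SSO_CLIENT_ID=vaultwarden
SSO_CLIENT_SECRET=change-me
```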
							
								
								
									
.github/CODEOWNERS (3 changes, vendored)
							| @@ -1,3 +1,6 @@ | |||||||
| /.github @dani-garcia @BlackDex | /.github @dani-garcia @BlackDex | ||||||
|  | /.github/** @dani-garcia @BlackDex | ||||||
| /.github/CODEOWNERS @dani-garcia @BlackDex | /.github/CODEOWNERS @dani-garcia @BlackDex | ||||||
|  | /.github/ISSUE_TEMPLATE/** @dani-garcia @BlackDex | ||||||
| /.github/workflows/** @dani-garcia @BlackDex | /.github/workflows/** @dani-garcia @BlackDex | ||||||
|  | /SECURITY.md @dani-garcia @BlackDex | ||||||
|   | |||||||
							
								
								
									
.github/ISSUE_TEMPLATE/bug_report.yml (25 changes, vendored)
							| @@ -8,15 +8,30 @@ body: | |||||||
|       value: | |       value: | | ||||||
|         Thanks for taking the time to fill out this bug report! |         Thanks for taking the time to fill out this bug report! | ||||||
|  |  | ||||||
|         Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here. |         Please **do not** submit feature requests or ask for help on how to configure Vaultwarden here! | ||||||
|  |  | ||||||
|         The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas. |         The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas. | ||||||
|  |  | ||||||
|  |         Our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki/) has topics on how to configure Vaultwarden. | ||||||
|  |  | ||||||
|         Also, make sure you are running [](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden! |         Also, make sure you are running [](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden! | ||||||
|         And search for existing open or closed issues or discussions regarding your topic before posting. |  | ||||||
|  |  | ||||||
|         Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors! |         Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors! | ||||||
|         See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page). |         See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page). | ||||||
|  |  | ||||||
|  |         > [!IMPORTANT] | ||||||
|  |         > ## :bangbang: Search for existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) regarding your topic before posting! :bangbang: | ||||||
|  |   # | ||||||
|  |   - type: checkboxes | ||||||
|  |     id: checklist | ||||||
|  |     attributes: | ||||||
|  |       label: Prerequisites | ||||||
|  |       description: Please confirm you have completed the following before submitting an issue! | ||||||
|  |       options: | ||||||
|  |         - label: I have searched the existing **Closed _AND_ Open** [Issues](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue%20) **_AND_** [Discussions](https://github.com/dani-garcia/vaultwarden/discussions?discussions_q=) | ||||||
|  |           required: true | ||||||
|  |         - label: I have searched and read the [documentation](https://github.com/dani-garcia/vaultwarden/wiki/) | ||||||
|  |           required: true | ||||||
|   # |   # | ||||||
|   - id: support-string |   - id: support-string | ||||||
|     type: textarea |     type: textarea | ||||||
| @@ -36,7 +51,7 @@ body: | |||||||
|     attributes: |     attributes: | ||||||
|       label: Vaultwarden Build Version |       label: Vaultwarden Build Version | ||||||
|       description: What version of Vaultwarden are you running? |       description: What version of Vaultwarden are you running? | ||||||
|       placeholder: ex. v1.31.0 or v1.32.0-3466a804 |       placeholder: ex. v1.34.0 or v1.34.1-53f58b14 | ||||||
|     validations: |     validations: | ||||||
|       required: true |       required: true | ||||||
|   # |   # | ||||||
| @@ -67,7 +82,7 @@ body: | |||||||
|     attributes: |     attributes: | ||||||
|       label: Reverse Proxy |       label: Reverse Proxy | ||||||
|       description: Are you using a reverse proxy, if so which and what version? |       description: Are you using a reverse proxy, if so which and what version? | ||||||
|       placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0 |       placeholder: ex. nginx 1.29.0, caddy 2.10.0, traefik 3.4.4, haproxy 3.2 | ||||||
|     validations: |     validations: | ||||||
|       required: true |       required: true | ||||||
|   # |   # | ||||||
| @@ -115,7 +130,7 @@ body: | |||||||
|     attributes: |     attributes: | ||||||
|       label: Client Version |       label: Client Version | ||||||
|       description: What version(s) of the client(s) are you seeing the problem on? |       description: What version(s) of the client(s) are you seeing the problem on? | ||||||
|       placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0 |       placeholder: ex. CLI v2025.7.0, Firefox 140 - v2025.6.1 | ||||||
|   # |   # | ||||||
|   - id: reproduce |   - id: reproduce | ||||||
|     type: textarea |     type: textarea | ||||||
|   | |||||||
							
								
								
									
.github/workflows/build.yml (19 changes, vendored)
							| @@ -34,8 +34,7 @@ jobs: | |||||||
|     permissions: |     permissions: | ||||||
|       actions: write |       actions: write | ||||||
|       contents: read |       contents: read | ||||||
|     # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers |     runs-on: ubuntu-24.04 | ||||||
|     runs-on: ubuntu-22.04 |  | ||||||
|     timeout-minutes: 120 |     timeout-minutes: 120 | ||||||
|     # Make warnings errors, this is to prevent warnings slipping through. |     # Make warnings errors, this is to prevent warnings slipping through. | ||||||
|     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes. |     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes. | ||||||
| @@ -56,7 +55,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Checkout the repo |       # Checkout the repo | ||||||
|       - name: "Checkout" |       - name: "Checkout" | ||||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|         with: |         with: | ||||||
|           persist-credentials: false |           persist-credentials: false | ||||||
|           fetch-depth: 0 |           fetch-depth: 0 | ||||||
| @@ -66,13 +65,15 @@ jobs: | |||||||
|       - name: Init Variables |       - name: Init Variables | ||||||
|         id: toolchain |         id: toolchain | ||||||
|         shell: bash |         shell: bash | ||||||
|  |         env: | ||||||
|  |           CHANNEL: ${{ matrix.channel }} | ||||||
|         run: | |         run: | | ||||||
|           if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then |           if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then | ||||||
|             RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)" |             RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)" | ||||||
|           elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then |           elif [[ "${CHANNEL}" == 'msrv' ]]; then | ||||||
|             RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)" |             RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)" | ||||||
|           else |           else | ||||||
|             RUST_TOOLCHAIN="${{ matrix.channel }}" |             RUST_TOOLCHAIN="${CHANNEL}" | ||||||
|           fi |           fi | ||||||
|           echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}" |           echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}" | ||||||
|       # End Determine rust-toolchain version |       # End Determine rust-toolchain version | ||||||
| @@ -80,7 +81,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Only install the clippy and rustfmt components on the default rust-toolchain |       # Only install the clippy and rustfmt components on the default rust-toolchain | ||||||
|       - name: "Install rust-toolchain version" |       - name: "Install rust-toolchain version" | ||||||
|         uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1 |         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2 | ||||||
|         if: ${{ matrix.channel == 'rust-toolchain' }} |         if: ${{ matrix.channel == 'rust-toolchain' }} | ||||||
|         with: |         with: | ||||||
|           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" |           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" | ||||||
| @@ -90,7 +91,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Install the any other channel to be used for which we do not execute clippy and rustfmt |       # Install the any other channel to be used for which we do not execute clippy and rustfmt | ||||||
|       - name: "Install MSRV version" |       - name: "Install MSRV version" | ||||||
|         uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1 |         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2 | ||||||
|         if: ${{ matrix.channel != 'rust-toolchain' }} |         if: ${{ matrix.channel != 'rust-toolchain' }} | ||||||
|         with: |         with: | ||||||
|           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" |           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" | ||||||
| @@ -115,7 +116,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Enable Rust Caching |       # Enable Rust Caching | ||||||
|       - name: Rust Caching |       - name: Rust Caching | ||||||
|         uses: Swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2.7.7 |         uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 | ||||||
|         with: |         with: | ||||||
|           # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. |           # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. | ||||||
|           # Like changing the build host from Ubuntu 20.04 to 22.04 for example. |           # Like changing the build host from Ubuntu 20.04 to 22.04 for example. | ||||||
|   | |||||||
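
The `Init Variables` step now passes `matrix.channel` through an environment variable instead of interpolating it straight into the shell script, and derives the toolchain versions from the repository metadata. The same detection can be reproduced locally with the `grep` expressions used in the workflow; a small sketch, assuming it is run from the repository root:

```bash
# Sketch: reproduce the workflow's toolchain detection from the repo root.
# The two grep expressions are taken verbatim from the "Init Variables" step.
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
MSRV="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
echo "rust-toolchain: ${RUST_TOOLCHAIN}"
echo "msrv:           ${MSRV}"
```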
							
								
								
									
.github/workflows/check-templates.yml (29 changes, vendored, new file)
							| @@ -0,0 +1,29 @@ | |||||||
|  | name: Check templates | ||||||
|  | permissions: {} | ||||||
|  |  | ||||||
|  | on: [ push, pull_request ] | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   docker-templates: | ||||||
|  |     name: Validate docker templates | ||||||
|  |     permissions: | ||||||
|  |       contents: read | ||||||
|  |     runs-on: ubuntu-24.04 | ||||||
|  |     timeout-minutes: 30 | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |       # Checkout the repo | ||||||
|  |       - name: "Checkout" | ||||||
|  |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|  |         with: | ||||||
|  |           persist-credentials: false | ||||||
|  |       # End Checkout the repo | ||||||
|  |  | ||||||
|  |       - name: Run make to rebuild templates | ||||||
|  |         working-directory: docker | ||||||
|  |         run: make | ||||||
|  |  | ||||||
|  |       - name: Check for unstaged changes | ||||||
|  |         working-directory: docker | ||||||
|  |         run: git diff --exit-code | ||||||
|  |         continue-on-error: false | ||||||
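
This new workflow only regenerates the Docker files from their templates and fails when the generated output differs from what is committed. The same check can be run locally before pushing; a sketch, assuming GNU make is available:

```bash
# Sketch: run the same template check locally that check-templates.yml runs.
cd docker
make
# Exits non-zero (as the CI job would fail) if the regenerated files are not committed:
git diff --exit-code
```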
							
								
								
									
.github/workflows/hadolint.yml (4 changes, vendored)
							| @@ -14,7 +14,7 @@ jobs: | |||||||
|     steps: |     steps: | ||||||
|       # Start Docker Buildx |       # Start Docker Buildx | ||||||
|       - name: Setup Docker Buildx |       - name: Setup Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 |         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 | ||||||
|         # https://github.com/moby/buildkit/issues/3969 |         # https://github.com/moby/buildkit/issues/3969 | ||||||
|         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills |         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills | ||||||
|         with: |         with: | ||||||
| @@ -35,7 +35,7 @@ jobs: | |||||||
|       # End Download hadolint |       # End Download hadolint | ||||||
|       # Checkout the repo |       # Checkout the repo | ||||||
|       - name: Checkout |       - name: Checkout | ||||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|         with: |         with: | ||||||
|           persist-credentials: false |           persist-credentials: false | ||||||
|       # End Checkout the repo |       # End Checkout the repo | ||||||
|   | |||||||
							
								
								
									
.github/workflows/release.yml (77 changes, vendored)
							| @@ -10,33 +10,16 @@ on: | |||||||
|       # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet |       # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet | ||||||
|       - '[1-2].[0-9]+.[0-9]+' |       - '[1-2].[0-9]+.[0-9]+' | ||||||
|  |  | ||||||
|  | concurrency: | ||||||
|  |   # Apply concurrency control only on the upstream repo | ||||||
|  |   group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }} | ||||||
|  |   # Don't cancel other runs when creating a tag | ||||||
|  |   cancel-in-progress: ${{ github.ref_type == 'branch' }} | ||||||
|  |  | ||||||
| jobs: | jobs: | ||||||
|   # https://github.com/marketplace/actions/skip-duplicate-actions |  | ||||||
|   # Some checks to determine if we need to continue with building a new docker. |  | ||||||
|   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already. |  | ||||||
|   skip_check: |  | ||||||
|     # Only run this in the upstream repo and not on forks |  | ||||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} |  | ||||||
|     name: Cancel older jobs when running |  | ||||||
|     permissions: |  | ||||||
|       actions: write |  | ||||||
|     runs-on: ubuntu-24.04 |  | ||||||
|     outputs: |  | ||||||
|       should_skip: ${{ steps.skip_check.outputs.should_skip }} |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - name: Skip Duplicates Actions |  | ||||||
|         id: skip_check |  | ||||||
|         uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1 |  | ||||||
|         with: |  | ||||||
|           cancel_others: 'true' |  | ||||||
|         # Only run this when not creating a tag |  | ||||||
|         if: ${{ github.ref_type == 'branch' }} |  | ||||||
|  |  | ||||||
|   docker-build: |   docker-build: | ||||||
|     needs: skip_check |  | ||||||
|     if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }} |  | ||||||
|     name: Build Vaultwarden containers |     name: Build Vaultwarden containers | ||||||
|  |     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||||
|     permissions: |     permissions: | ||||||
|       packages: write |       packages: write | ||||||
|       contents: read |       contents: read | ||||||
| @@ -47,7 +30,7 @@ jobs: | |||||||
|     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them |     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them | ||||||
|     services: |     services: | ||||||
|       registry: |       registry: | ||||||
|         image: registry:2 |         image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0 | ||||||
|         ports: |         ports: | ||||||
|           - 5000:5000 |           - 5000:5000 | ||||||
|     env: |     env: | ||||||
| @@ -70,13 +53,13 @@ jobs: | |||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - name: Initialize QEMU binfmt support |       - name: Initialize QEMU binfmt support | ||||||
|         uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 |         uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 | ||||||
|         with: |         with: | ||||||
|           platforms: "arm64,arm" |           platforms: "arm64,arm" | ||||||
|  |  | ||||||
|       # Start Docker Buildx |       # Start Docker Buildx | ||||||
|       - name: Setup Docker Buildx |       - name: Setup Docker Buildx | ||||||
|         uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 |         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 | ||||||
|         # https://github.com/moby/buildkit/issues/3969 |         # https://github.com/moby/buildkit/issues/3969 | ||||||
|         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills |         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills | ||||||
|         with: |         with: | ||||||
| @@ -89,7 +72,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Checkout the repo |       # Checkout the repo | ||||||
|       - name: Checkout |       - name: Checkout | ||||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|         # We need fetch-depth of 0 so we also get all the tag metadata |         # We need fetch-depth of 0 so we also get all the tag metadata | ||||||
|         with: |         with: | ||||||
|           persist-credentials: false |           persist-credentials: false | ||||||
| @@ -120,7 +103,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Login to Docker Hub |       # Login to Docker Hub | ||||||
|       - name: Login to Docker Hub |       - name: Login to Docker Hub | ||||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 |         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||||
|         with: |         with: | ||||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} |           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} |           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||||
| @@ -136,7 +119,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Login to GitHub Container Registry |       # Login to GitHub Container Registry | ||||||
|       - name: Login to GitHub Container Registry |       - name: Login to GitHub Container Registry | ||||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 |         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||||
|         with: |         with: | ||||||
|           registry: ghcr.io |           registry: ghcr.io | ||||||
|           username: ${{ github.repository_owner }} |           username: ${{ github.repository_owner }} | ||||||
| @@ -153,7 +136,7 @@ jobs: | |||||||
|  |  | ||||||
|       # Login to Quay.io |       # Login to Quay.io | ||||||
|       - name: Login to Quay.io |       - name: Login to Quay.io | ||||||
|         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 |         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||||
|         with: |         with: | ||||||
|           registry: quay.io |           registry: quay.io | ||||||
|           username: ${{ secrets.QUAY_USERNAME }} |           username: ${{ secrets.QUAY_USERNAME }} | ||||||
| @@ -192,7 +175,7 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Bake ${{ matrix.base_image }} containers |       - name: Bake ${{ matrix.base_image }} containers | ||||||
|         id: bake_vw |         id: bake_vw | ||||||
|         uses: docker/bake-action@7bff531c65a5cda33e52e43950a795b91d450f63 # v6.3.0 |         uses: docker/bake-action@3acf805d94d93a86cce4ca44798a76464a75b88c # v6.9.0 | ||||||
|         env: |         env: | ||||||
|           BASE_TAGS: "${{ env.BASE_TAGS }}" |           BASE_TAGS: "${{ env.BASE_TAGS }}" | ||||||
|           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" |           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" | ||||||
| @@ -213,14 +196,15 @@ jobs: | |||||||
|         shell: bash |         shell: bash | ||||||
|         env: |         env: | ||||||
|           BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} |           BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} | ||||||
|  |           BASE_IMAGE: ${{ matrix.base_image }} | ||||||
|         run: | |         run: | | ||||||
|           GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" |           GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" | ||||||
|           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" |           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" | ||||||
|  |  | ||||||
|       # Attest container images |       # Attest container images | ||||||
|       - name: Attest - docker.io - ${{ matrix.base_image }} |       - name: Attest - docker.io - ${{ matrix.base_image }} | ||||||
|         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} |         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||||
|         uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 |         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||||
|         with: |         with: | ||||||
|           subject-name: ${{ vars.DOCKERHUB_REPO }} |           subject-name: ${{ vars.DOCKERHUB_REPO }} | ||||||
|           subject-digest: ${{ env.DIGEST_SHA }} |           subject-digest: ${{ env.DIGEST_SHA }} | ||||||
| @@ -228,7 +212,7 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Attest - ghcr.io - ${{ matrix.base_image }} |       - name: Attest - ghcr.io - ${{ matrix.base_image }} | ||||||
|         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} |         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||||
|         uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 |         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||||
|         with: |         with: | ||||||
|           subject-name: ${{ vars.GHCR_REPO }} |           subject-name: ${{ vars.GHCR_REPO }} | ||||||
|           subject-digest: ${{ env.DIGEST_SHA }} |           subject-digest: ${{ env.DIGEST_SHA }} | ||||||
| @@ -236,7 +220,7 @@ jobs: | |||||||
|  |  | ||||||
|       - name: Attest - quay.io - ${{ matrix.base_image }} |       - name: Attest - quay.io - ${{ matrix.base_image }} | ||||||
|         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} |         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} | ||||||
|         uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 |         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||||
|         with: |         with: | ||||||
|           subject-name: ${{ vars.QUAY_REPO }} |           subject-name: ${{ vars.QUAY_REPO }} | ||||||
|           subject-digest: ${{ env.DIGEST_SHA }} |           subject-digest: ${{ env.DIGEST_SHA }} | ||||||
| @@ -248,6 +232,7 @@ jobs: | |||||||
|         shell: bash |         shell: bash | ||||||
|         env: |         env: | ||||||
|           REF_TYPE: ${{ github.ref_type }} |           REF_TYPE: ${{ github.ref_type }} | ||||||
|  |           BASE_IMAGE: ${{ matrix.base_image }} | ||||||
|         run: | |         run: | | ||||||
|           # Check which main tag we are going to build determined by ref_type |           # Check which main tag we are going to build determined by ref_type | ||||||
|           if [[ "${REF_TYPE}" == "tag" ]]; then |           if [[ "${REF_TYPE}" == "tag" ]]; then | ||||||
| @@ -257,7 +242,7 @@ jobs: | |||||||
|           fi |           fi | ||||||
|  |  | ||||||
|           # Check which base_image was used and append -alpine if needed |           # Check which base_image was used and append -alpine if needed | ||||||
|           if [[ "${{ matrix.base_image }}" == "alpine" ]]; then |           if [[ "${BASE_IMAGE}" == "alpine" ]]; then | ||||||
|             EXTRACT_TAG="${EXTRACT_TAG}-alpine" |             EXTRACT_TAG="${EXTRACT_TAG}-alpine" | ||||||
|           fi |           fi | ||||||
|  |  | ||||||
| @@ -266,55 +251,55 @@ jobs: | |||||||
|  |  | ||||||
|           # Extract amd64 binary |           # Extract amd64 binary | ||||||
|           docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|           docker cp amd64:/vaultwarden vaultwarden-amd64-${{ matrix.base_image }} |           docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE} | ||||||
|           docker rm --force amd64 |           docker rm --force amd64 | ||||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|  |  | ||||||
|           # Extract arm64 binary |           # Extract arm64 binary | ||||||
|           docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|           docker cp arm64:/vaultwarden vaultwarden-arm64-${{ matrix.base_image }} |           docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE} | ||||||
|           docker rm --force arm64 |           docker rm --force arm64 | ||||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|  |  | ||||||
|           # Extract armv7 binary |           # Extract armv7 binary | ||||||
|           docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|           docker cp armv7:/vaultwarden vaultwarden-armv7-${{ matrix.base_image }} |           docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE} | ||||||
|           docker rm --force armv7 |           docker rm --force armv7 | ||||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|  |  | ||||||
|           # Extract armv6 binary |           # Extract armv6 binary | ||||||
|           docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|           docker cp armv6:/vaultwarden vaultwarden-armv6-${{ matrix.base_image }} |           docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE} | ||||||
|           docker rm --force armv6 |           docker rm --force armv6 | ||||||
|           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" |           docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" | ||||||
|  |  | ||||||
|       # Upload artifacts to Github Actions and Attest the binaries |       # Upload artifacts to Github Actions and Attest the binaries | ||||||
|       - name: "Upload amd64 artifact ${{ matrix.base_image }}" |       - name: "Upload amd64 artifact ${{ matrix.base_image }}" | ||||||
|         uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 |         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||||
|         with: |         with: | ||||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} |           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} | ||||||
|           path: vaultwarden-amd64-${{ matrix.base_image }} |           path: vaultwarden-amd64-${{ matrix.base_image }} | ||||||
|  |  | ||||||
|       - name: "Upload arm64 artifact ${{ matrix.base_image }}" |       - name: "Upload arm64 artifact ${{ matrix.base_image }}" | ||||||
|         uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 |         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||||
|         with: |         with: | ||||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} |           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} | ||||||
|           path: vaultwarden-arm64-${{ matrix.base_image }} |           path: vaultwarden-arm64-${{ matrix.base_image }} | ||||||
|  |  | ||||||
|       - name: "Upload armv7 artifact ${{ matrix.base_image }}" |       - name: "Upload armv7 artifact ${{ matrix.base_image }}" | ||||||
|         uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 |         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||||
|         with: |         with: | ||||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} |           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} | ||||||
|           path: vaultwarden-armv7-${{ matrix.base_image }} |           path: vaultwarden-armv7-${{ matrix.base_image }} | ||||||
|  |  | ||||||
|       - name: "Upload armv6 artifact ${{ matrix.base_image }}" |       - name: "Upload armv6 artifact ${{ matrix.base_image }}" | ||||||
|         uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 |         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 | ||||||
|         with: |         with: | ||||||
|           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} |           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} | ||||||
|           path: vaultwarden-armv6-${{ matrix.base_image }} |           path: vaultwarden-armv6-${{ matrix.base_image }} | ||||||
|  |  | ||||||
|       - name: "Attest artifacts ${{ matrix.base_image }}" |       - name: "Attest artifacts ${{ matrix.base_image }}" | ||||||
|         uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 |         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 | ||||||
|         with: |         with: | ||||||
|           subject-path: vaultwarden-* |           subject-path: vaultwarden-* | ||||||
|       # End Upload artifacts to Github Actions |       # End Upload artifacts to Github Actions | ||||||
|   | |||||||
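
Besides the action and registry pin updates, the digest-extraction and binary-extraction steps stop interpolating `${{ matrix.base_image }}` into shell and `jq` code and read it from an environment variable instead; the `jq` call passes the value in with `--arg` rather than splicing it into the filter. A standalone sketch of that pattern, using a made-up metadata payload in place of the real `docker/bake-action` output:

```bash
#!/usr/bin/env bash
# Sketch of the digest-extraction pattern; the JSON below is a made-up stand-in
# for the metadata that docker/bake-action emits for the "<base_image>-multi" target.
BASE_IMAGE="alpine"
BAKE_METADATA='{"alpine-multi":{"containerimage.digest":"sha256:0123abcd"}}'

# --arg passes the value as a jq variable, so it is never parsed as part of the filter.
DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${DIGEST_SHA}"   # prints DIGEST_SHA=sha256:0123abcd
```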
							
								
								
									
.github/workflows/trivy.yml (6 changes, vendored)
							| @@ -31,12 +31,12 @@ jobs: | |||||||
|  |  | ||||||
|     steps: |     steps: | ||||||
|       - name: Checkout code |       - name: Checkout code | ||||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|         with: |         with: | ||||||
|           persist-credentials: false |           persist-credentials: false | ||||||
|  |  | ||||||
|       - name: Run Trivy vulnerability scanner |       - name: Run Trivy vulnerability scanner | ||||||
|         uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0 |         uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0 | ||||||
|         env: |         env: | ||||||
|           TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 |           TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 | ||||||
|           TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1 |           TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1 | ||||||
| @@ -48,6 +48,6 @@ jobs: | |||||||
|           severity: CRITICAL,HIGH |           severity: CRITICAL,HIGH | ||||||
|  |  | ||||||
|       - name: Upload Trivy scan results to GitHub Security tab |       - name: Upload Trivy scan results to GitHub Security tab | ||||||
|         uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5 |         uses: github/codeql-action/upload-sarif@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11 | ||||||
|         with: |         with: | ||||||
|           sarif_file: 'trivy-results.sarif' |           sarif_file: 'trivy-results.sarif' | ||||||
|   | |||||||
							
								
								
									
.github/workflows/zizmor.yml: 28 added lines (vendored, new file)
							| @@ -0,0 +1,28 @@ | |||||||
|  | name: Security Analysis with zizmor | ||||||
|  |  | ||||||
|  | on: | ||||||
|  |   push: | ||||||
|  |     branches: ["main"] | ||||||
|  |   pull_request: | ||||||
|  |     branches: ["**"] | ||||||
|  |  | ||||||
|  | permissions: {} | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   zizmor: | ||||||
|  |     name: Run zizmor | ||||||
|  |     runs-on: ubuntu-latest | ||||||
|  |     permissions: | ||||||
|  |       security-events: write | ||||||
|  |     steps: | ||||||
|  |       - name: Checkout repository | ||||||
|  |         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 | ||||||
|  |         with: | ||||||
|  |           persist-credentials: false | ||||||
|  |  | ||||||
|  |       - name: Run zizmor | ||||||
|  |         uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2 | ||||||
|  |         with: | ||||||
|  |           # intentionally not scanning the entire repository, | ||||||
|  |           # since it contains integration tests. | ||||||
|  |           inputs: ./.github/ | ||||||
| @@ -1,7 +1,7 @@ | |||||||
| --- | --- | ||||||
| repos: | repos: | ||||||
| -   repo: https://github.com/pre-commit/pre-commit-hooks | -   repo: https://github.com/pre-commit/pre-commit-hooks | ||||||
|     rev: v4.6.0 |     rev: v6.0.0 | ||||||
|     hooks: |     hooks: | ||||||
|     - id: check-yaml |     - id: check-yaml | ||||||
|     - id: check-json |     - id: check-json | ||||||
| @@ -31,7 +31,7 @@ repos: | |||||||
|       language: system |       language: system | ||||||
|       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"] |       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"] | ||||||
|       types_or: [rust, file] |       types_or: [rust, file] | ||||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain|.*\.rs$) |       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) | ||||||
|       pass_filenames: false |       pass_filenames: false | ||||||
|     - id: cargo-clippy |     - id: cargo-clippy | ||||||
|       name: cargo clippy |       name: cargo clippy | ||||||
| @@ -40,5 +40,13 @@ repos: | |||||||
|       language: system |       language: system | ||||||
|       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"] |       args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"] | ||||||
|       types_or: [rust, file] |       types_or: [rust, file] | ||||||
|       files: (Cargo.toml|Cargo.lock|rust-toolchain|clippy.toml|.*\.rs$) |       files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$) | ||||||
|       pass_filenames: false |       pass_filenames: false | ||||||
|  |     - id: check-docker-templates | ||||||
|  |       name: check docker templates | ||||||
|  |       description: Check if the Docker templates are updated | ||||||
|  |       language: system | ||||||
|  |       entry: sh | ||||||
|  |       args: | ||||||
|  |         - "-c" | ||||||
|  |         - "cd docker && make" | ||||||
|   | |||||||
							
								
								
									
Cargo.lock: 3365 changed lines (generated; diff suppressed because it is too large)
Cargo.toml: 126 changed lines
							| @@ -1,11 +1,12 @@ | |||||||
| workspace = { members = ["macros"] } | [workspace] | ||||||
|  | members = ["macros"] | ||||||
|  |  | ||||||
| [package] | [package] | ||||||
| name = "vaultwarden" | name = "vaultwarden" | ||||||
| version = "1.0.0" | version = "1.0.0" | ||||||
| authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] | authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] | ||||||
| edition = "2021" | edition = "2021" | ||||||
| rust-version = "1.83.0" | rust-version = "1.87.0" | ||||||
| resolver = "2" | resolver = "2" | ||||||
|  |  | ||||||
| repository = "https://github.com/dani-garcia/vaultwarden" | repository = "https://github.com/dani-garcia/vaultwarden" | ||||||
| @@ -31,6 +32,11 @@ enable_mimalloc = ["dep:mimalloc"] | |||||||
| # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile | # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile | ||||||
| # if you want to turn off the logging for a specific run. | # if you want to turn off the logging for a specific run. | ||||||
| query_logger = ["dep:diesel_logger"] | query_logger = ["dep:diesel_logger"] | ||||||
|  | s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"] | ||||||
|  |  | ||||||
|  | # OIDC specific features | ||||||
|  | oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"] | ||||||
|  | oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"] | ||||||
|  |  | ||||||
| # Enable unstable features, requires nightly | # Enable unstable features, requires nightly | ||||||
| # Currently only used to enable rusts official ip support | # Currently only used to enable rusts official ip support | ||||||
| @@ -44,7 +50,7 @@ syslog = "7.0.0" | |||||||
| macros = { path = "./macros" } | macros = { path = "./macros" } | ||||||
|  |  | ||||||
| # Logging | # Logging | ||||||
| log = "0.4.25" | log = "0.4.27" | ||||||
| fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] } | fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] } | ||||||
| tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work | tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work | ||||||
|  |  | ||||||
| @@ -52,12 +58,12 @@ tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and | |||||||
| dotenvy = { version = "0.15.7", default-features = false } | dotenvy = { version = "0.15.7", default-features = false } | ||||||
|  |  | ||||||
| # Lazy initialization | # Lazy initialization | ||||||
| once_cell = "1.20.2" | once_cell = "1.21.3" | ||||||
|  |  | ||||||
| # Numerical libraries | # Numerical libraries | ||||||
| num-traits = "0.2.19" | num-traits = "0.2.19" | ||||||
| num-derive = "0.4.2" | num-derive = "0.4.2" | ||||||
| bigdecimal = "0.4.7" | bigdecimal = "0.4.8" | ||||||
|  |  | ||||||
| # Web framework | # Web framework | ||||||
| rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } | rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } | ||||||
| @@ -71,114 +77,133 @@ dashmap = "6.1.0" | |||||||
|  |  | ||||||
| # Async futures | # Async futures | ||||||
| futures = "0.3.31" | futures = "0.3.31" | ||||||
| tokio = { version = "1.43.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | tokio = { version = "1.47.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | ||||||
|  | tokio-util = { version = "0.7.16", features = ["compat"]} | ||||||
|  |  | ||||||
| # A generic serialization/deserialization framework | # A generic serialization/deserialization framework | ||||||
| serde = { version = "1.0.217", features = ["derive"] } | serde = { version = "1.0.219", features = ["derive"] } | ||||||
| serde_json = "1.0.138" | serde_json = "1.0.143" | ||||||
|  |  | ||||||
| # A safe, extensible ORM and Query builder | # A safe, extensible ORM and Query builder | ||||||
| diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] } | diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] } | ||||||
| diesel_migrations = "2.2.0" | diesel_migrations = "2.2.0" | ||||||
| diesel_logger = { version = "0.4.0", optional = true } | diesel_logger = { version = "0.4.0", optional = true } | ||||||
|  |  | ||||||
| derive_more = { version = "2.0.0", features = ["from", "into", "as_ref", "deref", "display"] } | derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] } | ||||||
| diesel-derive-newtype = "2.1.2" | diesel-derive-newtype = "2.1.2" | ||||||
|  |  | ||||||
| # Bundled/Static SQLite | # Bundled/Static SQLite | ||||||
| libsqlite3-sys = { version = "0.31.0", features = ["bundled"], optional = true } | libsqlite3-sys = { version = "0.35.0", features = ["bundled"], optional = true } | ||||||
|  |  | ||||||
| # Crypto-related libraries | # Crypto-related libraries | ||||||
| rand = "0.9.0" | rand = "0.9.2" | ||||||
| ring = "0.17.8" | ring = "0.17.14" | ||||||
|  | subtle = "2.6.1" | ||||||
|  |  | ||||||
| # UUID generation | # UUID generation | ||||||
| uuid = { version = "1.12.1", features = ["v4"] } | uuid = { version = "1.18.0", features = ["v4"] } | ||||||
|  |  | ||||||
| # Date and time libraries | # Date and time libraries | ||||||
| chrono = { version = "0.4.39", features = ["clock", "serde"], default-features = false } | chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false } | ||||||
| chrono-tz = "0.10.1" | chrono-tz = "0.10.4" | ||||||
| time = "0.3.37" | time = "0.3.41" | ||||||
|  |  | ||||||
| # Job scheduler | # Job scheduler | ||||||
| job_scheduler_ng = "2.0.5" | job_scheduler_ng = "2.3.0" | ||||||
|  |  | ||||||
| # Data encoding library Hex/Base32/Base64 | # Data encoding library Hex/Base32/Base64 | ||||||
| data-encoding = "2.7.0" | data-encoding = "2.9.0" | ||||||
|  |  | ||||||
| # JWT library | # JWT library | ||||||
| jsonwebtoken = "9.3.0" | jsonwebtoken = "9.3.1" | ||||||
|  |  | ||||||
| # TOTP library | # TOTP library | ||||||
| totp-lite = "2.0.1" | totp-lite = "2.0.1" | ||||||
|  |  | ||||||
| # Yubico Library | # Yubico Library | ||||||
| yubico = { version = "0.12.0", features = ["online-tokio"], default-features = false } | yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false } | ||||||
|  |  | ||||||
| # WebAuthn libraries | # WebAuthn libraries | ||||||
| webauthn-rs = "0.3.2" | # danger-allow-state-serialisation is needed to save the state in the db | ||||||
|  | # danger-credential-internals is needed to support U2F to Webauthn migration | ||||||
|  | webauthn-rs = { version = "0.5.2", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } | ||||||
|  | webauthn-rs-proto = "0.5.2" | ||||||
|  | webauthn-rs-core = "0.5.2" | ||||||
|  |  | ||||||
| # Handling of URL's for WebAuthn and favicons | # Handling of URL's for WebAuthn and favicons | ||||||
| url = "2.5.4" | url = "2.5.7" | ||||||
|  |  | ||||||
| # Email libraries | # Email libraries | ||||||
| lettre = { version = "0.11.12", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } | lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false } | ||||||
| percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails | percent-encoding = "2.3.2" # URL encoding library used for URL's in the emails | ||||||
| email_address = "0.2.9" | email_address = "0.2.9" | ||||||
|  |  | ||||||
| # HTML Template library | # HTML Template library | ||||||
| handlebars = { version = "6.3.0", features = ["dir_source"] } | handlebars = { version = "6.3.2", features = ["dir_source"] } | ||||||
|  |  | ||||||
| # HTTP client (Used for favicons, version check, DUO and HIBP API) | # HTTP client (Used for favicons, version check, DUO and HIBP API) | ||||||
| reqwest = { version = "0.12.12", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] } | reqwest = { version = "0.12.23", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} | ||||||
| hickory-resolver = "0.24.2" | hickory-resolver = "0.25.2" | ||||||
|  |  | ||||||
| # Favicon extraction libraries | # Favicon extraction libraries | ||||||
| html5gum = "0.7.0" | html5gum = "0.8.0" | ||||||
| regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false } | regex = { version = "1.11.2", features = ["std", "perf", "unicode-perl"], default-features = false } | ||||||
| data-url = "0.3.1" | data-url = "0.3.2" | ||||||
| bytes = "1.10.0" | bytes = "1.10.1" | ||||||
|  | svg-hush = "0.9.5" | ||||||
|  |  | ||||||
| # Cache function results (Used for version check and favicon fetching) | # Cache function results (Used for version check and favicon fetching) | ||||||
| cached = { version = "0.54.0", features = ["async"] } | cached = { version = "0.56.0", features = ["async"] } | ||||||
|  |  | ||||||
| # Used for custom short lived cookie jar during favicon extraction | # Used for custom short lived cookie jar during favicon extraction | ||||||
| cookie = "0.18.1" | cookie = "0.18.1" | ||||||
| cookie_store = "0.21.1" | cookie_store = "0.21.1" | ||||||
|  |  | ||||||
| # Used by U2F, JWT and PostgreSQL | # Used by U2F, JWT and PostgreSQL | ||||||
| openssl = "0.10.70" | openssl = "0.10.73" | ||||||
|  |  | ||||||
| # CLI argument parsing | # CLI argument parsing | ||||||
| pico-args = "0.5.0" | pico-args = "0.5.0" | ||||||
|  |  | ||||||
| # Macro ident concatenation | # Macro ident concatenation | ||||||
| paste = "1.0.15" | pastey = "0.1.1" | ||||||
| governor = "0.8.0" | governor = "0.10.1" | ||||||
|  |  | ||||||
|  | # OIDC for SSO | ||||||
|  | openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] } | ||||||
|  | mini-moka = "0.10.3" | ||||||
|  |  | ||||||
| # Check client versions for specific features. | # Check client versions for specific features. | ||||||
| semver = "1.0.25" | semver = "1.0.26" | ||||||
|  |  | ||||||
| # Allow overriding the default memory allocator | # Allow overriding the default memory allocator | ||||||
| # Mainly used for the musl builds, since the default musl malloc is very slow | # Mainly used for the musl builds, since the default musl malloc is very slow | ||||||
| mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true } | mimalloc = { version = "0.1.48", features = ["secure"], default-features = false, optional = true } | ||||||
| which = "7.0.1" |  | ||||||
|  | which = "8.0.0" | ||||||
|  |  | ||||||
| # Argon2 library with support for the PHC format | # Argon2 library with support for the PHC format | ||||||
| argon2 = "0.5.3" | argon2 = "0.5.3" | ||||||
|  |  | ||||||
| # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN | # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN | ||||||
| rpassword = "7.3.1" | rpassword = "7.4.0" | ||||||
|  |  | ||||||
| # Loading a dynamic CSS Stylesheet | # Loading a dynamic CSS Stylesheet | ||||||
| grass_compiler = { version = "0.13.4", default-features = false } | grass_compiler = { version = "0.13.4", default-features = false } | ||||||
|  |  | ||||||
| [patch.crates-io] | # File are accessed through Apache OpenDAL | ||||||
| # Patch yubico to remove duplicate crates of older versions | opendal = { version = "0.54.0", features = ["services-fs"], default-features = false } | ||||||
| yubico = { git = "https://github.com/BlackDex/yubico-rs", rev = "00df14811f58155c0f02e3ab10f1570ed3e115c6" } |  | ||||||
|  | # For retrieving AWS credentials, including temporary SSO credentials | ||||||
|  | anyhow = { version = "1.0.99", optional = true } | ||||||
|  | aws-config = { version = "1.8.5", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } | ||||||
|  | aws-credential-types = { version = "1.2.5", optional = true } | ||||||
|  | aws-smithy-runtime-api = { version = "1.9.0", optional = true } | ||||||
|  | http = { version = "1.3.1", optional = true } | ||||||
|  | reqsign = { version = "0.16.5", optional = true } | ||||||
|  |  | ||||||
| # Strip debuginfo from the release builds | # Strip debuginfo from the release builds | ||||||
| # The symbols are the provide better panic traces | # The debug symbols are to provide better panic traces | ||||||
| # Also enable fat LTO and use 1 codegen unit for optimizations | # Also enable fat LTO and use 1 codegen unit for optimizations | ||||||
| [profile.release] | [profile.release] | ||||||
| strip = "debuginfo" | strip = "debuginfo" | ||||||
| @@ -213,7 +238,7 @@ codegen-units = 16 | |||||||
|  |  | ||||||
| # Linting config | # Linting config | ||||||
| # https://doc.rust-lang.org/rustc/lints/groups.html | # https://doc.rust-lang.org/rustc/lints/groups.html | ||||||
| [lints.rust] | [workspace.lints.rust] | ||||||
| # Forbid | # Forbid | ||||||
| unsafe_code = "forbid" | unsafe_code = "forbid" | ||||||
| non_ascii_idents = "forbid" | non_ascii_idents = "forbid" | ||||||
| @@ -243,11 +268,14 @@ if_let_rescope = "allow" | |||||||
| tail_expr_drop_order = "allow" | tail_expr_drop_order = "allow" | ||||||
|  |  | ||||||
| # https://rust-lang.github.io/rust-clippy/stable/index.html | # https://rust-lang.github.io/rust-clippy/stable/index.html | ||||||
| [lints.clippy] | [workspace.lints.clippy] | ||||||
| # Warn | # Warn | ||||||
| dbg_macro = "warn" | dbg_macro = "warn" | ||||||
| todo = "warn" | todo = "warn" | ||||||
|  |  | ||||||
|  | # Ignore/Allow | ||||||
|  | result_large_err = "allow" | ||||||
|  |  | ||||||
| # Deny | # Deny | ||||||
| case_sensitive_file_extension_comparisons = "deny" | case_sensitive_file_extension_comparisons = "deny" | ||||||
| cast_lossless = "deny" | cast_lossless = "deny" | ||||||
| @@ -255,6 +283,7 @@ clone_on_ref_ptr = "deny" | |||||||
| equatable_if_let = "deny" | equatable_if_let = "deny" | ||||||
| filter_map_next = "deny" | filter_map_next = "deny" | ||||||
| float_cmp_const = "deny" | float_cmp_const = "deny" | ||||||
|  | implicit_clone = "deny" | ||||||
| inefficient_to_string = "deny" | inefficient_to_string = "deny" | ||||||
| iter_on_empty_collections = "deny" | iter_on_empty_collections = "deny" | ||||||
| iter_on_single_items = "deny" | iter_on_single_items = "deny" | ||||||
| @@ -263,14 +292,12 @@ macro_use_imports = "deny" | |||||||
| manual_assert = "deny" | manual_assert = "deny" | ||||||
| manual_instant_elapsed = "deny" | manual_instant_elapsed = "deny" | ||||||
| manual_string_new = "deny" | manual_string_new = "deny" | ||||||
| match_on_vec_items = "deny" |  | ||||||
| match_wildcard_for_single_variants = "deny" | match_wildcard_for_single_variants = "deny" | ||||||
| mem_forget = "deny" | mem_forget = "deny" | ||||||
| needless_continue = "deny" | needless_continue = "deny" | ||||||
| needless_lifetimes = "deny" | needless_lifetimes = "deny" | ||||||
| option_option = "deny" | option_option = "deny" | ||||||
| string_add_assign = "deny" | string_add_assign = "deny" | ||||||
| string_to_string = "deny" |  | ||||||
| unnecessary_join = "deny" | unnecessary_join = "deny" | ||||||
| unnecessary_self_imports = "deny" | unnecessary_self_imports = "deny" | ||||||
| unnested_or_patterns = "deny" | unnested_or_patterns = "deny" | ||||||
| @@ -278,3 +305,6 @@ unused_async = "deny" | |||||||
| unused_self = "deny" | unused_self = "deny" | ||||||
| verbose_file_reads = "deny" | verbose_file_reads = "deny" | ||||||
| zero_sized_map_values = "deny" | zero_sized_map_values = "deny" | ||||||
|  |  | ||||||
|  | [lints] | ||||||
|  | workspace = true | ||||||
|   | |||||||
							
								
								
									
README.md: 24 changed lines
							| @@ -59,19 +59,21 @@ A nearly complete implementation of the Bitwarden Client API is provided, includ | |||||||
| ## Usage | ## Usage | ||||||
|  |  | ||||||
| > [!IMPORTANT] | > [!IMPORTANT] | ||||||
| > Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost. | > The web-vault requires the use a secure context for the [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API). | ||||||
| > | > That means it will only work via `http://localhost:8000` (using the port from the example below) or if you [enable HTTPS](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS). | ||||||
| >This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)). |  | ||||||
| > | The recommended way to install and use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). | ||||||
| >If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above). | See [which container image to use](https://github.com/dani-garcia/vaultwarden/wiki/Which-container-image-to-use) for an explanation of the provided tags. | ||||||
|  |  | ||||||
|  | There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki). | ||||||
|  |  | ||||||
|  | Alternatively, you can also [build Vaultwarden](https://github.com/dani-garcia/vaultwarden/wiki/Building-binary) yourself. | ||||||
|  |  | ||||||
|  | While Vaultwarden is based upon the [Rocket web framework](https://rocket.rs) which has built-in support for TLS our recommendation would be that you setup a reverse proxy (see [proxy examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)). | ||||||
|  |  | ||||||
| > [!TIP] | > [!TIP] | ||||||
| >**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).** | >**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).** | ||||||
|  |  | ||||||
| The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server). |  | ||||||
|  |  | ||||||
| There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki). |  | ||||||
|  |  | ||||||
| ### Docker/Podman CLI | ### Docker/Podman CLI | ||||||
|  |  | ||||||
| Pull the container image and mount a volume from the host for persistent storage.<br> | Pull the container image and mount a volume from the host for persistent storage.<br> | ||||||
| @@ -83,7 +85,7 @@ docker run --detach --name vaultwarden \ | |||||||
|   --env DOMAIN="https://vw.domain.tld" \ |   --env DOMAIN="https://vw.domain.tld" \ | ||||||
|   --volume /vw-data/:/data/ \ |   --volume /vw-data/:/data/ \ | ||||||
|   --restart unless-stopped \ |   --restart unless-stopped \ | ||||||
|   --publish 80:80 \ |   --publish 127.0.0.1:8000:80 \ | ||||||
|   vaultwarden/server:latest |   vaultwarden/server:latest | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| @@ -104,7 +106,7 @@ services: | |||||||
|     volumes: |     volumes: | ||||||
|       - ./vw-data/:/data/ |       - ./vw-data/:/data/ | ||||||
|     ports: |     ports: | ||||||
|       - 80:80 |       - 127.0.0.1:8000:80 | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| <br> | <br> | ||||||
|   | |||||||
							
								
								
									
build.rs: 7 changed lines
							| @@ -11,6 +11,8 @@ fn main() { | |||||||
|     println!("cargo:rustc-cfg=postgresql"); |     println!("cargo:rustc-cfg=postgresql"); | ||||||
|     #[cfg(feature = "query_logger")] |     #[cfg(feature = "query_logger")] | ||||||
|     println!("cargo:rustc-cfg=query_logger"); |     println!("cargo:rustc-cfg=query_logger"); | ||||||
|  |     #[cfg(feature = "s3")] | ||||||
|  |     println!("cargo:rustc-cfg=s3"); | ||||||
|  |  | ||||||
|     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] |     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] | ||||||
|     compile_error!( |     compile_error!( | ||||||
| @@ -23,6 +25,7 @@ fn main() { | |||||||
|     println!("cargo::rustc-check-cfg=cfg(mysql)"); |     println!("cargo::rustc-check-cfg=cfg(mysql)"); | ||||||
|     println!("cargo::rustc-check-cfg=cfg(postgresql)"); |     println!("cargo::rustc-check-cfg=cfg(postgresql)"); | ||||||
|     println!("cargo::rustc-check-cfg=cfg(query_logger)"); |     println!("cargo::rustc-check-cfg=cfg(query_logger)"); | ||||||
|  |     println!("cargo::rustc-check-cfg=cfg(s3)"); | ||||||
|  |  | ||||||
|     // Rerun when these paths are changed. |     // Rerun when these paths are changed. | ||||||
|     // Someone could have checked-out a tag or specific commit, but no other files changed. |     // Someone could have checked-out a tag or specific commit, but no other files changed. | ||||||
| @@ -48,8 +51,8 @@ fn main() { | |||||||
| fn run(args: &[&str]) -> Result<String, std::io::Error> { | fn run(args: &[&str]) -> Result<String, std::io::Error> { | ||||||
|     let out = Command::new(args[0]).args(&args[1..]).output()?; |     let out = Command::new(args[0]).args(&args[1..]).output()?; | ||||||
|     if !out.status.success() { |     if !out.status.success() { | ||||||
|         use std::io::{Error, ErrorKind}; |         use std::io::Error; | ||||||
|         return Err(Error::new(ErrorKind::Other, "Command not successful")); |         return Err(Error::other("Command not successful")); | ||||||
|     } |     } | ||||||
|     Ok(String::from_utf8(out.stdout).unwrap().trim().to_string()) |     Ok(String::from_utf8(out.stdout).unwrap().trim().to_string()) | ||||||
| } | } | ||||||
|   | |||||||
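For readers unfamiliar with the `rustc-cfg`/`rustc-check-cfg` pair added above: a minimal, hypothetical sketch of how crate code can be gated on the `s3` cfg that build.rs emits when the `s3` feature is enabled. The function name and messages are illustrative assumptions, not Vaultwarden's actual code.

```rust
// Illustrative sketch only (not Vaultwarden source): build.rs prints
// `cargo:rustc-cfg=s3` when built with `--features s3`, so source files
// can select S3-specific code paths at compile time.
#[cfg(s3)]
fn storage_backend() -> &'static str {
    // Compiled only when the crate was built with the `s3` feature.
    "opendal with the S3 service enabled"
}

#[cfg(not(s3))]
fn storage_backend() -> &'static str {
    // Default build: only the local filesystem service is compiled in.
    "opendal with the filesystem service only"
}

fn main() {
    println!("storage backend: {}", storage_backend());
}
```

The matching `cargo::rustc-check-cfg=cfg(s3)` line keeps newer compilers from flagging `cfg(s3)` as an unexpected cfg name.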
| @@ -1,13 +1,13 @@ | |||||||
| --- | --- | ||||||
| vault_version: "v2025.1.1" | vault_version: "v2025.8.0" | ||||||
| vault_image_digest: "sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918" | vault_image_digest: "sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d" | ||||||
| # Cross Compile Docker Helper Scripts v1.6.1 | # Cross Compile Docker Helper Scripts v1.6.1 | ||||||
| # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts | # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts | ||||||
| # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags | # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags | ||||||
| xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" | xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" | ||||||
| rust_version: 1.84.1 # Rust version to be used | rust_version: 1.89.0 # Rust version to be used | ||||||
| debian_version: bookworm # Debian release name to be used | debian_version: trixie # Debian release name to be used | ||||||
| alpine_version: "3.21" # Alpine version to be used | alpine_version: "3.22" # Alpine version to be used | ||||||
| # For which platforms/architectures will we try to build images | # For which platforms/architectures will we try to build images | ||||||
| platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] | platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] | ||||||
| # Determine the build images per OS/Arch | # Determine the build images per OS/Arch | ||||||
|   | |||||||
| @@ -19,23 +19,23 @@ | |||||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||||
| #   click the tag name to view the digest of the image it currently points to. | #   click the tag name to view the digest of the image it currently points to. | ||||||
| # - From the command line: | # - From the command line: | ||||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.1.1 | #     $ docker pull docker.io/vaultwarden/web-vault:v2025.8.0 | ||||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.1.1 | #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.8.0 | ||||||
| #     [docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918] | #     [docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d] | ||||||
| # | # | ||||||
| # - Conversely, to get the tag name from the digest: | # - Conversely, to get the tag name from the digest: | ||||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 | #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d | ||||||
| #     [docker.io/vaultwarden/web-vault:v2025.1.1] | #     [docker.io/vaultwarden/web-vault:v2025.8.0] | ||||||
| # | # | ||||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 AS vault | FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d AS vault | ||||||
|  |  | ||||||
| ########################## ALPINE BUILD IMAGES ########################## | ########################## ALPINE BUILD IMAGES ########################## | ||||||
| ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 | ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 | ||||||
| ## And for Alpine we define all build images here, they will only be loaded when actually used | ## And for Alpine we define all build images here, they will only be loaded when actually used | ||||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.84.1 AS build_amd64 | FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64 | ||||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.84.1 AS build_arm64 | FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64 | ||||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.84.1 AS build_armv7 | FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7 | ||||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.84.1 AS build_armv6 | FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6 | ||||||
|  |  | ||||||
| ########################## BUILD IMAGE ########################## | ########################## BUILD IMAGE ########################## | ||||||
| # hadolint ignore=DL3006 | # hadolint ignore=DL3006 | ||||||
| @@ -127,7 +127,7 @@ RUN source /env-cargo && \ | |||||||
| # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||||
| # | # | ||||||
| # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | ||||||
| FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.21 | FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22 | ||||||
|  |  | ||||||
| ENV ROCKET_PROFILE="release" \ | ENV ROCKET_PROFILE="release" \ | ||||||
|     ROCKET_ADDRESS=0.0.0.0 \ |     ROCKET_ADDRESS=0.0.0.0 \ | ||||||
|   | |||||||
| @@ -19,15 +19,15 @@ | |||||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||||
| #   click the tag name to view the digest of the image it currently points to. | #   click the tag name to view the digest of the image it currently points to. | ||||||
| # - From the command line: | # - From the command line: | ||||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.1.1 | #     $ docker pull docker.io/vaultwarden/web-vault:v2025.8.0 | ||||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.1.1 | #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.8.0 | ||||||
| #     [docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918] | #     [docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d] | ||||||
| # | # | ||||||
| # - Conversely, to get the tag name from the digest: | # - Conversely, to get the tag name from the digest: | ||||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 | #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d | ||||||
| #     [docker.io/vaultwarden/web-vault:v2025.1.1] | #     [docker.io/vaultwarden/web-vault:v2025.8.0] | ||||||
| # | # | ||||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025df5ff175a4918 AS vault | FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afcdbbe8eb9d57d AS vault | ||||||
|  |  | ||||||
| ########################## Cross Compile Docker Helper Scripts ########################## | ########################## Cross Compile Docker Helper Scripts ########################## | ||||||
| ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts | ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts | ||||||
| @@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd | |||||||
|  |  | ||||||
| ########################## BUILD IMAGE ########################## | ########################## BUILD IMAGE ########################## | ||||||
| # hadolint ignore=DL3006 | # hadolint ignore=DL3006 | ||||||
| FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.84.1-slim-bookworm AS build | FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build | ||||||
| COPY --from=xx / / | COPY --from=xx / / | ||||||
| ARG TARGETARCH | ARG TARGETARCH | ||||||
| ARG TARGETVARIANT | ARG TARGETVARIANT | ||||||
| @@ -68,15 +68,11 @@ RUN apt-get update && \ | |||||||
|     xx-apt-get install -y \ |     xx-apt-get install -y \ | ||||||
|         --no-install-recommends \ |         --no-install-recommends \ | ||||||
|         gcc \ |         gcc \ | ||||||
|         libmariadb3 \ |  | ||||||
|         libpq-dev \ |         libpq-dev \ | ||||||
|         libpq5 \ |         libpq5 \ | ||||||
|         libssl-dev \ |         libssl-dev \ | ||||||
|  |         libmariadb-dev \ | ||||||
|         zlib1g-dev && \ |         zlib1g-dev && \ | ||||||
|     # Force install arch dependend mariadb dev packages |  | ||||||
|     # Installing them the normal way breaks several other packages (again) |  | ||||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ |  | ||||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ |  | ||||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage |     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo |     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||||
|  |  | ||||||
| @@ -89,24 +85,24 @@ RUN USER=root cargo new --bin /app | |||||||
| WORKDIR /app | WORKDIR /app | ||||||
|  |  | ||||||
| # Environment variables for Cargo on Debian based builds | # Environment variables for Cargo on Debian based builds | ||||||
| ARG ARCH_OPENSSL_LIB_DIR \ | ARG TARGET_PKG_CONFIG_PATH | ||||||
|     ARCH_OPENSSL_INCLUDE_DIR |  | ||||||
|  |  | ||||||
| RUN source /env-cargo && \ | RUN source /env-cargo && \ | ||||||
|     if xx-info is-cross ; then \ |     if xx-info is-cross ; then \ | ||||||
|         # Some special variables if needed to override some build paths |  | ||||||
|         if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ |  | ||||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ |  | ||||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ |  | ||||||
|         fi && \ |  | ||||||
|         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. |         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. | ||||||
|         # Because of this we generate the needed environment variables here which we can load in the needed steps. |         # Because of this we generate the needed environment variables here which we can load in the needed steps. | ||||||
|         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ |         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||||
|         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ |         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||||
|         echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \ |  | ||||||
|         echo "export CROSS_COMPILE=1" >> /env-cargo && \ |         echo "export CROSS_COMPILE=1" >> /env-cargo && \ | ||||||
|         echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \ |         echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ | ||||||
|         echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \ |         # For some architectures `xx-info` returns a triple which doesn't matches the path on disk | ||||||
|  |         # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg | ||||||
|  |         if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \ | ||||||
|  |             echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \ | ||||||
|  |         else \ | ||||||
|  |             echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \ | ||||||
|  |         fi && \ | ||||||
|  |         echo "# End of env-cargo" >> /env-cargo ; \ | ||||||
|     fi && \ |     fi && \ | ||||||
|     # Output the current contents of the file |     # Output the current contents of the file | ||||||
|     cat /env-cargo |     cat /env-cargo | ||||||
| @@ -166,7 +162,7 @@ RUN source /env-cargo && \ | |||||||
| # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||||
| # | # | ||||||
| # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | ||||||
| FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim | FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim | ||||||
|  |  | ||||||
| ENV ROCKET_PROFILE="release" \ | ENV ROCKET_PROFILE="release" \ | ||||||
|     ROCKET_ADDRESS=0.0.0.0 \ |     ROCKET_ADDRESS=0.0.0.0 \ | ||||||
| @@ -179,7 +175,7 @@ RUN mkdir /data && \ | |||||||
|         --no-install-recommends \ |         --no-install-recommends \ | ||||||
|         ca-certificates \ |         ca-certificates \ | ||||||
|         curl \ |         curl \ | ||||||
|         libmariadb-dev-compat \ |         libmariadb-dev \ | ||||||
|         libpq5 \ |         libpq5 \ | ||||||
|         openssl && \ |         openssl && \ | ||||||
|     apt-get clean && \ |     apt-get clean && \ | ||||||
|   | |||||||
| @@ -86,15 +86,11 @@ RUN apt-get update && \ | |||||||
|     xx-apt-get install -y \ |     xx-apt-get install -y \ | ||||||
|         --no-install-recommends \ |         --no-install-recommends \ | ||||||
|         gcc \ |         gcc \ | ||||||
|         libmariadb3 \ |  | ||||||
|         libpq-dev \ |         libpq-dev \ | ||||||
|         libpq5 \ |         libpq5 \ | ||||||
|         libssl-dev \ |         libssl-dev \ | ||||||
|  |         libmariadb-dev \ | ||||||
|         zlib1g-dev && \ |         zlib1g-dev && \ | ||||||
|     # Force install arch dependend mariadb dev packages |  | ||||||
|     # Installing them the normal way breaks several other packages (again) |  | ||||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ |  | ||||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ |  | ||||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage |     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo |     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||||
| {% endif %} | {% endif %} | ||||||
| @@ -109,24 +105,24 @@ WORKDIR /app | |||||||
|  |  | ||||||
| {% if base == "debian" %} | {% if base == "debian" %} | ||||||
| # Environment variables for Cargo on Debian based builds | # Environment variables for Cargo on Debian based builds | ||||||
| ARG ARCH_OPENSSL_LIB_DIR \ | ARG TARGET_PKG_CONFIG_PATH | ||||||
|     ARCH_OPENSSL_INCLUDE_DIR |  | ||||||
|  |  | ||||||
| RUN source /env-cargo && \ | RUN source /env-cargo && \ | ||||||
|     if xx-info is-cross ; then \ |     if xx-info is-cross ; then \ | ||||||
|         # Some special variables if needed to override some build paths |  | ||||||
|         if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ |  | ||||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ |  | ||||||
|             echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ |  | ||||||
|         fi && \ |  | ||||||
|         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. |         # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. | ||||||
|         # Because of this we generate the needed environment variables here which we can load in the needed steps. |         # Because of this we generate the needed environment variables here which we can load in the needed steps. | ||||||
|         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ |         echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||||
|         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ |         echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ | ||||||
|         echo "export PKG_CONFIG=/usr/bin/$(xx-info)-pkg-config" >> /env-cargo && \ |  | ||||||
|         echo "export CROSS_COMPILE=1" >> /env-cargo && \ |         echo "export CROSS_COMPILE=1" >> /env-cargo && \ | ||||||
|         echo "export OPENSSL_INCLUDE_DIR=/usr/include/$(xx-info)" >> /env-cargo && \ |         echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \ | ||||||
|         echo "export OPENSSL_LIB_DIR=/usr/lib/$(xx-info)" >> /env-cargo ; \ |         # For some architectures `xx-info` returns a triple which doesn't matches the path on disk | ||||||
|  |         # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg | ||||||
|  |         if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \ | ||||||
|  |             echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \ | ||||||
|  |         else \ | ||||||
|  |             echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \ | ||||||
|  |         fi && \ | ||||||
|  |         echo "# End of env-cargo" >> /env-cargo ; \ | ||||||
|     fi && \ |     fi && \ | ||||||
|     # Output the current contents of the file |     # Output the current contents of the file | ||||||
|     cat /env-cargo |     cat /env-cargo | ||||||
| @@ -216,7 +212,7 @@ RUN mkdir /data && \ | |||||||
|         --no-install-recommends \ |         --no-install-recommends \ | ||||||
|         ca-certificates \ |         ca-certificates \ | ||||||
|         curl \ |         curl \ | ||||||
|         libmariadb-dev-compat \ |         libmariadb-dev \ | ||||||
|         libpq5 \ |         libpq5 \ | ||||||
|         openssl && \ |         openssl && \ | ||||||
|     apt-get clean && \ |     apt-get clean && \ | ||||||
|   | |||||||
| @@ -133,8 +133,7 @@ target "debian-386" { | |||||||
|   platforms = ["linux/386"] |   platforms = ["linux/386"] | ||||||
|   tags = generate_tags("", "-386") |   tags = generate_tags("", "-386") | ||||||
|   args = { |   args = { | ||||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/i386-linux-gnu" |     TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig" | ||||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/i386-linux-gnu" |  | ||||||
|   } |   } | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -142,20 +141,12 @@ target "debian-ppc64le" { | |||||||
|   inherits = ["debian"] |   inherits = ["debian"] | ||||||
|   platforms = ["linux/ppc64le"] |   platforms = ["linux/ppc64le"] | ||||||
|   tags = generate_tags("", "-ppc64le") |   tags = generate_tags("", "-ppc64le") | ||||||
|   args = { |  | ||||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/powerpc64le-linux-gnu" |  | ||||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/powerpc64le-linux-gnu" |  | ||||||
|   } |  | ||||||
| } | } | ||||||
|  |  | ||||||
| target "debian-s390x" { | target "debian-s390x" { | ||||||
|   inherits = ["debian"] |   inherits = ["debian"] | ||||||
|   platforms = ["linux/s390x"] |   platforms = ["linux/s390x"] | ||||||
|   tags = generate_tags("", "-s390x") |   tags = generate_tags("", "-s390x") | ||||||
|   args = { |  | ||||||
|     ARCH_OPENSSL_LIB_DIR = "/usr/lib/s390x-linux-gnu" |  | ||||||
|     ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/s390x-linux-gnu" |  | ||||||
|   } |  | ||||||
| } | } | ||||||
| // ==== End of unsupported Debian architecture targets === | // ==== End of unsupported Debian architecture targets === | ||||||
|  |  | ||||||
|   | |||||||
| @@ -9,5 +9,8 @@ path = "src/lib.rs" | |||||||
| proc-macro = true | proc-macro = true | ||||||
|  |  | ||||||
| [dependencies] | [dependencies] | ||||||
| quote = "1.0.38" | quote = "1.0.40" | ||||||
| syn = "2.0.98" | syn = "2.0.105" | ||||||
|  |  | ||||||
|  | [lints] | ||||||
|  | workspace = true | ||||||
|   | |||||||
| @@ -1,5 +1,3 @@ | |||||||
| extern crate proc_macro; |  | ||||||
|  |  | ||||||
| use proc_macro::TokenStream; | use proc_macro::TokenStream; | ||||||
| use quote::quote; | use quote::quote; | ||||||
|  |  | ||||||
| @@ -12,7 +10,7 @@ pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream { | |||||||
|  |  | ||||||
| fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream { | fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream { | ||||||
|     let name = &ast.ident; |     let name = &ast.ident; | ||||||
|     let gen = quote! { |     let gen_derive = quote! { | ||||||
|         #[automatically_derived] |         #[automatically_derived] | ||||||
|         impl<'r> rocket::request::FromParam<'r> for #name { |         impl<'r> rocket::request::FromParam<'r> for #name { | ||||||
|             type Error = (); |             type Error = (); | ||||||
| @@ -27,7 +25,7 @@ fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream { | |||||||
|             } |             } | ||||||
|         } |         } | ||||||
|     }; |     }; | ||||||
|     gen.into() |     gen_derive.into() | ||||||
| } | } | ||||||
|  |  | ||||||
| #[proc_macro_derive(IdFromParam)] | #[proc_macro_derive(IdFromParam)] | ||||||
| @@ -39,7 +37,7 @@ pub fn derive_id_from_param(input: TokenStream) -> TokenStream { | |||||||
|  |  | ||||||
| fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream { | fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream { | ||||||
|     let name = &ast.ident; |     let name = &ast.ident; | ||||||
|     let gen = quote! { |     let gen_derive = quote! { | ||||||
|         #[automatically_derived] |         #[automatically_derived] | ||||||
|         impl<'r> rocket::request::FromParam<'r> for #name { |         impl<'r> rocket::request::FromParam<'r> for #name { | ||||||
|             type Error = (); |             type Error = (); | ||||||
| @@ -54,5 +52,5 @@ fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream { | |||||||
|             } |             } | ||||||
|         } |         } | ||||||
|     }; |     }; | ||||||
|     gen.into() |     gen_derive.into() | ||||||
| } | } | ||||||
|   | |||||||
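A hedged usage sketch for the derive shown above: `IdFromParam` generates a `rocket::request::FromParam` impl, so a newtype identifier can be used directly as a typed route parameter. The type name, tuple-struct layout, and route below are assumptions made for illustration, not code from this repository.

```rust
// Illustrative sketch only: assumes the derive is applied to a newtype
// wrapping a String and that the generated FromParam impl accepts the
// raw path segment after validation.
use macros::IdFromParam;

#[derive(IdFromParam)]
struct CipherId(String);

// Rocket parses the `<cipher_id>` path segment into the newtype via FromParam.
#[rocket::get("/ciphers/<cipher_id>")]
fn get_cipher(cipher_id: CipherId) -> String {
    format!("requested cipher {}", cipher_id.0)
}
```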
							
								
								
									
migrations/mysql/2023-09-10-133000_add_sso/down.sql: 1 added line (new file)
							| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
							
								
								
									
migrations/mysql/2023-09-10-133000_add_sso/up.sql: 4 added lines (new file)
							| @@ -0,0 +1,4 @@ | |||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||||
| @@ -0,0 +1,6 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  | 	state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||||
|  |   	nonce               TEXT NOT NULL, | ||||||
|  |   	redirect_uri 		TEXT NOT NULL, | ||||||
|  |   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||||
|  |     nonce               TEXT NOT NULL, | ||||||
|  |     redirect_uri        TEXT NOT NULL, | ||||||
|  |     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||||
|  |   	nonce               TEXT NOT NULL, | ||||||
|  |     verifier            TEXT, | ||||||
|  |   	redirect_uri 		TEXT NOT NULL, | ||||||
|  |   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_users; | ||||||
							
								
								
									
migrations/mysql/2024-03-06-170000_add_sso_users/up.sql: 7 added lines (new file)
							| @@ -0,0 +1,7 @@ | |||||||
|  | CREATE TABLE sso_users ( | ||||||
|  |   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   identifier          VARCHAR(768) NOT NULL UNIQUE, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||||
|  |  | ||||||
|  |   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||||
|  | ); | ||||||
| @@ -0,0 +1,2 @@ | |||||||
|  | ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; | ||||||
|  | ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||||
							
								
								
									
migrations/postgresql/2023-09-10-133000_add_sso/down.sql | 1 (new file)
							| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
							
								
								
									
migrations/postgresql/2023-09-10-133000_add_sso/up.sql | 4 (new file)
							| @@ -0,0 +1,4 @@ | |||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||||
| @@ -0,0 +1,6 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |     state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |     nonce               TEXT NOT NULL, | ||||||
|  |     redirect_uri        TEXT NOT NULL, | ||||||
|  |     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |     state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |     nonce               TEXT NOT NULL, | ||||||
|  |     redirect_uri        TEXT NOT NULL, | ||||||
|  |     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |     state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |     nonce               TEXT NOT NULL, | ||||||
|  |     verifier            TEXT, | ||||||
|  |     redirect_uri        TEXT NOT NULL, | ||||||
|  |     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_users; | ||||||
| @@ -0,0 +1,7 @@ | |||||||
|  | CREATE TABLE sso_users ( | ||||||
|  |   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   identifier          TEXT NOT NULL UNIQUE, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||||
|  |  | ||||||
|  |   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||||
|  | ); | ||||||
| @@ -0,0 +1,3 @@ | |||||||
|  | ALTER TABLE sso_users | ||||||
|  |   DROP CONSTRAINT "sso_users_user_uuid_fkey", | ||||||
|  |   ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||||
							
								
								
									
migrations/sqlite/2023-09-10-133000_add_sso/down.sql | 1 (new file)
							| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
							
								
								
									
migrations/sqlite/2023-09-10-133000_add_sso/up.sql | 4 (new file)
							| @@ -0,0 +1,4 @@ | |||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||||
| @@ -0,0 +1,6 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |   nonce               TEXT NOT NULL, | ||||||
|  |   redirect_uri        TEXT NOT NULL, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1,8 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |   nonce               TEXT NOT NULL, | ||||||
|  |   redirect_uri        TEXT NOT NULL, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_nonce; | ||||||
|  |  | ||||||
|  | CREATE TABLE sso_nonce ( | ||||||
|  |   state               TEXT NOT NULL PRIMARY KEY, | ||||||
|  |   nonce               TEXT NOT NULL, | ||||||
|  |   verifier            TEXT, | ||||||
|  |   redirect_uri        TEXT NOT NULL, | ||||||
|  |   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||||
|  | ); | ||||||
| @@ -0,0 +1 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_users; | ||||||
							
								
								
									
migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql | 7 (new file)
							| @@ -0,0 +1,7 @@ | |||||||
|  | CREATE TABLE sso_users ( | ||||||
|  |   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   identifier          TEXT NOT NULL UNIQUE, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||||
|  |  | ||||||
|  |   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||||
|  | ); | ||||||
| @@ -0,0 +1,9 @@ | |||||||
|  | DROP TABLE IF EXISTS sso_users; | ||||||
|  |  | ||||||
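|  | -- Recreate sso_users so the foreign key cascades on user update and delete | ||||||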
|  | CREATE TABLE sso_users ( | ||||||
|  |   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||||
|  |   identifier          TEXT NOT NULL UNIQUE, | ||||||
|  |   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||||
|  |  | ||||||
|  |   FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE | ||||||
|  | ); | ||||||
							
								
								
									
playwright/.env.template | 64 (new file)
							| @@ -0,0 +1,64 @@ | |||||||
|  | ################################# | ||||||
|  | ### Conf to run dev instances ### | ||||||
|  | ################################# | ||||||
|  | ENV=dev | ||||||
|  | DC_ENV_FILE=.env | ||||||
|  | COMPOSE_IGNORE_ORPHANS=True | ||||||
|  | DOCKER_BUILDKIT=1 | ||||||
|  |  | ||||||
|  | ################ | ||||||
|  | # Users Config # | ||||||
|  | ################ | ||||||
|  | TEST_USER=test | ||||||
|  | TEST_USER_PASSWORD=${TEST_USER} | ||||||
|  | TEST_USER_MAIL=${TEST_USER}@yopmail.com | ||||||
|  |  | ||||||
|  | TEST_USER2=test2 | ||||||
|  | TEST_USER2_PASSWORD=${TEST_USER2} | ||||||
|  | TEST_USER2_MAIL=${TEST_USER2}@yopmail.com | ||||||
|  |  | ||||||
|  | TEST_USER3=test3 | ||||||
|  | TEST_USER3_PASSWORD=${TEST_USER3} | ||||||
|  | TEST_USER3_MAIL=${TEST_USER3}@yopmail.com | ||||||
|  |  | ||||||
|  | ################### | ||||||
|  | # Keycloak Config # | ||||||
|  | ################### | ||||||
|  | KEYCLOAK_ADMIN=admin | ||||||
|  | KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||||
|  | KC_HTTP_HOST=127.0.0.1 | ||||||
|  | KC_HTTP_PORT=8080 | ||||||
|  |  | ||||||
|  | # Script parameters (use Keycloak and Vaultwarden config too) | ||||||
|  | TEST_REALM=test | ||||||
|  | DUMMY_REALM=dummy | ||||||
|  | DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||||
|  |  | ||||||
|  | ###################### | ||||||
|  | # Vaultwarden Config # | ||||||
|  | ###################### | ||||||
|  | ROCKET_ADDRESS=0.0.0.0 | ||||||
|  | ROCKET_PORT=8000 | ||||||
|  | DOMAIN=http://localhost:${ROCKET_PORT} | ||||||
|  | LOG_LEVEL=info,oidcwarden::sso=debug | ||||||
|  | I_REALLY_WANT_VOLATILE_STORAGE=true | ||||||
|  |  | ||||||
|  | SSO_ENABLED=true | ||||||
|  | SSO_ONLY=false | ||||||
|  | SSO_CLIENT_ID=warden | ||||||
|  | SSO_CLIENT_SECRET=warden | ||||||
|  | SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||||
|  |  | ||||||
|  | SMTP_HOST=127.0.0.1 | ||||||
|  | SMTP_PORT=1025 | ||||||
|  | SMTP_SECURITY=off | ||||||
|  | SMTP_TIMEOUT=5 | ||||||
|  | SMTP_FROM=vaultwarden@test | ||||||
|  | SMTP_FROM_NAME=Vaultwarden | ||||||
|  |  | ||||||
|  | ######################################################## | ||||||
|  | # DUMMY values for docker-compose to stop bothering us # | ||||||
|  | ######################################################## | ||||||
|  | MARIADB_PORT=3305 | ||||||
|  | MYSQL_PORT=3307 | ||||||
|  | POSTGRES_PORT=5432 | ||||||
							
								
								
									
playwright/.gitignore | 6 (vendored, new file)
							| @@ -0,0 +1,6 @@ | |||||||
|  | logs | ||||||
|  | node_modules/ | ||||||
|  | /test-results/ | ||||||
|  | /playwright-report/ | ||||||
|  | /playwright/.cache/ | ||||||
|  | temp | ||||||
							
								
								
									
playwright/README.md | 177 (new file)
							| @@ -0,0 +1,177 @@ | |||||||
|  | # Integration tests | ||||||
|  |  | ||||||
|  | This allows running integration tests using [Playwright](https://playwright.dev/). | ||||||
|  |  | ||||||
|  | It uses its own `test.env` with different ports so it does not collide with a running dev instance. | ||||||
|  |  | ||||||
|  | ## Install | ||||||
|  |  | ||||||
|  | This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||||
|  | Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers. | ||||||
|  |  | ||||||
|  | ### Running Playwright outside docker | ||||||
|  |  | ||||||
|  | It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change. | ||||||
|  | You will additionally need `nodejs`; then run: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | npm install | ||||||
|  | npx playwright install-deps | ||||||
|  | npx playwright install firefox | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Usage | ||||||
|  |  | ||||||
|  | To run all the tests: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | To force a rebuild of the Playwright image: | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | To access the UI to easily run tests individually and debug if needed (this will not work in docker): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | npx playwright test --ui | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### DB | ||||||
|  |  | ||||||
|  | Projects are configured to allow running tests against a specific database. | ||||||
|  |  | ||||||
|  | You can use: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### SSO | ||||||
|  |  | ||||||
|  | To run the SSO tests: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Keep services running | ||||||
|  |  | ||||||
|  | If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | PW_KEEP_SERVICE_RUNNNING=true npx playwright test | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ### Running specific tests | ||||||
|  |  | ||||||
|  | To run a whole file you can use: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | To run only a specific test (it might fail if it has dependencies): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation" | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16 | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Writing scenario | ||||||
|  |  | ||||||
|  | When creating a new scenario, use the recorder to more easily identify elements | ||||||
|  | (in general, try to rely on visible hints to identify elements and not on hidden IDs). | ||||||
|  | This does not start the server; you will need to start it manually. | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||||
|  | npx playwright codegen "http://127.0.0.1:8003" | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Override web-vault | ||||||
|  |  | ||||||
|  | It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit. | ||||||
|  |  | ||||||
|  | Simplest is to set and uncomment `PW_WV_REPO_URL` and `PW_WV_COMMIT_HASH` in the `test.env`. | ||||||
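|  |  | ||||||
|  | For example (the commit hash below is just a placeholder): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git | ||||||
|  | PW_WV_COMMIT_HASH=<commit to test> | ||||||
|  | ``` | ||||||
|  |  | ||||||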
|  | Ensure that the image is built with: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Vaultwarden | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | You can check the result by running: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | # OpenID Connect test setup | ||||||
|  |  | ||||||
|  | Additionally, this `docker-compose` template allows running Vaultwarden, | ||||||
|  | [Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC. | ||||||
|  |  | ||||||
|  | ## Setup | ||||||
|  |  | ||||||
|  | This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||||
|  | First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`). | ||||||
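|  |  | ||||||
|  | For example: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | cp .env.template .env | ||||||
|  | ``` | ||||||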
|  |  | ||||||
|  | ## Usage | ||||||
|  |  | ||||||
|  | Then start the stack (the `profile` is required to run `Vaultwarden`): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | > docker compose --profile vaultwarden --env-file .env up | ||||||
|  | .... | ||||||
|  | keycloakSetup_1  | Logging into http://127.0.0.1:8080 as user admin of realm master | ||||||
|  | keycloakSetup_1  | Created new realm with id 'test' | ||||||
|  | keycloakSetup_1  | 74af4933-e386-4e64-ba15-a7b61212c45e | ||||||
|  | oidc_keycloakSetup_1 exited with code 0 | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates that the Keycloak realm, client and user were set up correctly | ||||||
|  | (it is normal for this container to stop once the configuration is done). | ||||||
|  |  | ||||||
|  | Then you can access: | ||||||
|  |  | ||||||
|  | - `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`. | ||||||
|  | - `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin` | ||||||
|  | - `Maildev` on http://0.0.0.0:1080 | ||||||
|  |  | ||||||
|  | To proceed with an SSO login, enter the email; the SSO button should then be visible on the screen prompting for the `Master Password`. | ||||||
|  | To use your computer's external IP (for example when testing with a phone), you will have to configure `KC_HTTP_HOST` and `DOMAIN`. | ||||||
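|  |  | ||||||
|  | A minimal sketch of the overrides in `.env` (the IP below is a placeholder for your machine's address): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | KC_HTTP_HOST=192.168.1.10 | ||||||
|  | DOMAIN=http://192.168.1.10:8000 | ||||||
|  | ``` | ||||||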
|  |  | ||||||
|  | ## Running only Keycloak | ||||||
|  |  | ||||||
|  | You can run just `Keycloak` with `--profile keycloak`: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | > docker compose --profile keycloak --env-file .env up | ||||||
|  | ``` | ||||||
|  | When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases). | ||||||
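|  |  | ||||||
|  | A minimal sketch of pointing such a locally running Vaultwarden at this Keycloak (the client values come from `.env.template`; `WEB_VAULT_FOLDER`, the path and `cargo run` are assumptions about your local setup): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | SSO_ENABLED=true \ | ||||||
|  | SSO_AUTHORITY=http://127.0.0.1:8080/realms/test \ | ||||||
|  | SSO_CLIENT_ID=warden \ | ||||||
|  | SSO_CLIENT_SECRET=warden \ | ||||||
|  | WEB_VAULT_FOLDER=path/to/web-vault \ | ||||||
|  | cargo run | ||||||
|  | ``` | ||||||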
|  |  | ||||||
|  | ## Rebuilding Vaultwarden | ||||||
|  |  | ||||||
|  | To force rebuilding the Vaultwarden image, you can run: | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden | ||||||
|  | ``` | ||||||
|  |  | ||||||
|  | ## Configuration | ||||||
|  |  | ||||||
|  | All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template). | ||||||
|  | The content of the file will be loaded as environment variables in all containers. | ||||||
|  |  | ||||||
|  | - `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)). | ||||||
|  | - All `Vaultwarden` configuration can be set (EX: `SMTP_*`) | ||||||
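|  |  | ||||||
|  | For example, a few overrides you might put in `.env` (values are only illustrative): | ||||||
|  |  | ||||||
|  | ```bash | ||||||
|  | KC_HTTP_PORT=8081 | ||||||
|  | SMTP_FROM=vaultwarden@example.test | ||||||
|  | SMTP_DEBUG=true | ||||||
|  | ``` | ||||||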
|  |  | ||||||
|  | ## Cleanup | ||||||
|  |  | ||||||
|  | Use `docker compose --profile vaultwarden down`. | ||||||
							
								
								
									
playwright/compose/keycloak/Dockerfile | 40 (new file)
							| @@ -0,0 +1,40 @@ | |||||||
|  | FROM docker.io/library/debian:bookworm-slim as build | ||||||
|  |  | ||||||
|  | ENV DEBIAN_FRONTEND=noninteractive | ||||||
|  | ARG KEYCLOAK_VERSION | ||||||
|  |  | ||||||
|  | SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||||
|  |  | ||||||
|  | RUN apt-get update \ | ||||||
|  |     && apt-get install -y ca-certificates curl wget \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | WORKDIR / | ||||||
|  |  | ||||||
|  | RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz | ||||||
|  |  | ||||||
|  | FROM docker.io/library/debian:bookworm-slim | ||||||
|  |  | ||||||
|  | ENV DEBIAN_FRONTEND=noninteractive | ||||||
|  | ARG KEYCLOAK_VERSION | ||||||
|  |  | ||||||
|  | SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||||
|  |  | ||||||
|  | RUN apt-get update \ | ||||||
|  |     && apt-get install -y ca-certificates curl wget \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | ARG JAVA_URL | ||||||
|  | ARG JAVA_VERSION | ||||||
|  |  | ||||||
|  | ENV JAVA_VERSION=${JAVA_VERSION} | ||||||
|  |  | ||||||
|  | RUN mkdir -p /opt/openjdk && cd /opt/openjdk \ | ||||||
|  |     && wget -c "${JAVA_URL}"  -O - | tar -xz | ||||||
|  |  | ||||||
|  | WORKDIR / | ||||||
|  |  | ||||||
|  | COPY setup.sh /setup.sh | ||||||
|  | COPY --from=build /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin | ||||||
|  |  | ||||||
|  | CMD "/setup.sh" | ||||||
							
								
								
									
playwright/compose/keycloak/setup.sh | 36 (new executable file)
							| @@ -0,0 +1,36 @@ | |||||||
|  | #!/bin/bash | ||||||
|  |  | ||||||
|  | export PATH=/opt/keycloak/bin:/opt/openjdk/jdk-${JAVA_VERSION}/bin:$PATH | ||||||
|  | export JAVA_HOME=/opt/openjdk/jdk-${JAVA_VERSION} | ||||||
|  |  | ||||||
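|  | # Wait for Keycloak to come up: a 404 for the dummy realm means Keycloak is running but not yet configured, | ||||||
|  | # while a 200 means a previous run already created it and the setup can be skipped. | ||||||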
|  | STATUS_CODE=0 | ||||||
|  | while [[ "$STATUS_CODE" != "404" ]] ; do | ||||||
|  |     echo "Will retry in 2 seconds" | ||||||
|  |     sleep 2 | ||||||
|  |  | ||||||
|  |     STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}"  "$DUMMY_AUTHORITY") | ||||||
|  |  | ||||||
|  |     if [[ "$STATUS_CODE" = "200" ]]; then | ||||||
|  |         echo "Setup should already be done. Will not run." | ||||||
|  |         exit 0 | ||||||
|  |     fi | ||||||
|  | done | ||||||
|  |  | ||||||
|  | set -e | ||||||
|  |  | ||||||
|  | kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli | ||||||
|  |  | ||||||
|  | kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||||
|  | kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i | ||||||
|  |  | ||||||
|  | TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||||
|  | kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n | ||||||
|  |  | ||||||
|  | TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||||
|  | kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n | ||||||
|  |  | ||||||
|  | TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||||
|  | kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n | ||||||
|  |  | ||||||
|  | # Dummy realm to mark end of setup | ||||||
|  | kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||||
							
								
								
									
playwright/compose/playwright/Dockerfile | 40 (new file)
							| @@ -0,0 +1,40 @@ | |||||||
|  | FROM docker.io/library/debian:bookworm-slim | ||||||
|  |  | ||||||
|  | SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||||
|  |  | ||||||
|  | ENV DEBIAN_FRONTEND=noninteractive | ||||||
|  |  | ||||||
|  | RUN apt-get update \ | ||||||
|  |     && apt-get install -y ca-certificates curl \ | ||||||
|  |     && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \ | ||||||
|  |     && chmod a+r /etc/apt/keyrings/docker.asc \ | ||||||
|  |     && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" | tee /etc/apt/sources.list.d/docker.list \ | ||||||
|  |     && apt-get update \ | ||||||
|  |     && apt-get install -y --no-install-recommends \ | ||||||
|  |         containerd.io \ | ||||||
|  |         docker-buildx-plugin \ | ||||||
|  |         docker-ce \ | ||||||
|  |         docker-ce-cli \ | ||||||
|  |         docker-compose-plugin \ | ||||||
|  |         git \ | ||||||
|  |         libmariadb-dev-compat \ | ||||||
|  |         libpq5 \ | ||||||
|  |         nodejs \ | ||||||
|  |         npm \ | ||||||
|  |         openssl \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | RUN mkdir /playwright | ||||||
|  | WORKDIR /playwright | ||||||
|  |  | ||||||
|  | COPY package.json . | ||||||
|  | RUN npm install && npx playwright install-deps && npx playwright install firefox | ||||||
|  |  | ||||||
|  | COPY docker-compose.yml test.env ./ | ||||||
|  | COPY compose ./compose | ||||||
|  |  | ||||||
|  | COPY *.ts test.env ./ | ||||||
|  | COPY tests ./tests | ||||||
|  |  | ||||||
|  | ENTRYPOINT ["/usr/bin/npx", "playwright"] | ||||||
|  | CMD ["test"] | ||||||
							
								
								
									
playwright/compose/warden/Dockerfile | 40 (new file)
							| @@ -0,0 +1,40 @@ | |||||||
|  | FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt | ||||||
|  |  | ||||||
|  | FROM node:22-trixie AS build | ||||||
|  |  | ||||||
|  | ARG REPO_URL | ||||||
|  | ARG COMMIT_HASH | ||||||
|  |  | ||||||
|  | ENV REPO_URL=$REPO_URL | ||||||
|  | ENV COMMIT_HASH=$COMMIT_HASH | ||||||
|  |  | ||||||
|  | COPY --from=prebuilt /web-vault /web-vault | ||||||
|  |  | ||||||
|  | COPY build.sh /build.sh | ||||||
|  | RUN /build.sh | ||||||
|  |  | ||||||
|  | ######################## RUNTIME IMAGE  ######################## | ||||||
|  | FROM docker.io/library/debian:trixie-slim | ||||||
|  |  | ||||||
|  | ENV DEBIAN_FRONTEND=noninteractive | ||||||
|  |  | ||||||
|  | # Create data folder and Install needed libraries | ||||||
|  | RUN mkdir /data && \ | ||||||
|  |     apt-get update && apt-get install -y \ | ||||||
|  |         --no-install-recommends \ | ||||||
|  |         ca-certificates \ | ||||||
|  |         curl \ | ||||||
|  |         libmariadb-dev \ | ||||||
|  |         libpq5 \ | ||||||
|  |         openssl && \ | ||||||
|  |     rm -rf /var/lib/apt/lists/* | ||||||
|  |  | ||||||
|  | # Copies the files from the context (Rocket.toml file and web-vault) | ||||||
|  | # and the binary from the "build" stage to the current stage | ||||||
|  | WORKDIR / | ||||||
|  |  | ||||||
|  | COPY --from=prebuilt /start.sh . | ||||||
|  | COPY --from=prebuilt /vaultwarden . | ||||||
|  | COPY --from=build /web-vault ./web-vault | ||||||
|  |  | ||||||
|  | ENTRYPOINT ["/start.sh"] | ||||||
							
								
								
									
playwright/compose/warden/build.sh | 23 (new executable file)
							| @@ -0,0 +1,23 @@ | |||||||
|  | #!/bin/bash | ||||||
|  |  | ||||||
|  | echo $REPO_URL | ||||||
|  | echo $COMMIT_HASH | ||||||
|  |  | ||||||
|  | if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then | ||||||
|  |     rm -rf /web-vault | ||||||
|  |  | ||||||
|  |     mkdir bw_web_builds; | ||||||
|  |     cd bw_web_builds; | ||||||
|  |  | ||||||
|  |     git -c init.defaultBranch=main init | ||||||
|  |     git remote add origin "$REPO_URL" | ||||||
|  |     git fetch --depth 1 origin "$COMMIT_HASH" | ||||||
|  |     git -c advice.detachedHead=false checkout FETCH_HEAD | ||||||
|  |  | ||||||
|  |     export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2) | ||||||
|  |     ./scripts/checkout_web_vault.sh | ||||||
|  |     ./scripts/build_web_vault.sh | ||||||
|  |     printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json | ||||||
|  |  | ||||||
|  |     mv ./web-vault/apps/web/build /web-vault | ||||||
|  | fi | ||||||
							
								
								
									
playwright/docker-compose.yml | 124 (new file)
							| @@ -0,0 +1,124 @@ | |||||||
|  | services: | ||||||
|  |   VaultwardenPrebuild: | ||||||
|  |     profiles: ["playwright", "vaultwarden"] | ||||||
|  |     container_name: playwright_oidc_vaultwarden_prebuilt | ||||||
|  |     image: playwright_oidc_vaultwarden_prebuilt | ||||||
|  |     build: | ||||||
|  |       context: .. | ||||||
|  |       dockerfile: Dockerfile | ||||||
|  |     entrypoint: /bin/bash | ||||||
|  |     restart: "no" | ||||||
|  |  | ||||||
|  |   Vaultwarden: | ||||||
|  |     profiles: ["playwright", "vaultwarden"] | ||||||
|  |     container_name: playwright_oidc_vaultwarden-${ENV:-dev} | ||||||
|  |     image: playwright_oidc_vaultwarden-${ENV:-dev} | ||||||
|  |     network_mode: "host" | ||||||
|  |     build: | ||||||
|  |       context: compose/warden | ||||||
|  |       dockerfile: Dockerfile | ||||||
|  |       args: | ||||||
|  |         REPO_URL: ${PW_WV_REPO_URL:-} | ||||||
|  |         COMMIT_HASH: ${PW_WV_COMMIT_HASH:-} | ||||||
|  |     env_file: ${DC_ENV_FILE:-.env} | ||||||
|  |     environment: | ||||||
|  |       - DATABASE_URL | ||||||
|  |       - I_REALLY_WANT_VOLATILE_STORAGE | ||||||
|  |       - LOG_LEVEL | ||||||
|  |       - LOGIN_RATELIMIT_MAX_BURST | ||||||
|  |       - SMTP_HOST | ||||||
|  |       - SMTP_FROM | ||||||
|  |       - SMTP_DEBUG | ||||||
|  |       - SSO_DEBUG_TOKENS | ||||||
|  |       - SSO_FRONTEND | ||||||
|  |       - SSO_ENABLED | ||||||
|  |       - SSO_ONLY | ||||||
|  |     restart: "no" | ||||||
|  |     depends_on: | ||||||
|  |       - VaultwardenPrebuild | ||||||
|  |  | ||||||
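|  |   # The mounted docker socket lets the test suite start and stop the database and Vaultwarden containers itself | ||||||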
|  |   Playwright: | ||||||
|  |     profiles: ["playwright"] | ||||||
|  |     container_name: playwright_oidc_playwright | ||||||
|  |     image: playwright_oidc_playwright | ||||||
|  |     network_mode: "host" | ||||||
|  |     build: | ||||||
|  |       context: . | ||||||
|  |       dockerfile: compose/playwright/Dockerfile | ||||||
|  |     environment: | ||||||
|  |       - PW_WV_REPO_URL | ||||||
|  |       - PW_WV_COMMIT_HASH | ||||||
|  |     restart: "no" | ||||||
|  |     volumes: | ||||||
|  |       - /var/run/docker.sock:/var/run/docker.sock | ||||||
|  |       - ..:/project | ||||||
|  |  | ||||||
|  |   Mariadb: | ||||||
|  |     profiles: ["playwright"] | ||||||
|  |     container_name: playwright_mariadb | ||||||
|  |     image: mariadb:11.2.4 | ||||||
|  |     env_file: test.env | ||||||
|  |     healthcheck: | ||||||
|  |       test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] | ||||||
|  |       start_period: 10s | ||||||
|  |       interval: 10s | ||||||
|  |     ports: | ||||||
|  |       - ${MARIADB_PORT}:3306 | ||||||
|  |  | ||||||
|  |   Mysql: | ||||||
|  |     profiles: ["playwright"] | ||||||
|  |     container_name: playwright_mysql | ||||||
|  |     image: mysql:8.4.1 | ||||||
|  |     env_file: test.env | ||||||
|  |     healthcheck: | ||||||
|  |       test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] | ||||||
|  |       start_period: 10s | ||||||
|  |       interval: 10s | ||||||
|  |     ports: | ||||||
|  |       - ${MYSQL_PORT}:3306 | ||||||
|  |  | ||||||
|  |   Postgres: | ||||||
|  |     profiles: ["playwright"] | ||||||
|  |     container_name: playwright_postgres | ||||||
|  |     image: postgres:16.3 | ||||||
|  |     env_file: test.env | ||||||
|  |     healthcheck: | ||||||
|  |       test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"] | ||||||
|  |       start_period: 20s | ||||||
|  |       interval: 30s | ||||||
|  |     ports: | ||||||
|  |       - ${POSTGRES_PORT}:5432 | ||||||
|  |  | ||||||
|  |   Maildev: | ||||||
|  |     profiles: ["vaultwarden", "maildev"] | ||||||
|  |     container_name: maildev | ||||||
|  |     image: timshel/maildev:3.0.4 | ||||||
|  |     ports: | ||||||
|  |       - ${SMTP_PORT}:1025 | ||||||
|  |       - 1080:1080 | ||||||
|  |  | ||||||
|  |   Keycloak: | ||||||
|  |     profiles: ["keycloak", "vaultwarden"] | ||||||
|  |     container_name: keycloak-${ENV:-dev} | ||||||
|  |     image: quay.io/keycloak/keycloak:25.0.4 | ||||||
|  |     network_mode: "host" | ||||||
|  |     command: | ||||||
|  |       - start-dev | ||||||
|  |     env_file: ${DC_ENV_FILE:-.env} | ||||||
|  |  | ||||||
|  |   KeycloakSetup: | ||||||
|  |     profiles: ["keycloak", "vaultwarden"] | ||||||
|  |     container_name: keycloakSetup-${ENV:-dev} | ||||||
|  |     image: keycloak_setup-${ENV:-dev} | ||||||
|  |     build: | ||||||
|  |       context: compose/keycloak | ||||||
|  |       dockerfile: Dockerfile | ||||||
|  |       args: | ||||||
|  |         KEYCLOAK_VERSION: 25.0.4 | ||||||
|  |         JAVA_URL: https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | ||||||
|  |         JAVA_VERSION: 21.0.2 | ||||||
|  |     network_mode: "host" | ||||||
|  |     depends_on: | ||||||
|  |       - Keycloak | ||||||
|  |     restart: "no" | ||||||
|  |     env_file: ${DC_ENV_FILE:-.env} | ||||||
							
								
								
									
playwright/global-setup.ts | 22 (new file)
							| @@ -0,0 +1,22 @@ | |||||||
|  | import { firefox, type FullConfig } from '@playwright/test'; | ||||||
|  | import { execSync } from 'node:child_process'; | ||||||
|  | import fs from 'fs'; | ||||||
|  |  | ||||||
|  | const utils = require('./global-utils'); | ||||||
|  |  | ||||||
|  | utils.loadEnv(); | ||||||
|  |  | ||||||
|  | async function globalSetup(config: FullConfig) { | ||||||
|  |     // Are we running in docker and the project is mounted ? | ||||||
|  |     const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : "."); | ||||||
|  |     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, { | ||||||
|  |         env: { ...process.env }, | ||||||
|  |         stdio: "inherit" | ||||||
|  |     }); | ||||||
|  |     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, { | ||||||
|  |         env: { ...process.env }, | ||||||
|  |         stdio: "inherit" | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export default globalSetup; | ||||||
							
								
								
									
playwright/global-utils.ts | 246 (new file)
							| @@ -0,0 +1,246 @@ | |||||||
|  | import { expect, type Browser, type Page, type TestInfo } from '@playwright/test'; | ||||||
|  | import { EventEmitter } from "events"; | ||||||
|  | import { type Mail, MailServer } from 'maildev'; | ||||||
|  | import { execSync } from 'node:child_process'; | ||||||
|  |  | ||||||
|  | import dotenv from 'dotenv'; | ||||||
|  | import dotenvExpand from 'dotenv-expand'; | ||||||
|  |  | ||||||
|  | const fs = require("fs"); | ||||||
|  | const { spawn } = require('node:child_process'); | ||||||
|  |  | ||||||
|  | export function loadEnv(){ | ||||||
|  |     var myEnv = dotenv.config({ path: 'test.env' }); | ||||||
|  |     dotenvExpand.expand(myEnv); | ||||||
|  |  | ||||||
|  |     return { | ||||||
|  |         user1: { | ||||||
|  |             email: process.env.TEST_USER_MAIL, | ||||||
|  |             name: process.env.TEST_USER, | ||||||
|  |             password: process.env.TEST_USER_PASSWORD, | ||||||
|  |         }, | ||||||
|  |         user2: { | ||||||
|  |             email: process.env.TEST_USER2_MAIL, | ||||||
|  |             name: process.env.TEST_USER2, | ||||||
|  |             password: process.env.TEST_USER2_PASSWORD, | ||||||
|  |         }, | ||||||
|  |         user3: { | ||||||
|  |             email: process.env.TEST_USER3_MAIL, | ||||||
|  |             name: process.env.TEST_USER3, | ||||||
|  |             password: process.env.TEST_USER3_PASSWORD, | ||||||
|  |         }, | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
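|  | // Poll the given URL with a fresh browser context until the server answers with HTTP 200 | ||||||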
|  | export async function waitFor(url: String, browser: Browser) { | ||||||
|  |     var ready = false; | ||||||
|  |     var context; | ||||||
|  |  | ||||||
|  |     do { | ||||||
|  |         try { | ||||||
|  |             context = await browser.newContext(); | ||||||
|  |             const page = await context.newPage(); | ||||||
|  |             await page.waitForTimeout(500); | ||||||
|  |             const result = await page.goto(url); | ||||||
|  |             ready = result.status() === 200; | ||||||
|  |         } catch(e) { | ||||||
|  |             if( !e.message.includes("CONNECTION_REFUSED") ){ | ||||||
|  |                 throw e; | ||||||
|  |             } | ||||||
|  |         } finally { | ||||||
|  |             await context.close(); | ||||||
|  |         } | ||||||
|  |     } while(!ready); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export function startComposeService(serviceName: String){ | ||||||
|  |     console.log(`Starting ${serviceName}`); | ||||||
|  |     execSync(`docker compose --profile playwright --env-file test.env  up -d ${serviceName}`); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export function stopComposeService(serviceName: String){ | ||||||
|  |     console.log(`Stopping ${serviceName}`); | ||||||
|  |     execSync(`docker compose --profile playwright --env-file test.env  stop ${serviceName}`); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | function wipeSqlite(){ | ||||||
|  |     console.log(`Delete Vaultwarden container to wipe sqlite`); | ||||||
|  |     execSync(`docker compose --env-file test.env stop Vaultwarden`); | ||||||
|  |     execSync(`docker compose --env-file test.env rm -f Vaultwarden`); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function wipeMariaDB(){ | ||||||
|  |     var mysql = require('mysql2/promise'); | ||||||
|  |     var ready = false; | ||||||
|  |     var connection; | ||||||
|  |  | ||||||
|  |     do { | ||||||
|  |         try { | ||||||
|  |             connection = await mysql.createConnection({ | ||||||
|  |                 user: process.env.MARIADB_USER, | ||||||
|  |                 host: "127.0.0.1", | ||||||
|  |                 database: process.env.MARIADB_DATABASE, | ||||||
|  |                 password: process.env.MARIADB_PASSWORD, | ||||||
|  |                 port: process.env.MARIADB_PORT, | ||||||
|  |             }); | ||||||
|  |  | ||||||
|  |             await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`); | ||||||
|  |             await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`); | ||||||
|  |             console.log('Successfully wiped mariadb'); | ||||||
|  |             ready = true; | ||||||
|  |         } catch (err) { | ||||||
|  |             console.log(`Error when wiping mariadb: ${err}`); | ||||||
|  |         } finally { | ||||||
|  |             if( connection ){ | ||||||
|  |                 connection.end(); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         await new Promise(r => setTimeout(r, 1000)); | ||||||
|  |     } while(!ready); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function wipeMysqlDB(){ | ||||||
|  |     var mysql = require('mysql2/promise'); | ||||||
|  |     var ready = false; | ||||||
|  |     var connection; | ||||||
|  |  | ||||||
|  |     do{ | ||||||
|  |         try { | ||||||
|  |             connection = await mysql.createConnection({ | ||||||
|  |                 user: process.env.MYSQL_USER, | ||||||
|  |                 host: "127.0.0.1", | ||||||
|  |                 database: process.env.MYSQL_DATABASE, | ||||||
|  |                 password: process.env.MYSQL_PASSWORD, | ||||||
|  |                 port: process.env.MYSQL_PORT, | ||||||
|  |             }); | ||||||
|  |  | ||||||
|  |             await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`); | ||||||
|  |             await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`); | ||||||
|  |             console.log('Successfully wiped mysql'); | ||||||
|  |             ready = true; | ||||||
|  |         } catch (err) { | ||||||
|  |             console.log(`Error when wiping mysql: ${err}`); | ||||||
|  |         } finally { | ||||||
|  |             if( connection ){ | ||||||
|  |                 connection.end(); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         await new Promise(r => setTimeout(r, 1000)); | ||||||
|  |     } while(!ready); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function wipePostgres(){ | ||||||
|  |     const { Client } = require('pg'); | ||||||
|  |  | ||||||
|  |     const client = new Client({ | ||||||
|  |         user: process.env.POSTGRES_USER, | ||||||
|  |         host: "127.0.0.1", | ||||||
|  |         database: "postgres", | ||||||
|  |         password: process.env.POSTGRES_PASSWORD, | ||||||
|  |         port: process.env.POSTGRES_PORT, | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     try { | ||||||
|  |         await client.connect(); | ||||||
|  |         await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`); | ||||||
|  |         await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`); | ||||||
|  |         console.log('Successfully wiped postgres'); | ||||||
|  |     } catch (err) { | ||||||
|  |         console.log(`Error when wiping postgres: ${err}`); | ||||||
|  |     } finally { | ||||||
|  |         client.end(); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
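|  | // Map the Playwright project name to the connection settings of the matching database container (sqlite needs no external database) | ||||||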
|  | function dbConfig(testInfo: TestInfo){ | ||||||
|  |     switch(testInfo.project.name) { | ||||||
|  |         case "postgres": | ||||||
|  |         case "sso-postgres": | ||||||
|  |             return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` }; | ||||||
|  |         case "mariadb": | ||||||
|  |         case "sso-mariadb": | ||||||
|  |             return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` }; | ||||||
|  |         case "mysql": | ||||||
|  |         case "sso-mysql": | ||||||
|  |             return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}`}; | ||||||
|  |         case "sqlite": | ||||||
|  |         case "sso-sqlite": | ||||||
|  |             return { I_REALLY_WANT_VOLATILE_STORAGE: true }; | ||||||
|  |         default: | ||||||
|  |             throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  *  All parameters passed in `env` need to be added to the docker-compose.yml | ||||||
|  |  **/ | ||||||
|  | export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: Boolean = true) { | ||||||
|  |     if( resetDB ){ | ||||||
|  |         switch(testInfo.project.name) { | ||||||
|  |             case "postgres": | ||||||
|  |             case "sso-postgres": | ||||||
|  |                 await wipePostgres(); | ||||||
|  |                 break; | ||||||
|  |             case "mariadb": | ||||||
|  |             case "sso-mariadb": | ||||||
|  |                 await wipeMariaDB(); | ||||||
|  |                 break; | ||||||
|  |             case "mysql": | ||||||
|  |             case "sso-mysql": | ||||||
|  |                 await wipeMysqlDB(); | ||||||
|  |                 break; | ||||||
|  |             case "sqlite": | ||||||
|  |             case "sso-sqlite": | ||||||
|  |                 wipeSqlite(); | ||||||
|  |                 break; | ||||||
|  |             default: | ||||||
|  |                 throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     console.log(`Starting Vaultwarden`); | ||||||
|  |     execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, { | ||||||
|  |         env: { ...env, ...dbConfig(testInfo) }, | ||||||
|  |     }); | ||||||
|  |     await waitFor("/", browser); | ||||||
|  |     console.log(`Vaultwarden running on: ${process.env.DOMAIN}`); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function stopVault(force: boolean = false) { | ||||||
|  |     if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||||
|  |         console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`); | ||||||
|  |     } else { | ||||||
|  |         console.log(`Vaultwarden stopping`); | ||||||
|  |         execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: Boolean = true) { | ||||||
|  |     stopVault(true); | ||||||
|  |     return startVault(page.context().browser(), testInfo, env, resetDB); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function checkNotification(page: Page, hasText: string) { | ||||||
|  |     await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible(); | ||||||
|  |     await page.locator('bit-toast').filter({ hasText }).getByRole('button').click(); | ||||||
|  |     await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function cleanLanding(page: Page) { | ||||||
|  |     await page.goto('/', { waitUntil: 'domcontentloaded' }); | ||||||
|  |     await expect(page.getByRole('button').nth(0)).toBeVisible(); | ||||||
|  |  | ||||||
|  |     const logged = await page.getByRole('button', { name: 'Log out' }).count(); | ||||||
|  |     if( logged > 0 ){ | ||||||
|  |         await page.getByRole('button', { name: 'Log out' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Log out' }).click(); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function logout(test: Test, page: Page, user: { name: string }) { | ||||||
|  |     await test.step('logout', async () => { | ||||||
|  |         await page.getByRole('button', { name: user.name, exact: true }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Log out' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | } | ||||||
							
								
								
									
playwright/package-lock.json | 2594 (generated, new file; diff suppressed because it is too large)
playwright/package.json | 21 (new file)
							| @@ -0,0 +1,21 @@ | |||||||
|  | { | ||||||
|  |     "name": "scenarios", | ||||||
|  |     "version": "1.0.0", | ||||||
|  |     "description": "", | ||||||
|  |     "main": "index.js", | ||||||
|  |     "scripts": {}, | ||||||
|  |     "keywords": [], | ||||||
|  |     "author": "", | ||||||
|  |     "license": "ISC", | ||||||
|  |     "devDependencies": { | ||||||
|  |         "@playwright/test": "^1.54.2", | ||||||
|  |         "dotenv": "^16.6.1", | ||||||
|  |         "dotenv-expand": "^12.0.2", | ||||||
|  |         "maildev": "npm:@timshel_npm/maildev@^3.2.1" | ||||||
|  |     }, | ||||||
|  |     "dependencies": { | ||||||
|  |         "mysql2": "^3.14.3", | ||||||
|  |         "otpauth": "^9.4.0", | ||||||
|  |         "pg": "^8.16.3" | ||||||
|  |     } | ||||||
|  | } | ||||||
							
								
								
									
playwright/playwright.config.ts | 143 (new file)
							| @@ -0,0 +1,143 @@ | |||||||
|  | import { defineConfig, devices } from '@playwright/test'; | ||||||
|  | import { exec } from 'node:child_process'; | ||||||
|  |  | ||||||
|  | const utils = require('./global-utils'); | ||||||
|  |  | ||||||
|  | utils.loadEnv(); | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * See https://playwright.dev/docs/test-configuration. | ||||||
|  |  */ | ||||||
|  | export default defineConfig({ | ||||||
|  |     testDir: './.', | ||||||
|  |     /* Run tests in files in parallel */ | ||||||
|  |     fullyParallel: false, | ||||||
|  |  | ||||||
|  |     /* Fail the build on CI if you accidentally left test.only in the source code. */ | ||||||
|  |     forbidOnly: !!process.env.CI, | ||||||
|  |  | ||||||
|  |     retries: 0, | ||||||
|  |     workers: 1, | ||||||
|  |  | ||||||
|  |     /* Reporter to use. See https://playwright.dev/docs/test-reporters */ | ||||||
|  |     reporter: 'html', | ||||||
|  |  | ||||||
|  |     /* Long global timeout for complex tests | ||||||
|  |      * But short action/nav/expect timeouts to fail on specific step (raise locally if not enough). | ||||||
|  |      */ | ||||||
|  |     timeout: 120 * 1000, | ||||||
|  |     actionTimeout: 20 * 1000, | ||||||
|  |     navigationTimeout: 20 * 1000, | ||||||
|  |     expect: { timeout: 20 * 1000 }, | ||||||
|  |  | ||||||
|  |     /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ | ||||||
|  |     use: { | ||||||
|  |         /* Base URL to use in actions like `await page.goto('/')`. */ | ||||||
|  |         baseURL: process.env.DOMAIN, | ||||||
|  |         browserName: 'firefox', | ||||||
|  |         locale: 'en-GB', | ||||||
|  |         timezoneId: 'Europe/London', | ||||||
|  |  | ||||||
|  |         /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */ | ||||||
|  |         trace: 'on', | ||||||
|  |         viewport: { | ||||||
|  |             width: 1080, | ||||||
|  |             height: 720, | ||||||
|  |         }, | ||||||
|  |         video: "on", | ||||||
|  |     }, | ||||||
|  |  | ||||||
|  |     /* Configure projects for major browsers */ | ||||||
|  |     projects: [ | ||||||
|  |         { | ||||||
|  |             name: 'mariadb-setup', | ||||||
|  |             testMatch: 'tests/setups/db-setup.ts', | ||||||
|  |             use: { serviceName: "Mariadb" }, | ||||||
|  |             teardown: 'mariadb-teardown', | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'mysql-setup', | ||||||
|  |             testMatch: 'tests/setups/db-setup.ts', | ||||||
|  |             use: { serviceName: "Mysql" }, | ||||||
|  |             teardown: 'mysql-teardown', | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'postgres-setup', | ||||||
|  |             testMatch: 'tests/setups/db-setup.ts', | ||||||
|  |             use: { serviceName: "Postgres" }, | ||||||
|  |             teardown: 'postgres-teardown', | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sso-setup', | ||||||
|  |             testMatch: 'tests/setups/sso-setup.ts', | ||||||
|  |             teardown: 'sso-teardown', | ||||||
|  |         }, | ||||||
|  |  | ||||||
|  |         { | ||||||
|  |             name: 'mariadb', | ||||||
|  |             testMatch: 'tests/*.spec.ts', | ||||||
|  |             testIgnore: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['mariadb-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'mysql', | ||||||
|  |             testMatch: 'tests/*.spec.ts', | ||||||
|  |             testIgnore: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['mysql-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'postgres', | ||||||
|  |             testMatch: 'tests/*.spec.ts', | ||||||
|  |             testIgnore: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['postgres-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sqlite', | ||||||
|  |             testMatch: 'tests/*.spec.ts', | ||||||
|  |             testIgnore: 'tests/sso_*.spec.ts', | ||||||
|  |         }, | ||||||
|  |  | ||||||
|  |         { | ||||||
|  |             name: 'sso-mariadb', | ||||||
|  |             testMatch: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['sso-setup', 'mariadb-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sso-mysql', | ||||||
|  |             testMatch: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['sso-setup', 'mysql-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sso-postgres', | ||||||
|  |             testMatch: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['sso-setup', 'postgres-setup'], | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sso-sqlite', | ||||||
|  |             testMatch: 'tests/sso_*.spec.ts', | ||||||
|  |             dependencies: ['sso-setup'], | ||||||
|  |         }, | ||||||
|  |  | ||||||
|  |         { | ||||||
|  |             name: 'mariadb-teardown', | ||||||
|  |             testMatch: 'tests/setups/db-teardown.ts', | ||||||
|  |             use: { serviceName: "Mariadb" }, | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'mysql-teardown', | ||||||
|  |             testMatch: 'tests/setups/db-teardown.ts', | ||||||
|  |             use: { serviceName: "Mysql" }, | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'postgres-teardown', | ||||||
|  |             testMatch: 'tests/setups/db-teardown.ts', | ||||||
|  |             use: { serviceName: "Postgres" }, | ||||||
|  |         }, | ||||||
|  |         { | ||||||
|  |             name: 'sso-teardown', | ||||||
|  |             testMatch: 'tests/setups/sso-teardown.ts', | ||||||
|  |         }, | ||||||
|  |     ], | ||||||
|  |  | ||||||
|  |     globalSetup: require.resolve('./global-setup'), | ||||||
|  | }); | ||||||
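The shared `use` block above is what lets the specs below navigate with relative URLs and stay unaware of which database project they run under. A minimal sketch (not part of the diff, names are illustrative) of how a test picks up the configured `baseURL` and the current project name:

    import { test, expect } from '@playwright/test';

    // baseURL comes from `use.baseURL` (process.env.DOMAIN in the config above),
    // so page.goto('/') resolves against the Vaultwarden instance under test.
    test('relative navigation uses the configured baseURL', async ({ page, baseURL }) => {
        expect(baseURL).toBeDefined();
        await page.goto('/');

        // The project name ('sqlite', 'postgres', 'sso-mariadb', ...) is available
        // at runtime, handy for logging or project-specific assertions.
        console.log(`running against project: ${test.info().project.name}`);
    });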
							
								
								
									
playwright/test.env (new file, 97 lines)
							| @@ -0,0 +1,97 @@ | |||||||
|  | ################################################################## | ||||||
|  | ### Shared Playwright conf test file Vaultwarden and Databases ### | ||||||
|  | ################################################################## | ||||||
|  |  | ||||||
|  | ENV=test | ||||||
|  | DC_ENV_FILE=test.env | ||||||
|  | COMPOSE_IGNORE_ORPHANS=True | ||||||
|  | DOCKER_BUILDKIT=1 | ||||||
|  |  | ||||||
|  | ##################### | ||||||
|  | # Playwright Config # | ||||||
|  | ##################### | ||||||
|  | PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false} | ||||||
|  | PW_SMTP_FROM=vaultwarden@playwright.test | ||||||
|  |  | ||||||
|  | ##################### | ||||||
|  | # Maildev Config 	# | ||||||
|  | ##################### | ||||||
|  | MAILDEV_HTTP_PORT=1081 | ||||||
|  | MAILDEV_SMTP_PORT=1026 | ||||||
|  | MAILDEV_HOST=127.0.0.1 | ||||||
|  |  | ||||||
|  | ################ | ||||||
|  | # Users Config # | ||||||
|  | ################ | ||||||
|  | TEST_USER=test | ||||||
|  | TEST_USER_PASSWORD=Master Password | ||||||
|  | TEST_USER_MAIL=${TEST_USER}@example.com | ||||||
|  |  | ||||||
|  | TEST_USER2=test2 | ||||||
|  | TEST_USER2_PASSWORD=Master Password | ||||||
|  | TEST_USER2_MAIL=${TEST_USER2}@example.com | ||||||
|  |  | ||||||
|  | TEST_USER3=test3 | ||||||
|  | TEST_USER3_PASSWORD=Master Password | ||||||
|  | TEST_USER3_MAIL=${TEST_USER3}@example.com | ||||||
|  |  | ||||||
|  | ################### | ||||||
|  | # Keycloak Config # | ||||||
|  | ################### | ||||||
|  | KEYCLOAK_ADMIN=admin | ||||||
|  | KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||||
|  | KC_HTTP_HOST=127.0.0.1 | ||||||
|  | KC_HTTP_PORT=8081 | ||||||
|  |  | ||||||
|  | # Script parameters (use Keycloak and Vaultwarden config too) | ||||||
|  | TEST_REALM=test | ||||||
|  | DUMMY_REALM=dummy | ||||||
|  | DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||||
|  |  | ||||||
|  | ###################### | ||||||
|  | # Vaultwarden Config # | ||||||
|  | ###################### | ||||||
|  | ROCKET_PORT=8003 | ||||||
|  | DOMAIN=http://localhost:${ROCKET_PORT} | ||||||
|  | LOG_LEVEL=info,oidcwarden::sso=debug | ||||||
|  | LOGIN_RATELIMIT_MAX_BURST=100 | ||||||
|  |  | ||||||
|  | SMTP_SECURITY=off | ||||||
|  | SMTP_PORT=${MAILDEV_SMTP_PORT} | ||||||
|  | SMTP_FROM_NAME=Vaultwarden | ||||||
|  | SMTP_TIMEOUT=5 | ||||||
|  |  | ||||||
|  | SSO_CLIENT_ID=warden | ||||||
|  | SSO_CLIENT_SECRET=warden | ||||||
|  | SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||||
|  | SSO_DEBUG_TOKENS=true | ||||||
|  |  | ||||||
|  | # Custom web-vault build | ||||||
|  | # PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git | ||||||
|  | # PW_WV_COMMIT_HASH=a5f5390895516bce2f48b7baadb6dc399e5fe75a | ||||||
|  |  | ||||||
|  | ########################### | ||||||
|  | # Docker MariaDb container# | ||||||
|  | ########################### | ||||||
|  | MARIADB_PORT=3307 | ||||||
|  | MARIADB_ROOT_PASSWORD=warden | ||||||
|  | MARIADB_USER=warden | ||||||
|  | MARIADB_PASSWORD=warden | ||||||
|  | MARIADB_DATABASE=warden | ||||||
|  |  | ||||||
|  | ########################### | ||||||
|  | # Docker Mysql container# | ||||||
|  | ########################### | ||||||
|  | MYSQL_PORT=3309 | ||||||
|  | MYSQL_ROOT_PASSWORD=warden | ||||||
|  | MYSQL_USER=warden | ||||||
|  | MYSQL_PASSWORD=warden | ||||||
|  | MYSQL_DATABASE=warden | ||||||
|  |  | ||||||
|  | ############################ | ||||||
|  | # Docker Postgres container# | ||||||
|  | ############################ | ||||||
|  | POSTGRES_PORT=5433 | ||||||
|  | POSTGRES_USER=warden | ||||||
|  | POSTGRES_PASSWORD=warden | ||||||
|  | POSTGRES_DB=warden | ||||||
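The specs consume these values through a loadEnv() helper in global-utils, which is not part of this diff. A hedged sketch of what such a loader could look like, assuming dotenv and dotenv-expand are available; note that `${TEST_USER}`-style references need expansion, which docker compose performs natively but plain dotenv does not:

    // Sketch only: the real loadEnv() lives in playwright/global-utils.
    import * as dotenv from 'dotenv';
    import { expand } from 'dotenv-expand';

    export function loadTestEnv() {
        // Parse test.env, then expand ${TEST_USER}-style references.
        expand(dotenv.config({ path: 'test.env' }));

        return {
            user1: {
                name: process.env.TEST_USER,
                email: process.env.TEST_USER_MAIL,
                password: process.env.TEST_USER_PASSWORD,
            },
        };
    }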
							
								
								
									
playwright/tests/collection.spec.ts (new file, 37 lines)
							| @@ -0,0 +1,37 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  | import { createAccount } from './setups/user'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.startVault(browser, testInfo); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create', async ({ page }) => { | ||||||
|  |     await createAccount(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await test.step('Create Org', async () => { | ||||||
|  |         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||||
|  |         await page.getByLabel('Organisation name (required)').fill('Test'); | ||||||
|  |         await page.getByRole('button', { name: 'Submit' }).click(); | ||||||
|  |         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Organisation created'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Create Collection', async () => { | ||||||
|  |         await page.getByRole('link', { name: 'Collections' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'New' }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Collection' }).click(); | ||||||
|  |         await page.getByLabel('Name (required)').fill('RandomCollec'); | ||||||
|  |         await page.getByRole('button', { name: 'Save' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'Created collection RandomCollec'); | ||||||
|  |         await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
							
								
								
									
playwright/tests/login.smtp.spec.ts (new file, 100 lines)
							| @@ -0,0 +1,100 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | const utils = require('../global-utils'); | ||||||
|  | import { createAccount, logUser } from './setups/user'; | ||||||
|  | import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | let mailserver; | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     mailserver = new MailDev({ | ||||||
|  |         port: process.env.MAILDEV_SMTP_PORT, | ||||||
|  |         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||||
|  |     }) | ||||||
|  |  | ||||||
|  |     await mailserver.listen(); | ||||||
|  |  | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SMTP_HOST: process.env.MAILDEV_HOST, | ||||||
|  |         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  |     if( mailserver ){ | ||||||
|  |         await mailserver.close(); | ||||||
|  |     } | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Account creation', async ({ page }) => { | ||||||
|  |     const mailBuffer = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await createAccount(test, page, users.user1, mailBuffer); | ||||||
|  |  | ||||||
|  |     mailBuffer.close(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Login', async ({ context, page }) => { | ||||||
|  |     const mailBuffer = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await logUser(test, page, users.user1, mailBuffer); | ||||||
|  |  | ||||||
|  |     await test.step('verify email', async () => { | ||||||
|  |         await page.getByText('Verify your account\'s email').click(); | ||||||
|  |         await expect(page.getByText('Verify your account\'s email')).toBeVisible(); | ||||||
|  |         await page.getByRole('button', { name: 'Send email' }).click(); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Check your email inbox for a verification link'); | ||||||
|  |  | ||||||
|  |         const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email"); | ||||||
|  |         expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM); | ||||||
|  |  | ||||||
|  |         const page2 = await context.newPage(); | ||||||
|  |         await page2.setContent(verify.html); | ||||||
|  |         const link = await page2.getByTestId("verify").getAttribute("href"); | ||||||
|  |         await page2.close(); | ||||||
|  |  | ||||||
|  |         await page.goto(link); | ||||||
|  |         await utils.checkNotification(page, 'Account email verified'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     mailBuffer.close(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Activate 2fa', async ({ page }) => { | ||||||
|  |     const emails = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await activateEmail(test, page, users.user1, emails); | ||||||
|  |  | ||||||
|  |     emails.close(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('2fa', async ({ page }) => { | ||||||
|  |     const emails = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await test.step('login', async () => { | ||||||
|  |         await page.goto('/'); | ||||||
|  |  | ||||||
|  |         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |         await page.getByLabel('Master password').fill(users.user1.password); | ||||||
|  |         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||||
|  |  | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||||
|  |         const code = await retrieveEmailCode(test, page, emails); | ||||||
|  |         await page.getByLabel(/Verification code/).fill(code); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |         await expect(page).toHaveTitle(/Vaults/); | ||||||
|  |     }) | ||||||
|  |  | ||||||
|  |     await disableEmail(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     emails.close(); | ||||||
|  | }); | ||||||
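The `mailserver.buffer(...).expect(...)` calls above come from the repo's own MailBuffer helper; stock maildev only exposes an event/REST API. A rough, purely illustrative sketch of how such a buffer could be layered on top of maildev's 'new' event:

    import { MailDev } from 'maildev';

    // Illustrative only; the real MailBuffer implementation is not shown in this diff.
    function makeBuffer(mailserver: MailDev, recipient: string) {
        const mails: any[] = [];
        mailserver.on('new', (mail: any) => {
            if (mail.to?.some((to: any) => to.address === recipient)) {
                mails.push(mail);
            }
        });

        return {
            // Poll the captured mails until one matches the predicate or time runs out.
            async expect(predicate: (mail: any) => boolean, timeoutMs = 10_000) {
                const deadline = Date.now() + timeoutMs;
                while (Date.now() < deadline) {
                    const hit = mails.find(predicate);
                    if (hit) return hit;
                    await new Promise((resolve) => setTimeout(resolve, 250));
                }
                throw new Error(`no mail for ${recipient} matched within ${timeoutMs}ms`);
            },
            close() { /* nothing to release in this sketch */ },
        };
    }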
							
								
								
									
playwright/tests/login.spec.ts (new file, 51 lines)
							| @@ -0,0 +1,51 @@ | |||||||
|  | import { test, expect, type Page, type TestInfo } from '@playwright/test'; | ||||||
|  | import * as OTPAuth from "otpauth"; | ||||||
|  |  | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  | import { createAccount, logUser } from './setups/user'; | ||||||
|  | import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  | let totp; | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.startVault(browser, testInfo, {}); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Account creation', async ({ page }) => { | ||||||
|  |     await createAccount(test, page, users.user1); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Master password login', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Authenticator 2fa', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     let totp = await activateTOTP(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await utils.logout(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await test.step('login', async () => { | ||||||
|  |         let timestamp = Date.now(); // Needed to use the next token | ||||||
|  |         timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||||
|  |  | ||||||
|  |         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |         await page.getByLabel('Master password').fill(users.user1.password); | ||||||
|  |         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||||
|  |  | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||||
|  |         await page.getByLabel(/Verification code/).fill(totp.generate({timestamp})); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await disableTOTP(test, page, users.user1); | ||||||
|  | }); | ||||||
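The timestamp arithmetic in the login step above is worth spelling out: activateTOTP() has just consumed the code for the current 30-second window, so the test generates the code for the next window rather than waiting for it to roll over. The same calculation in isolation (sketch, not part of the diff):

    import * as OTPAuth from 'otpauth';

    // Returns the code for the *next* TOTP window, one second after it opens,
    // so a code already consumed during activation is never reused at login.
    function nextWindowCode(totp: OTPAuth.TOTP, now = Date.now()): string {
        const secondsIntoWindow = Math.floor(now / 1000) % totp.period;
        const nextWindowStart = now + (totp.period - secondsIntoWindow + 1) * 1000;
        return totp.generate({ timestamp: nextWindowStart });
    }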
							
								
								
									
playwright/tests/organization.smtp.spec.ts (new file, 115 lines)
							| @@ -0,0 +1,115 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | import * as utils from '../global-utils'; | ||||||
|  | import * as orgs from './setups/orgs'; | ||||||
|  | import { createAccount, logUser } from './setups/user'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     mailServer = new MailDev({ | ||||||
|  |         port: process.env.MAILDEV_SMTP_PORT, | ||||||
|  |         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||||
|  |     }) | ||||||
|  |  | ||||||
|  |     await mailServer.listen(); | ||||||
|  |  | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SMTP_HOST: process.env.MAILDEV_HOST, | ||||||
|  |         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     mail1Buffer = mailServer.buffer(users.user1.email); | ||||||
|  |     mail2Buffer = mailServer.buffer(users.user2.email); | ||||||
|  |     mail3Buffer = mailServer.buffer(users.user3.email); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}, testInfo: TestInfo) => { | ||||||
|  |     utils.stopVault(testInfo); | ||||||
|  |     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create user3', async ({ page }) => { | ||||||
|  |     await createAccount(test, page, users.user3, mail3Buffer); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Invite users', async ({ page }) => { | ||||||
|  |     await createAccount(test, page, users.user1, mail1Buffer); | ||||||
|  |  | ||||||
|  |     await orgs.create(test, page, 'Test'); | ||||||
|  |     await orgs.members(test, page, 'Test'); | ||||||
|  |     await orgs.invite(test, page, 'Test', users.user2.email); | ||||||
|  |     await orgs.invite(test, page, 'Test', users.user3.email, { | ||||||
|  |         navigate: false, | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('invited with new account', async ({ page }) => { | ||||||
|  |     const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||||
|  |  | ||||||
|  |     await test.step('Create account', async () => { | ||||||
|  |         await page.setContent(invited.html); | ||||||
|  |         const link = await page.getByTestId('invite').getAttribute('href'); | ||||||
|  |         await page.goto(link); | ||||||
|  |         await expect(page).toHaveTitle(/Create account | Vaultwarden Web/); | ||||||
|  |  | ||||||
|  |         //await page.getByLabel('Name').fill(users.user2.name); | ||||||
|  |         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||||
|  |         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||||
|  |         await page.getByRole('button', { name: 'Create account' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'Your new account has been created'); | ||||||
|  |  | ||||||
|  |         // Redirected to the vault | ||||||
|  |         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||||
|  |         await utils.checkNotification(page, 'You have been logged in!'); | ||||||
|  |         await utils.checkNotification(page, 'Invitation accepted'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Check mails', async () => { | ||||||
|  |         await mail2Buffer.expect((m) => m.subject === 'Welcome'); | ||||||
|  |         await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||||
|  |         await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('invited with existing account', async ({ page }) => { | ||||||
|  |     const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||||
|  |  | ||||||
|  |     await page.setContent(invited.html); | ||||||
|  |     const link = await page.getByTestId('invite').getAttribute('href'); | ||||||
|  |  | ||||||
|  |     await page.goto(link); | ||||||
|  |  | ||||||
|  |     // We should be on login page with email prefilled | ||||||
|  |     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |     await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |     // Unlock page | ||||||
|  |     await page.getByLabel('Master password').fill(users.user3.password); | ||||||
|  |     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||||
|  |  | ||||||
|  |     // We are now in the default vault page | ||||||
|  |     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |     await utils.checkNotification(page, 'Invitation accepted'); | ||||||
|  |  | ||||||
|  |     await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||||
|  |     await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Confirm invited user', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1, mail1Buffer); | ||||||
|  |  | ||||||
|  |     await orgs.members(test, page, 'Test'); | ||||||
|  |     await orgs.confirm(test, page, 'Test', users.user2.email); | ||||||
|  |  | ||||||
|  |     await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed')); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Organization is visible', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user2, mail2Buffer); | ||||||
|  |     await page.getByRole('button', { name: 'vault: Test', exact: true }).click(); | ||||||
|  |     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||||
|  | }); | ||||||
							
								
								
									
playwright/tests/organization.spec.ts (new file, 54 lines)
							| @@ -0,0 +1,54 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  | import * as orgs from './setups/orgs'; | ||||||
|  | import { createAccount, logUser } from './setups/user'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.startVault(browser, testInfo); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Invite', async ({ page }) => { | ||||||
|  |     await createAccount(test, page, users.user3); | ||||||
|  |     await createAccount(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await orgs.create(test, page, 'New organisation'); | ||||||
|  |     await orgs.members(test, page, 'New organisation'); | ||||||
|  |  | ||||||
|  |     await test.step('missing user2', async () => { | ||||||
|  |         await orgs.invite(test, page, 'New organisation', users.user2.email); | ||||||
|  |         await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('existing user3', async () => { | ||||||
|  |         await orgs.invite(test, page, 'New organisation', users.user3.email); | ||||||
|  |         await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/); | ||||||
|  |         await orgs.confirm(test, page, 'New organisation', users.user3.email); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('confirm user2', async () => { | ||||||
|  |         await createAccount(test, page, users.user2); | ||||||
|  |         await logUser(test, page, users.user1); | ||||||
|  |         await orgs.members(test, page, 'New organisation'); | ||||||
|  |         await orgs.confirm(test, page, 'New organisation', users.user2.email); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Org visible user2  ', async () => { | ||||||
|  |         await logUser(test, page, users.user2); | ||||||
|  |         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||||
|  |         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Org visible user3  ', async () => { | ||||||
|  |         await logUser(test, page, users.user3); | ||||||
|  |         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||||
|  |         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
							
								
								
									
playwright/tests/setups/2fa.ts (new file, 92 lines)
							| @@ -0,0 +1,92 @@ | |||||||
|  | import { expect, type Page, Test } from '@playwright/test'; | ||||||
|  | import { type MailBuffer } from 'maildev'; | ||||||
|  | import * as OTPAuth from "otpauth"; | ||||||
|  |  | ||||||
|  | import * as utils from '../../global-utils'; | ||||||
|  |  | ||||||
|  | export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> { | ||||||
|  |     return await test.step('Activate TOTP 2FA', async () => { | ||||||
|  |         await page.getByRole('button', { name: user.name }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Security' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||||
|  |         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |         const secret = await page.getByLabel('Key').innerText(); | ||||||
|  |         let totp = new OTPAuth.TOTP({ secret, period: 30 }); | ||||||
|  |  | ||||||
|  |         await page.getByLabel(/Verification code/).fill(totp.generate()); | ||||||
|  |         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible(); | ||||||
|  |         await page.getByLabel('Close').click(); | ||||||
|  |  | ||||||
|  |         return totp; | ||||||
|  |     }) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function disableTOTP(test: Test, page: Page, user: { password: string }) { | ||||||
|  |     await test.step('Disable TOTP 2FA', async () => { | ||||||
|  |         await page.getByRole('button', { name: 'Test' }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Security' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||||
|  |         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Yes' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) { | ||||||
|  |     await test.step('Activate Email 2FA', async () => { | ||||||
|  |         await page.getByRole('button', { name: user.name }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Security' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||||
|  |         await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Send email' }).click(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     let code = await retrieveEmailCode(test, page, mailBuffer); | ||||||
|  |  | ||||||
|  |     await test.step('input code', async () => { | ||||||
|  |         await page.getByLabel('2. Enter the resulting 6').fill(code); | ||||||
|  |         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> { | ||||||
|  |     return await test.step('retrieve code', async () => { | ||||||
|  |         const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code")); | ||||||
|  |         const page2 = await page.context().newPage(); | ||||||
|  |         await page2.setContent(codeMail.html); | ||||||
|  |         const code = await page2.getByTestId("2fa").innerText(); | ||||||
|  |         await page2.close(); | ||||||
|  |         return code; | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function disableEmail(test: Test, page: Page, user: { password: string }) { | ||||||
|  |     await test.step('Disable Email 2FA', async () => { | ||||||
|  |         await page.getByRole('button', { name: 'Test' }).click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Security' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||||
|  |         await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').click(); | ||||||
|  |         await page.getByLabel('Master password (required)').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Yes' }).click(); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
							
								
								
									
playwright/tests/setups/db-setup.ts (new file, 7 lines)
							| @@ -0,0 +1,7 @@ | |||||||
|  | import { test } from './db-test'; | ||||||
|  |  | ||||||
|  | const utils = require('../../global-utils'); | ||||||
|  |  | ||||||
|  | test('DB start', async ({ serviceName }) => { | ||||||
|  | 	utils.startComposeService(serviceName); | ||||||
|  | }); | ||||||
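startComposeService() is defined in global-utils, outside this diff. A hedged sketch of what it plausibly does, driving the same docker compose stack that test.env configures (the exact command line and the `--wait` flag are assumptions):

    import { execSync } from 'node:child_process';

    // Sketch only; service names ('Mariadb', 'Mysql', 'Postgres') come from the
    // per-project `use` blocks in playwright.config.ts.
    export function startComposeService(serviceName: string) {
        // --wait blocks until the container reports healthy.
        execSync(`docker compose --env-file test.env up --wait ${serviceName}`, { stdio: 'inherit' });
    }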
							
								
								
									
playwright/tests/setups/db-teardown.ts (new file, 11 lines)
							| @@ -0,0 +1,11 @@ | |||||||
|  | import { test } from './db-test'; | ||||||
|  |  | ||||||
|  | const utils = require('../../global-utils'); | ||||||
|  |  | ||||||
|  | utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test('DB teardown ?', async ({ serviceName }) => { | ||||||
|  |     if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) { | ||||||
|  |         utils.stopComposeService(serviceName); | ||||||
|  |     } | ||||||
|  | }); | ||||||
							
								
								
									
playwright/tests/setups/db-test.ts (new file, 9 lines)
							| @@ -0,0 +1,9 @@ | |||||||
|  | import { test as base } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | export type TestOptions = { | ||||||
|  |   serviceName: string; | ||||||
|  | }; | ||||||
|  |  | ||||||
|  | export const test = base.extend<TestOptions>({ | ||||||
|  |   serviceName: ['', { option: true }], | ||||||
|  | }); | ||||||
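serviceName is an ordinary Playwright option fixture, so besides the per-project `use` blocks in the config it can also be overridden ad hoc, for example while debugging a single database locally (illustrative snippet, not part of the diff):

    import { test } from './db-test';

    test.describe(() => {
        // Override the option for this block only; projects normally supply it.
        test.use({ serviceName: 'Postgres' });

        test('prints the selected service', async ({ serviceName }) => {
            console.log(`service under test: ${serviceName}`);
        });
    });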
							
								
								
									
playwright/tests/setups/orgs.ts (new file, 77 lines)
							| @@ -0,0 +1,77 @@ | |||||||
|  | import { expect, type Browser,Page } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | import * as utils from '../../global-utils'; | ||||||
|  |  | ||||||
|  | export async function create(test, page: Page, name: string) { | ||||||
|  |     await test.step('Create Org', async () => { | ||||||
|  |         await page.locator('a').filter({ hasText: 'Password Manager' }).first().click(); | ||||||
|  |         await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||||
|  |         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||||
|  |         await page.getByLabel('Organisation name (required)').fill(name); | ||||||
|  |         await page.getByRole('button', { name: 'Submit' }).click(); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Organisation created'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function policies(test, page: Page, name: string) { | ||||||
|  |     await test.step(`Navigate to ${name} policies`, async () => { | ||||||
|  |         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||||
|  |         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||||
|  |         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||||
|  |         await page.getByRole('button', { name: 'Toggle collapse Settings' }).click(); | ||||||
|  |         await page.getByRole('link', { name: 'Policies' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function members(test, page: Page, name: string) { | ||||||
|  |     await test.step(`Navigate to ${name} members`, async () => { | ||||||
|  |         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||||
|  |         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||||
|  |         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||||
|  |         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||||
|  |         await expect(page.getByRole('cell', { name: 'All' })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function invite(test, page: Page, name: string, email: string) { | ||||||
|  |     await test.step(`Invite ${email}`, async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||||
|  |         await page.getByRole('button', { name: 'Invite member' }).click(); | ||||||
|  |         await page.getByLabel('Email (required)').fill(email); | ||||||
|  |         await page.getByRole('tab', { name: 'Collections' }).click(); | ||||||
|  |         await page.getByRole('combobox', { name: 'Permission' }).click(); | ||||||
|  |         await page.getByText('Edit items', { exact: true }).click(); | ||||||
|  |         await page.getByLabel('Select collections').click(); | ||||||
|  |         await page.getByText('Default collection').click(); | ||||||
|  |         await page.getByRole('cell', { name: 'Collection', exact: true }).click(); | ||||||
|  |         await page.getByRole('button', { name: 'Save' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'User(s) invited'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function confirm(test, page: Page, name: string, user_email: string) { | ||||||
|  |     await test.step(`Confirm ${user_email}`, async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||||
|  |         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Confirm' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible(); | ||||||
|  |         await page.getByRole('button', { name: 'Confirm' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'confirmed'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function revoke(test, page: Page, name: string, user_email: string) { | ||||||
|  |     await test.step(`Revoke ${user_email}`, async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||||
|  |         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||||
|  |         await page.getByRole('menuitem', { name: 'Revoke access' }).click(); | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible(); | ||||||
|  |         await page.getByRole('button', { name: 'Revoke access' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'Revoked organisation access'); | ||||||
|  |     }); | ||||||
|  | } | ||||||
							
								
								
									
playwright/tests/setups/sso-setup.ts (new file, 18 lines)
							| @@ -0,0 +1,18 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | const { exec } = require('node:child_process'); | ||||||
|  | const utils = require('../../global-utils'); | ||||||
|  |  | ||||||
|  | utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async () => { | ||||||
|  |     console.log("Starting Keycloak"); | ||||||
|  |     exec(`docker compose --profile keycloak --env-file test.env up`); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Keycloak is up', async ({ page }) => { | ||||||
|  |     await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser()); | ||||||
|  |     // Dummy authority is created at the end of the setup | ||||||
|  |     await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser()); | ||||||
|  |     console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`); | ||||||
|  | }); | ||||||
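waitFor() is another global-utils helper that this diff does not include. A rough sketch, assuming it simply polls the realm URL with a throwaway page until Keycloak responds:

    import { type Browser } from '@playwright/test';

    // Sketch only: poll `url` until it returns a successful response or the timeout expires.
    export async function waitFor(url: string, browser: Browser, timeoutMs = 120_000) {
        const page = await browser.newPage();
        const deadline = Date.now() + timeoutMs;
        try {
            while (Date.now() < deadline) {
                try {
                    const response = await page.goto(url);
                    if (response?.ok()) return;
                } catch {
                    // Keycloak not reachable yet; retry below.
                }
                await page.waitForTimeout(2000);
            }
            throw new Error(`timed out waiting for ${url}`);
        } finally {
            await page.close();
        }
    }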
							
								
								
									
playwright/tests/setups/sso-teardown.ts (new file, 15 lines)
							| @@ -0,0 +1,15 @@ | |||||||
|  | import { test, type FullConfig } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | const { execSync } = require('node:child_process'); | ||||||
|  | const utils = require('../../global-utils'); | ||||||
|  |  | ||||||
|  | utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test('Keycloak teardown', async () => { | ||||||
|  |     if( process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||||
|  |         console.log("Keep Keycloak running"); | ||||||
|  |     } else { | ||||||
|  |         console.log("Keycloak stopping"); | ||||||
|  |         execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`); | ||||||
|  |     } | ||||||
|  | }); | ||||||
							
								
								
									
playwright/tests/setups/sso.ts (new file, 129 lines)
							| @@ -0,0 +1,129 @@ | |||||||
|  | import { expect, type Page, Test } from '@playwright/test'; | ||||||
|  | import { type MailBuffer, MailServer } from 'maildev'; | ||||||
|  | import * as OTPAuth from "otpauth"; | ||||||
|  |  | ||||||
|  | import * as utils from '../../global-utils'; | ||||||
|  | import { retrieveEmailCode } from './2fa'; | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * If a MailBuffer is passed it will be used and consume the expected emails | ||||||
|  |  */ | ||||||
|  | export async function logNewUser( | ||||||
|  |     test: Test, | ||||||
|  |     page: Page, | ||||||
|  |     user: { email: string, name: string, password: string }, | ||||||
|  |     options: { mailBuffer?: MailBuffer } = {} | ||||||
|  | ) { | ||||||
|  |     await test.step(`Create user ${user.name}`, async () => { | ||||||
|  |         await page.context().clearCookies(); | ||||||
|  |  | ||||||
|  |         await test.step('Landing page', async () => { | ||||||
|  |             await utils.cleanLanding(page); | ||||||
|  |  | ||||||
|  |             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||||
|  |             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await test.step('Keycloak login', async () => { | ||||||
|  |             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||||
|  |             await page.getByLabel(/Username/).fill(user.name); | ||||||
|  |             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||||
|  |             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await test.step('Create Vault account', async () => { | ||||||
|  |             await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||||
|  |             await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||||
|  |             await page.getByLabel('Confirm new master password (').fill(user.password); | ||||||
|  |             await page.getByRole('button', { name: 'Create account' }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await test.step('Default vault page', async () => { | ||||||
|  |             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Account successfully created!'); | ||||||
|  |         await utils.checkNotification(page, 'Invitation accepted'); | ||||||
|  |  | ||||||
|  |         if( options.mailBuffer ){ | ||||||
|  |             let mailBuffer = options.mailBuffer; | ||||||
|  |             await test.step('Check emails', async () => { | ||||||
|  |                 await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||||
|  |                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||||
|  |             }); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | /** | ||||||
|  |  * If a MailBuffer is passed it will be used and consume the expected emails | ||||||
|  |  */ | ||||||
|  | export async function logUser( | ||||||
|  |     test: Test, | ||||||
|  |     page: Page, | ||||||
|  |     user: { email: string, name: string, password: string }, | ||||||
|  |     options: { | ||||||
|  |         mailBuffer ?: MailBuffer, | ||||||
|  |         totp?: OTPAuth.TOTP, | ||||||
|  |         mail2fa?: boolean, | ||||||
|  |     } = {} | ||||||
|  | ) { | ||||||
|  |     let mailBuffer = options.mailBuffer; | ||||||
|  |  | ||||||
|  |     await test.step(`Log user ${user.email}`, async () => { | ||||||
|  |         await page.context().clearCookies(); | ||||||
|  |  | ||||||
|  |         await test.step('Landing page', async () => { | ||||||
|  |             await utils.cleanLanding(page); | ||||||
|  |  | ||||||
|  |             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||||
|  |             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await test.step('Keycloak login', async () => { | ||||||
|  |             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||||
|  |             await page.getByLabel(/Username/).fill(user.name); | ||||||
|  |             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||||
|  |             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         if( options.totp || options.mail2fa ){ | ||||||
|  |             let code; | ||||||
|  |  | ||||||
|  |             await test.step('2FA check', async () => { | ||||||
|  |                 await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||||
|  |  | ||||||
|  |                 if( options.totp ) { | ||||||
|  |                     const totp = options.totp; | ||||||
|  |                     let timestamp = Date.now(); // Needed to use the next token | ||||||
|  |                     timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||||
|  |                     code = totp.generate({timestamp}); | ||||||
|  |                 } else if( options.mail2fa ){ | ||||||
|  |                     code = await retrieveEmailCode(test, page, mailBuffer); | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 await page.getByLabel(/Verification code/).fill(code); | ||||||
|  |                 await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |             }); | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         await test.step('Unlock vault', async () => { | ||||||
|  |             await expect(page).toHaveTitle('Vaultwarden Web'); | ||||||
|  |             await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible(); | ||||||
|  |             await page.getByLabel('Master password').fill(user.password); | ||||||
|  |             await page.getByRole('button', { name: 'Unlock' }).click(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         await test.step('Default vault page', async () => { | ||||||
|  |             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |         if( mailBuffer ){ | ||||||
|  |             await test.step('Check email', async () => { | ||||||
|  |                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||||
|  |             }); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
							
								
								
									
playwright/tests/setups/user.ts (new file, 55 lines)
							| @@ -0,0 +1,55 @@ | |||||||
|  | import { expect, type Browser, Page } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | import { type MailBuffer } from 'maildev'; | ||||||
|  |  | ||||||
|  | import * as utils from '../../global-utils'; | ||||||
|  |  | ||||||
|  | export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) { | ||||||
|  |     await test.step(`Create user ${user.name}`, async () => { | ||||||
|  |         await utils.cleanLanding(page); | ||||||
|  |  | ||||||
|  |         await page.getByRole('link', { name: 'Create account' }).click(); | ||||||
|  |  | ||||||
|  |         // Back to Vault create account | ||||||
|  |         await expect(page).toHaveTitle(/Create account | Vaultwarden Web/); | ||||||
|  |         await page.getByLabel(/Email address/).fill(user.email); | ||||||
|  |         await page.getByLabel('Name').fill(user.name); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |         // Vault finish Creation | ||||||
|  |         await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||||
|  |         await page.getByLabel('Confirm new master password (').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Create account' }).click(); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Your new account has been created') | ||||||
|  |  | ||||||
|  |         // We are now in the default vault page | ||||||
|  |         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||||
|  |         await utils.checkNotification(page, 'You have been logged in!'); | ||||||
|  |  | ||||||
|  |         if( mailBuffer ){ | ||||||
|  |             await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||||
|  |             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) { | ||||||
|  |     await test.step(`Log user ${user.email}`, async () => { | ||||||
|  |         await utils.cleanLanding(page); | ||||||
|  |  | ||||||
|  |         await page.getByLabel(/Email address/).fill(user.email); | ||||||
|  |         await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |  | ||||||
|  |         // Unlock page | ||||||
|  |         await page.getByLabel('Master password').fill(user.password); | ||||||
|  |         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||||
|  |  | ||||||
|  |         // We are now in the default vault page | ||||||
|  |         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |  | ||||||
|  |         if( mailBuffer ){ | ||||||
|  |             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
							
								
								
									
playwright/tests/sso_login.smtp.spec.ts (new file, 53 lines)
							| @@ -0,0 +1,53 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | import { logNewUser, logUser } from './setups/sso'; | ||||||
|  | import { activateEmail, disableEmail } from './setups/2fa'; | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | let mailserver; | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     mailserver = new MailDev({ | ||||||
|  |         port: process.env.MAILDEV_SMTP_PORT, | ||||||
|  |         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||||
|  |     }) | ||||||
|  |  | ||||||
|  |     await mailserver.listen(); | ||||||
|  |  | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SSO_ENABLED: true, | ||||||
|  |         SSO_ONLY: false, | ||||||
|  |         SMTP_HOST: process.env.MAILDEV_HOST, | ||||||
|  |         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  |     if( mailserver ){ | ||||||
|  |         await mailserver.close(); | ||||||
|  |     } | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create and activate 2FA', async ({ page }) => { | ||||||
|  |     const mailBuffer = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await logNewUser(test, page, users.user1, {mailBuffer: mailBuffer}); | ||||||
|  |  | ||||||
|  |     await activateEmail(test, page, users.user1, mailBuffer); | ||||||
|  |  | ||||||
|  |     mailBuffer.close(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Log and disable', async ({ page }) => { | ||||||
|  |     const mailBuffer = mailserver.buffer(users.user1.email); | ||||||
|  |  | ||||||
|  |     await logUser(test, page, users.user1, {mailBuffer: mailBuffer, mail2fa: true}); | ||||||
|  |  | ||||||
|  |     await disableEmail(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     mailBuffer.close(); | ||||||
|  | }); | ||||||
							
								
								
									
85  playwright/tests/sso_login.spec.ts  Normal file
							| @@ -0,0 +1,85 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  |  | ||||||
|  | import { logNewUser, logUser } from './setups/sso'; | ||||||
|  | import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SSO_ENABLED: true, | ||||||
|  |         SSO_ONLY: false | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Account creation using SSO', async ({ page }) => { | ||||||
|  |     // Landing page | ||||||
|  |     await logNewUser(test, page, users.user1); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('SSO login', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Non SSO login', async ({ page }) => { | ||||||
|  |     // Landing page | ||||||
|  |     await page.goto('/'); | ||||||
|  |     await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||||
|  |     await page.getByRole('button', { name: 'Other' }).click(); | ||||||
|  |  | ||||||
|  |     // Unlock page | ||||||
|  |     await page.getByLabel('Master password').fill(users.user1.password); | ||||||
|  |     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||||
|  |  | ||||||
|  |     // We are now in the default vault page | ||||||
|  |     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('SSO login with TOTP 2fa', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     let totp = await activateTOTP(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await logUser(test, page, users.user1, { totp }); | ||||||
|  |  | ||||||
|  |     await disableTOTP(test, page, users.user1); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.restartVault(page, testInfo, { | ||||||
|  |         SSO_ENABLED: true, | ||||||
|  |         SSO_ONLY: true | ||||||
|  |     }, false); | ||||||
|  |  | ||||||
|  |     // Landing page | ||||||
|  |     await page.goto('/'); | ||||||
|  |  | ||||||
|  |     // Check that SSO login is available | ||||||
|  |     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1); | ||||||
|  |  | ||||||
|  |     // No Continue/Other | ||||||
|  |     await expect(page.getByRole('button', { name: 'Other' })).toHaveCount(0); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  |  | ||||||
|  | test('No SSO login', async ({ page }, testInfo: TestInfo) => { | ||||||
|  |     await utils.restartVault(page, testInfo, { | ||||||
|  |         SSO_ENABLED: false | ||||||
|  |     }, false); | ||||||
|  |  | ||||||
|  |     // Landing page | ||||||
|  |     await page.goto('/'); | ||||||
|  |  | ||||||
|  |     // No SSO button (rely on a correct selector checked in previous test) | ||||||
|  |     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0); | ||||||
|  |  | ||||||
|  |     // Can continue to Master password | ||||||
|  |     await page.getByLabel(/Email address/).fill(users.user1.email); | ||||||
|  |     await page.getByRole('button', { name: 'Continue' }).click(); | ||||||
|  |     await expect(page.getByRole('button', { name: 'Log in with master password' })).toHaveCount(1); | ||||||
|  | }); | ||||||
							
								
								
									
121  playwright/tests/sso_organization.smtp.spec.ts  Normal file
							| @@ -0,0 +1,121 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  | import * as orgs from './setups/orgs'; | ||||||
|  | import { logNewUser, logUser } from './setups/sso'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     mailServer = new MailDev({ | ||||||
|  |         port: process.env.MAILDEV_SMTP_PORT, | ||||||
|  |         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||||
|  |     }) | ||||||
|  |  | ||||||
|  |     await mailServer.listen(); | ||||||
|  |  | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SMTP_HOST: process.env.MAILDEV_HOST, | ||||||
|  |         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||||
|  |         SSO_ENABLED: true, | ||||||
|  |         SSO_ONLY: true, | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     mail1Buffer = mailServer.buffer(users.user1.email); | ||||||
|  |     mail2Buffer = mailServer.buffer(users.user2.email); | ||||||
|  |     mail3Buffer = mailServer.buffer(users.user3.email); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  |     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create user3', async ({ page }) => { | ||||||
|  |     await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Invite users', async ({ page }) => { | ||||||
|  |     await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer }); | ||||||
|  |  | ||||||
|  |     await orgs.create(test, page, '/Test'); | ||||||
|  |     await orgs.members(test, page, '/Test'); | ||||||
|  |     await orgs.invite(test, page, '/Test', users.user2.email); | ||||||
|  |     await orgs.invite(test, page, '/Test', users.user3.email); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('invited with new account', async ({ page }) => { | ||||||
|  |     const link = await test.step('Extract email link', async () => { | ||||||
|  |         const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test"); | ||||||
|  |         await page.setContent(invited.html); | ||||||
|  |         return await page.getByTestId("invite").getAttribute("href"); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Redirect to Keycloak', async () => { | ||||||
|  |         await page.goto(link); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Keycloak login', async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||||
|  |         await page.getByLabel(/Username/).fill(users.user2.name); | ||||||
|  |         await page.getByLabel('Password', { exact: true }).fill(users.user2.password); | ||||||
|  |         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Create Vault account', async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||||
|  |         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||||
|  |         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||||
|  |         await page.getByRole('button', { name: 'Create account' }).click(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Default vault page', async () => { | ||||||
|  |         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |  | ||||||
|  |         await utils.checkNotification(page, 'Account successfully created!'); | ||||||
|  |         await utils.checkNotification(page, 'Invitation accepted'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Check mails', async () => { | ||||||
|  |         await mail2Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||||
|  |         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('invited with existing account', async ({ page }) => { | ||||||
|  |     const link = await test.step('Extract email link', async () => { | ||||||
|  |         const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test"); | ||||||
|  |         await page.setContent(invited.html); | ||||||
|  |         return await page.getByTestId("invite").getAttribute("href"); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Redirect to Keycloak', async () => { | ||||||
|  |         await page.goto(link); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Keycloak login', async () => { | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||||
|  |         await page.getByLabel(/Username/).fill(users.user3.name); | ||||||
|  |         await page.getByLabel('Password', { exact: true }).fill(users.user3.password); | ||||||
|  |         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Unlock vault', async () => { | ||||||
|  |         await expect(page).toHaveTitle('Vaultwarden Web'); | ||||||
|  |         await page.getByLabel('Master password').fill(users.user3.password); | ||||||
|  |         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Default vault page', async () => { | ||||||
|  |         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||||
|  |         await utils.checkNotification(page, 'Invitation accepted'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await test.step('Check mails', async () => { | ||||||
|  |         await mail3Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||||
|  |         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
							
								
								
									
76  playwright/tests/sso_organization.spec.ts  Normal file
							| @@ -0,0 +1,76 @@ | |||||||
|  | import { test, expect, type TestInfo } from '@playwright/test'; | ||||||
|  | import { MailDev } from 'maildev'; | ||||||
|  |  | ||||||
|  | import * as utils from "../global-utils"; | ||||||
|  | import * as orgs from './setups/orgs'; | ||||||
|  | import { logNewUser, logUser } from './setups/sso'; | ||||||
|  |  | ||||||
|  | let users = utils.loadEnv(); | ||||||
|  |  | ||||||
|  | test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||||
|  |     await utils.startVault(browser, testInfo, { | ||||||
|  |         SSO_ENABLED: true, | ||||||
|  |         SSO_ONLY: true, | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test.afterAll('Teardown', async ({}) => { | ||||||
|  |     utils.stopVault(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create user3', async ({ page }) => { | ||||||
|  |     await logNewUser(test, page, users.user3); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Invite users', async ({ page }) => { | ||||||
|  |     await logNewUser(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await orgs.create(test, page, '/Test'); | ||||||
|  |     await orgs.members(test, page, '/Test'); | ||||||
|  |     await orgs.invite(test, page, '/Test', users.user2.email); | ||||||
|  |     await orgs.invite(test, page, '/Test', users.user3.email); | ||||||
|  |     await orgs.confirm(test, page, '/Test', users.user3.email); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Create invited account', async ({ page }) => { | ||||||
|  |     await logNewUser(test, page, users.user2); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Confirm invited user', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  |     await orgs.members(test, page, '/Test'); | ||||||
|  |     await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/); | ||||||
|  |     await orgs.confirm(test, page, '/Test', users.user2.email); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Organization is visible', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user2); | ||||||
|  |     await page.getByLabel('vault: /Test').click(); | ||||||
|  |     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test('Enforce password policy', async ({ page }) => { | ||||||
|  |     await logUser(test, page, users.user1); | ||||||
|  |     await orgs.policies(test, page, '/Test'); | ||||||
|  |  | ||||||
|  |     await test.step(`Set master password policy`, async () => { | ||||||
|  |         await page.getByRole('button', { name: 'Master password requirements' }).click(); | ||||||
|  |         await page.getByRole('checkbox', { name: 'Turn on' }).check(); | ||||||
|  |         await page.getByRole('checkbox', { name: 'Require existing members to' }).check(); | ||||||
|  |         await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42'); | ||||||
|  |         await page.getByRole('button', { name: 'Save' }).click(); | ||||||
|  |         await utils.checkNotification(page, 'Edited policy Master password requirements.'); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     await utils.logout(test, page, users.user1); | ||||||
|  |  | ||||||
|  |     await test.step(`Unlock trigger policy`, async () => { | ||||||
|  |         await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||||
|  |         await page.getByRole('button', { name: 'Use single sign-on' }).click(); | ||||||
|  |  | ||||||
|  |         await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password); | ||||||
|  |         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||||
|  |  | ||||||
|  |         await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible(); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
| @@ -1,4 +1,4 @@ | |||||||
| [toolchain] | [toolchain] | ||||||
| channel = "1.84.1" | channel = "1.89.0" | ||||||
| components = [ "rustfmt", "clippy" ] | components = [ "rustfmt", "clippy" ] | ||||||
| profile = "minimal" | profile = "minimal" | ||||||
|   | |||||||
							
								
								
									
131  src/api/admin.rs
							| @@ -46,6 +46,7 @@ pub fn routes() -> Vec<Route> { | |||||||
|         invite_user, |         invite_user, | ||||||
|         logout, |         logout, | ||||||
|         delete_user, |         delete_user, | ||||||
|  |         delete_sso_user, | ||||||
|         deauth_user, |         deauth_user, | ||||||
|         disable_user, |         disable_user, | ||||||
|         enable_user, |         enable_user, | ||||||
| @@ -102,7 +103,7 @@ const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000"; | |||||||
| pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000"; | pub const FAKE_ADMIN_UUID: &str = "00000000-0000-0000-0000-000000000000"; | ||||||
|  |  | ||||||
| fn admin_path() -> String { | fn admin_path() -> String { | ||||||
|     format!("{}{}", CONFIG.domain_path(), ADMIN_PATH) |     format!("{}{ADMIN_PATH}", CONFIG.domain_path()) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Debug)] | #[derive(Debug)] | ||||||
| @@ -206,7 +207,7 @@ fn post_admin_login( | |||||||
|  |  | ||||||
|         cookies.add(cookie); |         cookies.add(cookie); | ||||||
|         if let Some(redirect) = redirect { |         if let Some(redirect) = redirect { | ||||||
|             Ok(Redirect::to(format!("{}{}", admin_path(), redirect))) |             Ok(Redirect::to(format!("{}{redirect}", admin_path()))) | ||||||
|         } else { |         } else { | ||||||
|             Err(AdminResponse::Ok(render_admin_page())) |             Err(AdminResponse::Ok(render_admin_page())) | ||||||
|         } |         } | ||||||
| @@ -239,6 +240,7 @@ struct AdminTemplateData { | |||||||
|     page_data: Option<Value>, |     page_data: Option<Value>, | ||||||
|     logged_in: bool, |     logged_in: bool, | ||||||
|     urlpath: String, |     urlpath: String, | ||||||
|  |     sso_enabled: bool, | ||||||
| } | } | ||||||
|  |  | ||||||
| impl AdminTemplateData { | impl AdminTemplateData { | ||||||
| @@ -248,6 +250,7 @@ impl AdminTemplateData { | |||||||
|             page_data: Some(page_data), |             page_data: Some(page_data), | ||||||
|             logged_in: true, |             logged_in: true, | ||||||
|             urlpath: CONFIG.domain_path(), |             urlpath: CONFIG.domain_path(), | ||||||
|  |             sso_enabled: CONFIG.sso_enabled(), | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -296,7 +299,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon | |||||||
|         err_code!("User already exists", Status::Conflict.code) |         err_code!("User already exists", Status::Conflict.code) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     let mut user = User::new(data.email); |     let mut user = User::new(data.email, None); | ||||||
|  |  | ||||||
|     async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { |     async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { | ||||||
|         if CONFIG.mail_enabled() { |         if CONFIG.mail_enabled() { | ||||||
| @@ -336,7 +339,7 @@ fn logout(cookies: &CookieJar<'_>) -> Redirect { | |||||||
| async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> { | async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> { | ||||||
|     let users = User::get_all(&mut conn).await; |     let users = User::get_all(&mut conn).await; | ||||||
|     let mut users_json = Vec::with_capacity(users.len()); |     let mut users_json = Vec::with_capacity(users.len()); | ||||||
|     for u in users { |     for (u, _) in users { | ||||||
|         let mut usr = u.to_json(&mut conn).await; |         let mut usr = u.to_json(&mut conn).await; | ||||||
|         usr["userEnabled"] = json!(u.enabled); |         usr["userEnabled"] = json!(u.enabled); | ||||||
|         usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); |         usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); | ||||||
| @@ -354,7 +357,7 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> { | |||||||
| async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> { | async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> { | ||||||
|     let users = User::get_all(&mut conn).await; |     let users = User::get_all(&mut conn).await; | ||||||
|     let mut users_json = Vec::with_capacity(users.len()); |     let mut users_json = Vec::with_capacity(users.len()); | ||||||
|     for u in users { |     for (u, sso_u) in users { | ||||||
|         let mut usr = u.to_json(&mut conn).await; |         let mut usr = u.to_json(&mut conn).await; | ||||||
|         usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await); |         usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await); | ||||||
|         usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await); |         usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await); | ||||||
| @@ -365,6 +368,9 @@ async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html< | |||||||
|             Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), |             Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), | ||||||
|             None => json!("Never"), |             None => json!("Never"), | ||||||
|         }; |         }; | ||||||
|  |  | ||||||
|  |         usr["sso_identifier"] = json!(sso_u.map(|u| u.identifier.to_string()).unwrap_or(String::new())); | ||||||
|  |  | ||||||
|         users_json.push(usr); |         users_json.push(usr); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -417,17 +423,38 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em | |||||||
|     res |     res | ||||||
| } | } | ||||||
|  |  | ||||||
|  | #[delete("/users/<user_id>/sso", format = "application/json")] | ||||||
|  | async fn delete_sso_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult { | ||||||
|  |     let memberships = Membership::find_any_state_by_user(&user_id, &mut conn).await; | ||||||
|  |     let res = SsoUser::delete(&user_id, &mut conn).await; | ||||||
|  |  | ||||||
|  |     for membership in memberships { | ||||||
|  |         log_event( | ||||||
|  |             EventType::OrganizationUserUnlinkedSso as i32, | ||||||
|  |             &membership.uuid, | ||||||
|  |             &membership.org_uuid, | ||||||
|  |             &ACTING_ADMIN_USER.into(), | ||||||
|  |             14, // Use UnknownBrowser type | ||||||
|  |             &token.ip.ip, | ||||||
|  |             &mut conn, | ||||||
|  |         ) | ||||||
|  |         .await; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     res | ||||||
|  | } | ||||||
|  |  | ||||||
| #[post("/users/<user_id>/deauth", format = "application/json")] | #[post("/users/<user_id>/deauth", format = "application/json")] | ||||||
| async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     let mut user = get_user_or_404(&user_id, &mut conn).await?; |     let mut user = get_user_or_404(&user_id, &mut conn).await?; | ||||||
|  |  | ||||||
|     nt.send_logout(&user, None).await; |     nt.send_logout(&user, None, &mut conn).await; | ||||||
|  |  | ||||||
|     if CONFIG.push_enabled() { |     if CONFIG.push_enabled() { | ||||||
|         for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await { |         for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await { | ||||||
|             match unregister_push_device(device.push_uuid).await { |             match unregister_push_device(&device.push_uuid).await { | ||||||
|                 Ok(r) => r, |                 Ok(r) => r, | ||||||
|                 Err(e) => error!("Unable to unregister devices from Bitwarden server: {}", e), |                 Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"), | ||||||
|             }; |             }; | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| @@ -447,7 +474,7 @@ async fn disable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: | |||||||
|  |  | ||||||
|     let save_result = user.save(&mut conn).await; |     let save_result = user.save(&mut conn).await; | ||||||
|  |  | ||||||
|     nt.send_logout(&user, None).await; |     nt.send_logout(&user, None, &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
| @@ -591,20 +618,14 @@ struct GitCommit { | |||||||
|     sha: String, |     sha: String, | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Deserialize)] |  | ||||||
| struct TimeApi { |  | ||||||
|     year: u16, |  | ||||||
|     month: u8, |  | ||||||
|     day: u8, |  | ||||||
|     hour: u8, |  | ||||||
|     minute: u8, |  | ||||||
|     seconds: u8, |  | ||||||
| } |  | ||||||
|  |  | ||||||
| async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> { | async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> { | ||||||
|     Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.json::<T>().await?) |     Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.json::<T>().await?) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | async fn get_text_api(url: &str) -> Result<String, Error> { | ||||||
|  |     Ok(make_http_request(Method::GET, url)?.send().await?.error_for_status()?.text().await?) | ||||||
|  | } | ||||||
|  |  | ||||||
| async fn has_http_access() -> bool { | async fn has_http_access() -> bool { | ||||||
|     let Ok(req) = make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") else { |     let Ok(req) = make_http_request(Method::HEAD, "https://github.com/dani-garcia/vaultwarden") else { | ||||||
|         return false; |         return false; | ||||||
| @@ -616,10 +637,12 @@ async fn has_http_access() -> bool { | |||||||
| } | } | ||||||
|  |  | ||||||
| use cached::proc_macro::cached; | use cached::proc_macro::cached; | ||||||
| /// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already. | /// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already | ||||||
| /// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit. | /// It will cache this function for 600 seconds (10 minutes) which should prevent the exhaustion of the rate limit | ||||||
| #[cached(time = 300, sync_writes = true)] | /// Any cache will be lost if Vaultwarden is restarted | ||||||
| async fn get_release_info(has_http_access: bool, running_within_container: bool) -> (String, String, String) { | use std::time::Duration; // Needed for cached | ||||||
|  | #[cached(time = 600, sync_writes = "default")] | ||||||
|  | async fn get_release_info(has_http_access: bool) -> (String, String, String) { | ||||||
|     // If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway. |     // If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway. | ||||||
|     if has_http_access { |     if has_http_access { | ||||||
|         ( |         ( | ||||||
| @@ -636,19 +659,13 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool) | |||||||
|                 } |                 } | ||||||
|                 _ => "-".to_string(), |                 _ => "-".to_string(), | ||||||
|             }, |             }, | ||||||
|             // Do not fetch the web-vault version when running within a container. |             // Do not fetch the web-vault version when running within a container | ||||||
|             // The web-vault version is embedded within the container it self, and should not be updated manually |             // The web-vault version is embedded within the container it self, and should not be updated manually | ||||||
|             if running_within_container { |             match get_json_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") | ||||||
|                 "-".to_string() |  | ||||||
|             } else { |  | ||||||
|                 match get_json_api::<GitRelease>( |  | ||||||
|                     "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", |  | ||||||
|                 ) |  | ||||||
|                 .await |                 .await | ||||||
|                 { |             { | ||||||
|                     Ok(r) => r.tag_name.trim_start_matches('v').to_string(), |                 Ok(r) => r.tag_name.trim_start_matches('v').to_string(), | ||||||
|                     _ => "-".to_string(), |                 _ => "-".to_string(), | ||||||
|                 } |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|     } else { |     } else { | ||||||
| @@ -658,17 +675,18 @@ async fn get_release_info(has_http_access: bool, running_within_container: bool) | |||||||
|  |  | ||||||
| async fn get_ntp_time(has_http_access: bool) -> String { | async fn get_ntp_time(has_http_access: bool) -> String { | ||||||
|     if has_http_access { |     if has_http_access { | ||||||
|         if let Ok(ntp_time) = get_json_api::<TimeApi>("https://www.timeapi.io/api/Time/current/zone?timeZone=UTC").await |         if let Ok(cf_trace) = get_text_api("https://cloudflare.com/cdn-cgi/trace").await { | ||||||
|         { |             for line in cf_trace.lines() { | ||||||
|             return format!( |                 if let Some((key, value)) = line.split_once('=') { | ||||||
|                 "{year}-{month:02}-{day:02} {hour:02}:{minute:02}:{seconds:02} UTC", |                     if key == "ts" { | ||||||
|                 year = ntp_time.year, |                         let ts = value.split_once('.').map_or(value, |(s, _)| s); | ||||||
|                 month = ntp_time.month, |                         if let Ok(dt) = chrono::DateTime::parse_from_str(ts, "%s") { | ||||||
|                 day = ntp_time.day, |                             return dt.format("%Y-%m-%d %H:%M:%S UTC").to_string(); | ||||||
|                 hour = ntp_time.hour, |                         } | ||||||
|                 minute = ntp_time.minute, |                         break; | ||||||
|                 seconds = ntp_time.seconds |                     } | ||||||
|             ); |                 } | ||||||
|  |             } | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|     String::from("Unable to fetch NTP time.") |     String::from("Unable to fetch NTP time.") | ||||||
| @@ -693,14 +711,23 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) | |||||||
|         _ => "Unable to resolve domain name.".to_string(), |         _ => "Unable to resolve domain name.".to_string(), | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     let (latest_release, latest_commit, latest_web_build) = |     let (latest_release, latest_commit, latest_web_build) = get_release_info(has_http_access).await; | ||||||
|         get_release_info(has_http_access, running_within_container).await; |  | ||||||
|  |  | ||||||
|     let ip_header_name = &ip_header.0.unwrap_or_default(); |     let ip_header_name = &ip_header.0.unwrap_or_default(); | ||||||
|  |  | ||||||
|     // Get current running versions |     // Get current running versions | ||||||
|     let web_vault_version = get_web_vault_version(); |     let web_vault_version = get_web_vault_version(); | ||||||
|  |  | ||||||
|  |     // Check if the running version is newer than the latest stable released version | ||||||
|  |     let web_vault_pre_release = if let Ok(web_ver_match) = semver::VersionReq::parse(&format!(">{latest_web_build}")) { | ||||||
|  |         web_ver_match.matches( | ||||||
|  |             &semver::Version::parse(&web_vault_version).unwrap_or_else(|_| semver::Version::parse("2025.1.1").unwrap()), | ||||||
|  |         ) | ||||||
|  |     } else { | ||||||
|  |         error!("Unable to parse latest_web_build: '{latest_web_build}'"); | ||||||
|  |         false | ||||||
|  |     }; | ||||||
|  |  | ||||||
|     let diagnostics_json = json!({ |     let diagnostics_json = json!({ | ||||||
|         "dns_resolved": dns_resolved, |         "dns_resolved": dns_resolved, | ||||||
|         "current_release": VERSION, |         "current_release": VERSION, | ||||||
| @@ -709,6 +736,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) | |||||||
|         "web_vault_enabled": &CONFIG.web_vault_enabled(), |         "web_vault_enabled": &CONFIG.web_vault_enabled(), | ||||||
|         "web_vault_version": web_vault_version, |         "web_vault_version": web_vault_version, | ||||||
|         "latest_web_build": latest_web_build, |         "latest_web_build": latest_web_build, | ||||||
|  |         "web_vault_pre_release": web_vault_pre_release, | ||||||
|         "running_within_container": running_within_container, |         "running_within_container": running_within_container, | ||||||
|         "container_base_image": if running_within_container { container_base_image() } else { "Not applicable" }, |         "container_base_image": if running_within_container { container_base_image() } else { "Not applicable" }, | ||||||
|         "has_http_access": has_http_access, |         "has_http_access": has_http_access, | ||||||
| @@ -724,6 +752,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) | |||||||
|         "overrides": &CONFIG.get_overrides().join(", "), |         "overrides": &CONFIG.get_overrides().join(", "), | ||||||
|         "host_arch": env::consts::ARCH, |         "host_arch": env::consts::ARCH, | ||||||
|         "host_os":  env::consts::OS, |         "host_os":  env::consts::OS, | ||||||
|  |         "tz_env": env::var("TZ").unwrap_or_default(), | ||||||
|         "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(), |         "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(), | ||||||
|         "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference |         "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference | ||||||
|         "ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference |         "ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference | ||||||
| @@ -745,17 +774,17 @@ fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult { | |||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/config", format = "application/json", data = "<data>")] | #[post("/config", format = "application/json", data = "<data>")] | ||||||
| fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult { | async fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult { | ||||||
|     let data: ConfigBuilder = data.into_inner(); |     let data: ConfigBuilder = data.into_inner(); | ||||||
|     if let Err(e) = CONFIG.update_config(data, true) { |     if let Err(e) = CONFIG.update_config(data, true).await { | ||||||
|         err!(format!("Unable to save config: {e:?}")) |         err!(format!("Unable to save config: {e:?}")) | ||||||
|     } |     } | ||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/config/delete", format = "application/json")] | #[post("/config/delete", format = "application/json")] | ||||||
| fn delete_config(_token: AdminToken) -> EmptyResult { | async fn delete_config(_token: AdminToken) -> EmptyResult { | ||||||
|     if let Err(e) = CONFIG.delete_user_config() { |     if let Err(e) = CONFIG.delete_user_config().await { | ||||||
|         err!(format!("Unable to delete config: {e:?}")) |         err!(format!("Unable to delete config: {e:?}")) | ||||||
|     } |     } | ||||||
|     Ok(()) |     Ok(()) | ||||||
|   | |||||||
| @@ -7,9 +7,9 @@ use serde_json::Value; | |||||||
|  |  | ||||||
| use crate::{ | use crate::{ | ||||||
|     api::{ |     api::{ | ||||||
|         core::{log_user_event, two_factor::email}, |         core::{accept_org_invite, log_user_event, two_factor::email}, | ||||||
|         register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify, |         master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult, | ||||||
|         PasswordOrOtpData, UpdateType, |         JsonResult, Notify, PasswordOrOtpData, UpdateType, | ||||||
|     }, |     }, | ||||||
|     auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, |     auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, | ||||||
|     crypto, |     crypto, | ||||||
| @@ -34,6 +34,7 @@ pub fn routes() -> Vec<rocket::Route> { | |||||||
|         get_public_keys, |         get_public_keys, | ||||||
|         post_keys, |         post_keys, | ||||||
|         post_password, |         post_password, | ||||||
|  |         post_set_password, | ||||||
|         post_kdf, |         post_kdf, | ||||||
|         post_rotatekey, |         post_rotatekey, | ||||||
|         post_sstamp, |         post_sstamp, | ||||||
| @@ -68,20 +69,53 @@ pub fn routes() -> Vec<rocket::Route> { | |||||||
|  |  | ||||||
| #[derive(Debug, Deserialize)] | #[derive(Debug, Deserialize)] | ||||||
| #[serde(rename_all = "camelCase")] | #[serde(rename_all = "camelCase")] | ||||||
| pub struct RegisterData { | pub struct KDFData { | ||||||
|     email: String, |     kdf: i32, | ||||||
|     kdf: Option<i32>, |     kdf_iterations: i32, | ||||||
|     kdf_iterations: Option<i32>, |  | ||||||
|     kdf_memory: Option<i32>, |     kdf_memory: Option<i32>, | ||||||
|     kdf_parallelism: Option<i32>, |     kdf_parallelism: Option<i32>, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Debug, Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | pub struct RegisterData { | ||||||
|  |     email: String, | ||||||
|  |  | ||||||
|  |     #[serde(flatten)] | ||||||
|  |     kdf: KDFData, | ||||||
|  |  | ||||||
|  |     #[serde(alias = "userSymmetricKey")] | ||||||
|  |     key: String, | ||||||
|  |     #[serde(alias = "userAsymmetricKeys")] | ||||||
|  |     keys: Option<KeysData>, | ||||||
|  |  | ||||||
|  |     master_password_hash: String, | ||||||
|  |     master_password_hint: Option<String>, | ||||||
|  |  | ||||||
|  |     name: Option<String>, | ||||||
|  |  | ||||||
|  |     #[allow(dead_code)] | ||||||
|  |     organization_user_id: Option<MembershipId>, | ||||||
|  |  | ||||||
|  |     // Used only from the register/finish endpoint | ||||||
|  |     email_verification_token: Option<String>, | ||||||
|  |     accept_emergency_access_id: Option<EmergencyAccessId>, | ||||||
|  |     accept_emergency_access_invite_token: Option<String>, | ||||||
|  |     #[serde(alias = "token")] | ||||||
|  |     org_invite_token: Option<String>, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Debug, Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | pub struct SetPasswordData { | ||||||
|  |     #[serde(flatten)] | ||||||
|  |     kdf: KDFData, | ||||||
|  |  | ||||||
|     key: String, |     key: String, | ||||||
|     keys: Option<KeysData>, |     keys: Option<KeysData>, | ||||||
|     master_password_hash: String, |     master_password_hash: String, | ||||||
|     master_password_hint: Option<String>, |     master_password_hint: Option<String>, | ||||||
|     name: Option<String>, |     org_identifier: Option<String>, | ||||||
|     token: Option<String>, |  | ||||||
|     #[allow(dead_code)] |  | ||||||
|     organization_user_id: Option<MembershipId>, |  | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Debug, Deserialize)] | #[derive(Debug, Deserialize)] | ||||||
| @@ -115,22 +149,86 @@ async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &mut DbCon | |||||||
|     if CONFIG.email_2fa_enforce_on_verified_invite() { |     if CONFIG.email_2fa_enforce_on_verified_invite() { | ||||||
|         return true; |         return true; | ||||||
|     } |     } | ||||||
|     if member_id.is_some() { |     if let Some(member_id) = member_id { | ||||||
|         return OrgPolicy::is_enabled_for_member(&member_id.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn) |         return OrgPolicy::is_enabled_for_member(&member_id, OrgPolicyType::TwoFactorAuthentication, conn).await; | ||||||
|             .await; |  | ||||||
|     } |     } | ||||||
|     false |     false | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/accounts/register", data = "<data>")] | #[post("/accounts/register", data = "<data>")] | ||||||
| async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult { | async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult { | ||||||
|     _register(data, conn).await |     _register(data, false, conn).await | ||||||
| } | } | ||||||
|  |  | ||||||
| pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult { | pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut conn: DbConn) -> JsonResult { | ||||||
|     let data: RegisterData = data.into_inner(); |     let mut data: RegisterData = data.into_inner(); | ||||||
|     let email = data.email.to_lowercase(); |     let email = data.email.to_lowercase(); | ||||||
|  |  | ||||||
|  |     let mut email_verified = false; | ||||||
|  |  | ||||||
|  |     let mut pending_emergency_access = None; | ||||||
|  |  | ||||||
|  |     // First, validate the provided verification tokens | ||||||
|  |     if email_verification { | ||||||
|  |         match ( | ||||||
|  |             &data.email_verification_token, | ||||||
|  |             &data.accept_emergency_access_id, | ||||||
|  |             &data.accept_emergency_access_invite_token, | ||||||
|  |             &data.organization_user_id, | ||||||
|  |             &data.org_invite_token, | ||||||
|  |         ) { | ||||||
|  |             // Normal user registration, when email verification is required | ||||||
|  |             (Some(email_verification_token), None, None, None, None) => { | ||||||
|  |                 let claims = crate::auth::decode_register_verify(email_verification_token)?; | ||||||
|  |                 if claims.sub != data.email { | ||||||
|  |                     err!("Email verification token does not match email"); | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 // During this call we don't get the name, so extract it from the claims | ||||||
|  |                 if claims.name.is_some() { | ||||||
|  |                     data.name = claims.name; | ||||||
|  |                 } | ||||||
|  |                 email_verified = claims.verified; | ||||||
|  |             } | ||||||
|  |             // Emergency access registration | ||||||
|  |             (None, Some(accept_emergency_access_id), Some(accept_emergency_access_invite_token), None, None) => { | ||||||
|  |                 if !CONFIG.emergency_access_allowed() { | ||||||
|  |                     err!("Emergency access is not enabled.") | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 let claims = crate::auth::decode_emergency_access_invite(accept_emergency_access_invite_token)?; | ||||||
|  |  | ||||||
|  |                 if claims.email != data.email { | ||||||
|  |                     err!("Claim email does not match email") | ||||||
|  |                 } | ||||||
|  |                 if &claims.emer_id != accept_emergency_access_id { | ||||||
|  |                     err!("Claim emer_id does not match accept_emergency_access_id") | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 pending_emergency_access = Some((accept_emergency_access_id, claims)); | ||||||
|  |                 email_verified = true; | ||||||
|  |             } | ||||||
|  |             // Org invite | ||||||
|  |             (None, None, None, Some(organization_user_id), Some(org_invite_token)) => { | ||||||
|  |                 let claims = decode_invite(org_invite_token)?; | ||||||
|  |  | ||||||
|  |                 if claims.email != data.email { | ||||||
|  |                     err!("Claim email does not match email") | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 if &claims.member_id != organization_user_id { | ||||||
|  |                     err!("Claim org_user_id does not match organization_user_id") | ||||||
|  |                 } | ||||||
|  |  | ||||||
|  |                 email_verified = true; | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |             _ => { | ||||||
|  |                 err!("Registration is missing required parameters") | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|     // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) |     // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) | ||||||
|     // This also prevents issues with very long usernames causing to large JWT's. See #2419 |     // This also prevents issues with very long usernames causing to large JWT's. See #2419 | ||||||
|     if let Some(ref name) = data.name { |     if let Some(ref name) = data.name { | ||||||
| @@ -144,29 +242,23 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult | |||||||
|     let password_hint = clean_password_hint(&data.master_password_hint); |     let password_hint = clean_password_hint(&data.master_password_hint); | ||||||
|     enforce_password_hint_setting(&password_hint)?; |     enforce_password_hint_setting(&password_hint)?; | ||||||
|  |  | ||||||
|     let mut verified_by_invite = false; |  | ||||||
|  |  | ||||||
|     let mut user = match User::find_by_mail(&email, &mut conn).await { |     let mut user = match User::find_by_mail(&email, &mut conn).await { | ||||||
|         Some(mut user) => { |         Some(user) => { | ||||||
|             if !user.password_hash.is_empty() { |             if !user.password_hash.is_empty() { | ||||||
|                 err!("Registration not allowed or user already exists") |                 err!("Registration not allowed or user already exists") | ||||||
|             } |             } | ||||||
|  |  | ||||||
|             if let Some(token) = data.token { |             if let Some(token) = data.org_invite_token { | ||||||
|                 let claims = decode_invite(&token)?; |                 let claims = decode_invite(&token)?; | ||||||
|                 if claims.email == email { |                 if claims.email == email { | ||||||
|                     // Verify the email address when signing up via a valid invite token |                     // Verify the email address when signing up via a valid invite token | ||||||
|                     verified_by_invite = true; |                     email_verified = true; | ||||||
|                     user.verified_at = Some(Utc::now().naive_utc()); |  | ||||||
|                     user |                     user | ||||||
|                 } else { |                 } else { | ||||||
|                     err!("Registration email does not match invite email") |                     err!("Registration email does not match invite email") | ||||||
|                 } |                 } | ||||||
|             } else if Invitation::take(&email, &mut conn).await { |             } else if Invitation::take(&email, &mut conn).await { | ||||||
|                 for membership in Membership::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() { |                 Membership::accept_user_invitations(&user.uuid, &mut conn).await?; | ||||||
|                     membership.status = MembershipStatus::Accepted as i32; |  | ||||||
|                     membership.save(&mut conn).await?; |  | ||||||
|                 } |  | ||||||
|                 user |                 user | ||||||
|             } else if CONFIG.is_signup_allowed(&email) |             } else if CONFIG.is_signup_allowed(&email) | ||||||
|                 || (CONFIG.emergency_access_allowed() |                 || (CONFIG.emergency_access_allowed() | ||||||
| @@ -181,8 +273,11 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult | |||||||
|             // Order is important here; the invitation check must come first |             // Order is important here; the invitation check must come first | ||||||
|             // because the vaultwarden admin can invite anyone, regardless |             // because the vaultwarden admin can invite anyone, regardless | ||||||
|             // of other signup restrictions. |             // of other signup restrictions. | ||||||
|             if Invitation::take(&email, &mut conn).await || CONFIG.is_signup_allowed(&email) { |             if Invitation::take(&email, &mut conn).await | ||||||
|                 User::new(email.clone()) |                 || CONFIG.is_signup_allowed(&email) | ||||||
|  |                 || pending_emergency_access.is_some() | ||||||
|  |             { | ||||||
|  |                 User::new(email.clone(), None) | ||||||
|             } else { |             } else { | ||||||
|                 err!("Registration not allowed or user already exists") |                 err!("Registration not allowed or user already exists") | ||||||
|             } |             } | ||||||
| @@ -192,16 +287,7 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult | |||||||
|     // Make sure we don't leave a lingering invitation. |     // Make sure we don't leave a lingering invitation. | ||||||
|     Invitation::take(&email, &mut conn).await; |     Invitation::take(&email, &mut conn).await; | ||||||
|  |  | ||||||
|     if let Some(client_kdf_type) = data.kdf { |     set_kdf_data(&mut user, data.kdf)?; | ||||||
|         user.client_kdf_type = client_kdf_type; |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     if let Some(client_kdf_iter) = data.kdf_iterations { |  | ||||||
|         user.client_kdf_iter = client_kdf_iter; |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     user.client_kdf_memory = data.kdf_memory; |  | ||||||
|     user.client_kdf_parallelism = data.kdf_parallelism; |  | ||||||
|  |  | ||||||
|     user.set_password(&data.master_password_hash, Some(data.key), true, None); |     user.set_password(&data.master_password_hash, Some(data.key), true, None); | ||||||
|     user.password_hint = password_hint; |     user.password_hint = password_hint; | ||||||
| @@ -216,17 +302,21 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult | |||||||
|         user.public_key = Some(keys.public_key); |         user.public_key = Some(keys.public_key); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     if email_verified { | ||||||
|  |         user.verified_at = Some(Utc::now().naive_utc()); | ||||||
|  |     } | ||||||
|  |  | ||||||
|     if CONFIG.mail_enabled() { |     if CONFIG.mail_enabled() { | ||||||
|         if CONFIG.signups_verify() && !verified_by_invite { |         if CONFIG.signups_verify() && !email_verified { | ||||||
|             if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await { |             if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await { | ||||||
|                 error!("Error sending welcome email: {:#?}", e); |                 error!("Error sending welcome email: {e:#?}"); | ||||||
|             } |             } | ||||||
|             user.last_verifying_at = Some(user.created_at); |             user.last_verifying_at = Some(user.created_at); | ||||||
|         } else if let Err(e) = mail::send_welcome(&user.email).await { |         } else if let Err(e) = mail::send_welcome(&user.email).await { | ||||||
|             error!("Error sending welcome email: {:#?}", e); |             error!("Error sending welcome email: {e:#?}"); | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await { |         if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await { | ||||||
|             email::activate_email_2fa(&user, &mut conn).await.ok(); |             email::activate_email_2fa(&user, &mut conn).await.ok(); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| @@ -246,6 +336,68 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult | |||||||
|     }))) |     }))) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | #[post("/accounts/set-password", data = "<data>")] | ||||||
|  | async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||||
|  |     let data: SetPasswordData = data.into_inner(); | ||||||
|  |     let mut user = headers.user; | ||||||
|  |  | ||||||
|  |     if user.private_key.is_some() { | ||||||
|  |         err!("Account already initialized, cannot set password") | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // Check against the password hint setting here so if it fails, | ||||||
|  |     // the user can retry without losing their invitation below. | ||||||
|  |     let password_hint = clean_password_hint(&data.master_password_hint); | ||||||
|  |     enforce_password_hint_setting(&password_hint)?; | ||||||
|  |  | ||||||
|  |     set_kdf_data(&mut user, data.kdf)?; | ||||||
|  |  | ||||||
|  |     user.set_password( | ||||||
|  |         &data.master_password_hash, | ||||||
|  |         Some(data.key), | ||||||
|  |         false, | ||||||
|  |         Some(vec![String::from("revision_date")]), // We need to allow revision-date to use the old security_timestamp | ||||||
|  |     ); | ||||||
|  |     user.password_hint = password_hint; | ||||||
|  |  | ||||||
|  |     if let Some(keys) = data.keys { | ||||||
|  |         user.private_key = Some(keys.encrypted_private_key); | ||||||
|  |         user.public_key = Some(keys.public_key); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if let Some(identifier) = data.org_identifier { | ||||||
|  |         if identifier != crate::sso::FAKE_IDENTIFIER { | ||||||
|  |             let org = match Organization::find_by_name(&identifier, &mut conn).await { | ||||||
|  |                 None => err!("Failed to retrieve the associated organization"), | ||||||
|  |                 Some(org) => org, | ||||||
|  |             }; | ||||||
|  |  | ||||||
|  |             let membership = match Membership::find_by_user_and_org(&user.uuid, &org.uuid, &mut conn).await { | ||||||
|  |                 None => err!("Failed to retrieve the invitation"), | ||||||
|  |                 Some(org) => org, | ||||||
|  |             }; | ||||||
|  |  | ||||||
|  |             accept_org_invite(&user, membership, None, &mut conn).await?; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if CONFIG.mail_enabled() { | ||||||
|  |         mail::send_welcome(&user.email.to_lowercase()).await?; | ||||||
|  |     } else { | ||||||
|  |         Membership::accept_user_invitations(&user.uuid, &mut conn).await?; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn) | ||||||
|  |         .await; | ||||||
|  |  | ||||||
|  |     user.save(&mut conn).await?; | ||||||
|  |  | ||||||
|  |     Ok(Json(json!({ | ||||||
|  |       "Object": "set-password", | ||||||
|  |       "CaptchaBypassToken": "", | ||||||
|  |     }))) | ||||||
|  | } | ||||||
|  |  | ||||||
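The new post_set_password handler above only shows how the request fields are consumed, not the SetPasswordData struct itself. As a rough illustration, the payload it expects probably looks something like the sketch below; every name, the optionality, and the flattened KDF block are assumptions inferred from the handler body and from ChangeKdfData further down, not the actual vaultwarden definition.

```rust
// Hypothetical sketch of the body post_set_password appears to consume,
// reconstructed only from the fields the handler reads (data.kdf, data.key,
// data.keys, data.master_password_hash, data.master_password_hint,
// data.org_identifier). The real struct may differ.
use serde::Deserialize;
use serde_json::json;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct KdfSketch {
    kdf: i32,
    kdf_iterations: i32,
    kdf_memory: Option<i32>,
    kdf_parallelism: Option<i32>,
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct KeysSketch {
    encrypted_private_key: String,
    public_key: String,
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct SetPasswordSketch {
    #[serde(flatten)]
    kdf: KdfSketch,                   // assumed flattened, like ChangeKdfData below
    key: String,                      // master-key-encrypted user key
    keys: Option<KeysSketch>,         // asymmetric keypair, if the client sends one
    master_password_hash: String,
    master_password_hint: Option<String>,
    org_identifier: Option<String>,   // SSO organization identifier
}

fn main() {
    // Example body an SSO-invited client might send when initializing its account.
    let body = json!({
        "kdf": 0,
        "kdfIterations": 600000,
        "key": "2.encrypted-user-key",
        "keys": { "encryptedPrivateKey": "2.priv", "publicKey": "pub" },
        "masterPasswordHash": "base64-hash",
        "masterPasswordHint": null,
        "orgIdentifier": "my-org"
    });
    let parsed: SetPasswordSketch = serde_json::from_value(body).expect("valid payload");
    println!("{parsed:?}");
}
```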
| #[get("/accounts/profile")] | #[get("/accounts/profile")] | ||||||
| async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> { | async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> { | ||||||
|     Json(headers.user.to_json(&mut conn).await) |     Json(headers.user.to_json(&mut conn).await) | ||||||
| @@ -255,7 +407,6 @@ async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> { | |||||||
| #[serde(rename_all = "camelCase")] | #[serde(rename_all = "camelCase")] | ||||||
| struct ProfileData { | struct ProfileData { | ||||||
|     // culture: String, // Ignored, always use en-US |     // culture: String, // Ignored, always use en-US | ||||||
|     // masterPasswordHint: Option<String>, // Ignored, has been moved to ChangePassData |  | ||||||
|     name: String, |     name: String, | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -381,7 +532,7 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D | |||||||
|     // Prevent logging out the client from which the user requested this endpoint. |     // Prevent logging out the client from which the user requested this endpoint. | ||||||
|     // If you do log out the user it will cause issues on the client side. |     // If you do log out the user it will cause issues on the client side. | ||||||
|     // Adding the device uuid will prevent this. |     // Adding the device uuid will prevent this. | ||||||
|     nt.send_logout(&user, Some(headers.device.uuid.clone())).await; |     nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
| @@ -389,25 +540,15 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D | |||||||
| #[derive(Deserialize)] | #[derive(Deserialize)] | ||||||
| #[serde(rename_all = "camelCase")] | #[serde(rename_all = "camelCase")] | ||||||
| struct ChangeKdfData { | struct ChangeKdfData { | ||||||
|     kdf: i32, |     #[serde(flatten)] | ||||||
|     kdf_iterations: i32, |     kdf: KDFData, | ||||||
|     kdf_memory: Option<i32>, |  | ||||||
|     kdf_parallelism: Option<i32>, |  | ||||||
|  |  | ||||||
|     master_password_hash: String, |     master_password_hash: String, | ||||||
|     new_master_password_hash: String, |     new_master_password_hash: String, | ||||||
|     key: String, |     key: String, | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/accounts/kdf", data = "<data>")] | fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult { | ||||||
| async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { |  | ||||||
|     let data: ChangeKdfData = data.into_inner(); |  | ||||||
|     let mut user = headers.user; |  | ||||||
|  |  | ||||||
|     if !user.check_valid_password(&data.master_password_hash) { |  | ||||||
|         err!("Invalid password") |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { |     if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { | ||||||
|         err!("PBKDF2 KDF iterations must be at least 100000.") |         err!("PBKDF2 KDF iterations must be at least 100000.") | ||||||
|     } |     } | ||||||
| @@ -438,10 +579,25 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, | |||||||
|     } |     } | ||||||
|     user.client_kdf_iter = data.kdf_iterations; |     user.client_kdf_iter = data.kdf_iterations; | ||||||
|     user.client_kdf_type = data.kdf; |     user.client_kdf_type = data.kdf; | ||||||
|  |  | ||||||
|  |     Ok(()) | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[post("/accounts/kdf", data = "<data>")] | ||||||
|  | async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|  |     let data: ChangeKdfData = data.into_inner(); | ||||||
|  |     let mut user = headers.user; | ||||||
|  |  | ||||||
|  |     if !user.check_valid_password(&data.master_password_hash) { | ||||||
|  |         err!("Invalid password") | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     set_kdf_data(&mut user, data.kdf)?; | ||||||
|  |  | ||||||
|     user.set_password(&data.new_master_password_hash, Some(data.key), true, None); |     user.set_password(&data.new_master_password_hash, Some(data.key), true, None); | ||||||
|     let save_result = user.save(&mut conn).await; |     let save_result = user.save(&mut conn).await; | ||||||
|  |  | ||||||
|     nt.send_logout(&user, Some(headers.device.uuid.clone())).await; |     nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
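The refactor above pulls the KDF validation out of post_kdf into a shared set_kdf_data helper so that post_set_password can reuse it. Below is a standalone sketch of the PBKDF2 floor visible in this hunk (the Argon2id checks live in the elided middle of the function), with vaultwarden's User, KDFData and err! replaced by plain Rust so it runs on its own.

```rust
// Standalone sketch of the iteration floor enforced by set_kdf_data above.
enum KdfType {
    Pbkdf2 = 0,
    Argon2id = 1,
}

fn check_kdf(kdf_type: i32, kdf_iterations: i32) -> Result<(), &'static str> {
    if kdf_type == KdfType::Pbkdf2 as i32 && kdf_iterations < 100_000 {
        return Err("PBKDF2 KDF iterations must be at least 100000.");
    }
    Ok(())
}

fn main() {
    assert!(check_kdf(KdfType::Pbkdf2 as i32, 50_000).is_err());
    assert!(check_kdf(KdfType::Pbkdf2 as i32, 600_000).is_ok());
    assert!(check_kdf(KdfType::Argon2id as i32, 3).is_ok()); // Argon2id limits not modeled here
    println!("kdf check sketch ok");
}
```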
| @@ -476,14 +632,45 @@ use super::sends::{update_send_from_data, SendData}; | |||||||
| #[derive(Deserialize)] | #[derive(Deserialize)] | ||||||
| #[serde(rename_all = "camelCase")] | #[serde(rename_all = "camelCase")] | ||||||
| struct KeyData { | struct KeyData { | ||||||
|  |     account_unlock_data: RotateAccountUnlockData, | ||||||
|  |     account_keys: RotateAccountKeys, | ||||||
|  |     account_data: RotateAccountData, | ||||||
|  |     old_master_key_authentication_hash: String, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | struct RotateAccountUnlockData { | ||||||
|  |     emergency_access_unlock_data: Vec<UpdateEmergencyAccessData>, | ||||||
|  |     master_password_unlock_data: MasterPasswordUnlockData, | ||||||
|  |     organization_account_recovery_unlock_data: Vec<UpdateResetPasswordData>, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | struct MasterPasswordUnlockData { | ||||||
|  |     kdf_type: i32, | ||||||
|  |     kdf_iterations: i32, | ||||||
|  |     kdf_parallelism: Option<i32>, | ||||||
|  |     kdf_memory: Option<i32>, | ||||||
|  |     email: String, | ||||||
|  |     master_key_authentication_hash: String, | ||||||
|  |     master_key_encrypted_user_key: String, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | struct RotateAccountKeys { | ||||||
|  |     user_key_encrypted_account_private_key: String, | ||||||
|  |     account_public_key: String, | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[derive(Deserialize)] | ||||||
|  | #[serde(rename_all = "camelCase")] | ||||||
|  | struct RotateAccountData { | ||||||
|     ciphers: Vec<CipherData>, |     ciphers: Vec<CipherData>, | ||||||
|     folders: Vec<UpdateFolderData>, |     folders: Vec<UpdateFolderData>, | ||||||
|     sends: Vec<SendData>, |     sends: Vec<SendData>, | ||||||
|     emergency_access_keys: Vec<UpdateEmergencyAccessData>, |  | ||||||
|     reset_password_keys: Vec<UpdateResetPasswordData>, |  | ||||||
|     key: String, |  | ||||||
|     master_password_hash: String, |  | ||||||
|     private_key: String, |  | ||||||
| } | } | ||||||
|  |  | ||||||
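Put together, the KeyData, RotateAccount* and MasterPasswordUnlockData structs above imply a nested, camelCase request body for the new rotate-user-account-keys route. The snippet below is purely illustrative of that shape; the cipher, folder, send and unlock-data arrays are left empty because their element types (CipherData, UpdateFolderData, SendData, and the emergency-access/reset-password entries) are defined elsewhere in the codebase.

```rust
// Illustrative request shape implied by the structs above (camelCase per serde).
use serde_json::json;

fn main() {
    let body = json!({
        "oldMasterKeyAuthenticationHash": "base64-old-hash",
        "accountKeys": {
            "userKeyEncryptedAccountPrivateKey": "2.encrypted-private-key",
            "accountPublicKey": "existing-public-key"      // must match user.public_key
        },
        "accountUnlockData": {
            "masterPasswordUnlockData": {
                "kdfType": 0,
                "kdfIterations": 600000,
                "kdfMemory": null,
                "kdfParallelism": null,
                "email": "user@example.com",               // must match user.email
                "masterKeyAuthenticationHash": "base64-new-hash",
                "masterKeyEncryptedUserKey": "2.new-encrypted-user-key"
            },
            "emergencyAccessUnlockData": [],
            "organizationAccountRecoveryUnlockData": []
        },
        "accountData": {
            "ciphers": [],
            "folders": [],
            "sends": []
        }
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```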
| fn validate_keydata( | fn validate_keydata( | ||||||
| @@ -493,10 +680,24 @@ fn validate_keydata( | |||||||
|     existing_emergency_access: &[EmergencyAccess], |     existing_emergency_access: &[EmergencyAccess], | ||||||
|     existing_memberships: &[Membership], |     existing_memberships: &[Membership], | ||||||
|     existing_sends: &[Send], |     existing_sends: &[Send], | ||||||
|  |     user: &User, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|  |     if user.client_kdf_type != data.account_unlock_data.master_password_unlock_data.kdf_type | ||||||
|  |         || user.client_kdf_iter != data.account_unlock_data.master_password_unlock_data.kdf_iterations | ||||||
|  |         || user.client_kdf_memory != data.account_unlock_data.master_password_unlock_data.kdf_memory | ||||||
|  |         || user.client_kdf_parallelism != data.account_unlock_data.master_password_unlock_data.kdf_parallelism | ||||||
|  |         || user.email != data.account_unlock_data.master_password_unlock_data.email | ||||||
|  |     { | ||||||
|  |         err!("Changing the kdf variant or email is not supported during key rotation"); | ||||||
|  |     } | ||||||
|  |     if user.public_key.as_ref() != Some(&data.account_keys.account_public_key) { | ||||||
|  |         err!("Changing the asymmetric keypair is not possible during key rotation") | ||||||
|  |     } | ||||||
|  |  | ||||||
|     // Check that we're correctly rotating all the user's ciphers |     // Check that we're correctly rotating all the user's ciphers | ||||||
|     let existing_cipher_ids = existing_ciphers.iter().map(|c| &c.uuid).collect::<HashSet<&CipherId>>(); |     let existing_cipher_ids = existing_ciphers.iter().map(|c| &c.uuid).collect::<HashSet<&CipherId>>(); | ||||||
|     let provided_cipher_ids = data |     let provided_cipher_ids = data | ||||||
|  |         .account_data | ||||||
|         .ciphers |         .ciphers | ||||||
|         .iter() |         .iter() | ||||||
|         .filter(|c| c.organization_id.is_none()) |         .filter(|c| c.organization_id.is_none()) | ||||||
| @@ -508,7 +709,8 @@ fn validate_keydata( | |||||||
|  |  | ||||||
|     // Check that we're correctly rotating all the user's folders |     // Check that we're correctly rotating all the user's folders | ||||||
|     let existing_folder_ids = existing_folders.iter().map(|f| &f.uuid).collect::<HashSet<&FolderId>>(); |     let existing_folder_ids = existing_folders.iter().map(|f| &f.uuid).collect::<HashSet<&FolderId>>(); | ||||||
|     let provided_folder_ids = data.folders.iter().filter_map(|f| f.id.as_ref()).collect::<HashSet<&FolderId>>(); |     let provided_folder_ids = | ||||||
|  |         data.account_data.folders.iter().filter_map(|f| f.id.as_ref()).collect::<HashSet<&FolderId>>(); | ||||||
|     if !provided_folder_ids.is_superset(&existing_folder_ids) { |     if !provided_folder_ids.is_superset(&existing_folder_ids) { | ||||||
|         err!("All existing folders must be included in the rotation") |         err!("All existing folders must be included in the rotation") | ||||||
|     } |     } | ||||||
| @@ -516,8 +718,12 @@ fn validate_keydata( | |||||||
|     // Check that we're correctly rotating all the user's emergency access keys |     // Check that we're correctly rotating all the user's emergency access keys | ||||||
|     let existing_emergency_access_ids = |     let existing_emergency_access_ids = | ||||||
|         existing_emergency_access.iter().map(|ea| &ea.uuid).collect::<HashSet<&EmergencyAccessId>>(); |         existing_emergency_access.iter().map(|ea| &ea.uuid).collect::<HashSet<&EmergencyAccessId>>(); | ||||||
|     let provided_emergency_access_ids = |     let provided_emergency_access_ids = data | ||||||
|         data.emergency_access_keys.iter().map(|ea| &ea.id).collect::<HashSet<&EmergencyAccessId>>(); |         .account_unlock_data | ||||||
|  |         .emergency_access_unlock_data | ||||||
|  |         .iter() | ||||||
|  |         .map(|ea| &ea.id) | ||||||
|  |         .collect::<HashSet<&EmergencyAccessId>>(); | ||||||
|     if !provided_emergency_access_ids.is_superset(&existing_emergency_access_ids) { |     if !provided_emergency_access_ids.is_superset(&existing_emergency_access_ids) { | ||||||
|         err!("All existing emergency access keys must be included in the rotation") |         err!("All existing emergency access keys must be included in the rotation") | ||||||
|     } |     } | ||||||
| @@ -525,15 +731,19 @@ fn validate_keydata( | |||||||
|     // Check that we're correctly rotating all the user's reset password keys |     // Check that we're correctly rotating all the user's reset password keys | ||||||
|     let existing_reset_password_ids = |     let existing_reset_password_ids = | ||||||
|         existing_memberships.iter().map(|m| &m.org_uuid).collect::<HashSet<&OrganizationId>>(); |         existing_memberships.iter().map(|m| &m.org_uuid).collect::<HashSet<&OrganizationId>>(); | ||||||
|     let provided_reset_password_ids = |     let provided_reset_password_ids = data | ||||||
|         data.reset_password_keys.iter().map(|rp| &rp.organization_id).collect::<HashSet<&OrganizationId>>(); |         .account_unlock_data | ||||||
|  |         .organization_account_recovery_unlock_data | ||||||
|  |         .iter() | ||||||
|  |         .map(|rp| &rp.organization_id) | ||||||
|  |         .collect::<HashSet<&OrganizationId>>(); | ||||||
|     if !provided_reset_password_ids.is_superset(&existing_reset_password_ids) { |     if !provided_reset_password_ids.is_superset(&existing_reset_password_ids) { | ||||||
|         err!("All existing reset password keys must be included in the rotation") |         err!("All existing reset password keys must be included in the rotation") | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Check that we're correctly rotating all the user's sends |     // Check that we're correctly rotating all the user's sends | ||||||
|     let existing_send_ids = existing_sends.iter().map(|s| &s.uuid).collect::<HashSet<&SendId>>(); |     let existing_send_ids = existing_sends.iter().map(|s| &s.uuid).collect::<HashSet<&SendId>>(); | ||||||
|     let provided_send_ids = data.sends.iter().filter_map(|s| s.id.as_ref()).collect::<HashSet<&SendId>>(); |     let provided_send_ids = data.account_data.sends.iter().filter_map(|s| s.id.as_ref()).collect::<HashSet<&SendId>>(); | ||||||
|     if !provided_send_ids.is_superset(&existing_send_ids) { |     if !provided_send_ids.is_superset(&existing_send_ids) { | ||||||
|         err!("All existing sends must be included in the rotation") |         err!("All existing sends must be included in the rotation") | ||||||
|     } |     } | ||||||
| @@ -541,12 +751,12 @@ fn validate_keydata( | |||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
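Each of the checks in validate_keydata follows the same pattern: collect the ids the user already has and the ids present in the payload into HashSets, then require the payload to be a superset, so nothing existing is silently dropped while new items are still allowed. A self-contained sketch of that pattern:

```rust
// Sketch of the superset check validate_keydata applies to ciphers, folders,
// emergency-access entries, reset-password keys, and sends.
use std::collections::HashSet;

fn all_existing_included(existing: &[&str], provided: &[&str]) -> bool {
    let existing: HashSet<&str> = existing.iter().copied().collect();
    let provided: HashSet<&str> = provided.iter().copied().collect();
    provided.is_superset(&existing)
}

fn main() {
    // Payload covers both existing folders plus a new one: accepted.
    assert!(all_existing_included(&["f1", "f2"], &["f1", "f2", "f3"]));
    // Payload misses an existing folder: the rotation would be rejected.
    assert!(!all_existing_included(&["f1", "f2"], &["f1"]));
    println!("superset validation sketch ok");
}
```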
|  |  | ||||||
| #[post("/accounts/key", data = "<data>")] | #[post("/accounts/key-management/rotate-user-account-keys", data = "<data>")] | ||||||
| async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     // TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything. |     // TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything. | ||||||
|     let data: KeyData = data.into_inner(); |     let data: KeyData = data.into_inner(); | ||||||
|  |  | ||||||
|     if !headers.user.check_valid_password(&data.master_password_hash) { |     if !headers.user.check_valid_password(&data.old_master_key_authentication_hash) { | ||||||
|         err!("Invalid password") |         err!("Invalid password") | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -554,7 +764,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     // Bitwarden does not process the import if even one item is invalid. |     // Bitwarden does not process the import if even one item is invalid. | ||||||
|     // Since we check the length of the encrypted notes, we need to do that here to pre-validate them. |     // Since we check the length of the encrypted notes, we need to do that here to pre-validate them. | ||||||
|     // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. |     // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. | ||||||
|     Cipher::validate_cipher_data(&data.ciphers)?; |     Cipher::validate_cipher_data(&data.account_data.ciphers)?; | ||||||
|  |  | ||||||
|     let user_id = &headers.user.uuid; |     let user_id = &headers.user.uuid; | ||||||
|  |  | ||||||
| @@ -575,10 +785,11 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|         &existing_emergency_access, |         &existing_emergency_access, | ||||||
|         &existing_memberships, |         &existing_memberships, | ||||||
|         &existing_sends, |         &existing_sends, | ||||||
|  |         &headers.user, | ||||||
|     )?; |     )?; | ||||||
|  |  | ||||||
|     // Update folder data |     // Update folder data | ||||||
|     for folder_data in data.folders { |     for folder_data in data.account_data.folders { | ||||||
|         // Skip `null` folder id entries. |         // Skip `null` folder id entries. | ||||||
|         // See: https://github.com/bitwarden/clients/issues/8453 |         // See: https://github.com/bitwarden/clients/issues/8453 | ||||||
|         if let Some(folder_id) = folder_data.id { |         if let Some(folder_id) = folder_data.id { | ||||||
| @@ -592,7 +803,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Update emergency access data |     // Update emergency access data | ||||||
|     for emergency_access_data in data.emergency_access_keys { |     for emergency_access_data in data.account_unlock_data.emergency_access_unlock_data { | ||||||
|         let Some(saved_emergency_access) = |         let Some(saved_emergency_access) = | ||||||
|             existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id) |             existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id) | ||||||
|         else { |         else { | ||||||
| @@ -604,7 +815,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Update reset password data |     // Update reset password data | ||||||
|     for reset_password_data in data.reset_password_keys { |     for reset_password_data in data.account_unlock_data.organization_account_recovery_unlock_data { | ||||||
|         let Some(membership) = |         let Some(membership) = | ||||||
|             existing_memberships.iter_mut().find(|m| m.org_uuid == reset_password_data.organization_id) |             existing_memberships.iter_mut().find(|m| m.org_uuid == reset_password_data.organization_id) | ||||||
|         else { |         else { | ||||||
| @@ -616,7 +827,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     // Update send data |     // Update send data | ||||||
|     for send_data in data.sends { |     for send_data in data.account_data.sends { | ||||||
|         let Some(send) = existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) else { |         let Some(send) = existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) else { | ||||||
|             err!("Send doesn't exist") |             err!("Send doesn't exist") | ||||||
|         }; |         }; | ||||||
| @@ -627,7 +838,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     // Update cipher data |     // Update cipher data | ||||||
|     use super::ciphers::update_cipher_from_data; |     use super::ciphers::update_cipher_from_data; | ||||||
|  |  | ||||||
|     for cipher_data in data.ciphers { |     for cipher_data in data.account_data.ciphers { | ||||||
|         if cipher_data.organization_id.is_none() { |         if cipher_data.organization_id.is_none() { | ||||||
|             let Some(saved_cipher) = existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap()) |             let Some(saved_cipher) = existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap()) | ||||||
|             else { |             else { | ||||||
| @@ -644,16 +855,20 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, | |||||||
|     // Update user data |     // Update user data | ||||||
|     let mut user = headers.user; |     let mut user = headers.user; | ||||||
|  |  | ||||||
|     user.akey = data.key; |     user.private_key = Some(data.account_keys.user_key_encrypted_account_private_key); | ||||||
|     user.private_key = Some(data.private_key); |     user.set_password( | ||||||
|     user.reset_security_stamp(); |         &data.account_unlock_data.master_password_unlock_data.master_key_authentication_hash, | ||||||
|  |         Some(data.account_unlock_data.master_password_unlock_data.master_key_encrypted_user_key), | ||||||
|  |         true, | ||||||
|  |         None, | ||||||
|  |     ); | ||||||
|  |  | ||||||
|     let save_result = user.save(&mut conn).await; |     let save_result = user.save(&mut conn).await; | ||||||
|  |  | ||||||
|     // Prevent logging out the client from which the user requested this endpoint. |     // Prevent logging out the client from which the user requested this endpoint. | ||||||
|     // If you do log out the user it will cause issues on the client side. |     // If you do log out the user it will cause issues on the client side. | ||||||
|     // Adding the device uuid will prevent this. |     // Adding the device uuid will prevent this. | ||||||
|     nt.send_logout(&user, Some(headers.device.uuid.clone())).await; |     nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
| @@ -669,7 +884,7 @@ async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: | |||||||
|     user.reset_security_stamp(); |     user.reset_security_stamp(); | ||||||
|     let save_result = user.save(&mut conn).await; |     let save_result = user.save(&mut conn).await; | ||||||
|  |  | ||||||
|     nt.send_logout(&user, None).await; |     nt.send_logout(&user, None, &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
| @@ -695,6 +910,11 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     if User::find_by_mail(&data.new_email, &mut conn).await.is_some() { |     if User::find_by_mail(&data.new_email, &mut conn).await.is_some() { | ||||||
|  |         if CONFIG.mail_enabled() { | ||||||
|  |             if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await { | ||||||
|  |                 error!("Error sending change-email-existing email: {e:#?}"); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|         err!("Email already in use"); |         err!("Email already in use"); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -706,10 +926,10 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn | |||||||
|  |  | ||||||
|     if CONFIG.mail_enabled() { |     if CONFIG.mail_enabled() { | ||||||
|         if let Err(e) = mail::send_change_email(&data.new_email, &token).await { |         if let Err(e) = mail::send_change_email(&data.new_email, &token).await { | ||||||
|             error!("Error sending change-email email: {:#?}", e); |             error!("Error sending change-email email: {e:#?}"); | ||||||
|         } |         } | ||||||
|     } else { |     } else { | ||||||
|         debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token); |         debug!("Email change request for user ({}) to email ({}) with token ({token})", user.uuid, data.new_email); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     user.email_new = Some(data.new_email); |     user.email_new = Some(data.new_email); | ||||||
| @@ -777,7 +997,7 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbC | |||||||
|  |  | ||||||
|     let save_result = user.save(&mut conn).await; |     let save_result = user.save(&mut conn).await; | ||||||
|  |  | ||||||
|     nt.send_logout(&user, None).await; |     nt.send_logout(&user, None, &mut conn).await; | ||||||
|  |  | ||||||
|     save_result |     save_result | ||||||
| } | } | ||||||
| @@ -791,7 +1011,7 @@ async fn post_verify_email(headers: Headers) -> EmptyResult { | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await { |     if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await { | ||||||
|         error!("Error sending verify_email email: {:#?}", e); |         error!("Error sending verify_email email: {e:#?}"); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| @@ -822,7 +1042,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbC | |||||||
|     user.last_verifying_at = None; |     user.last_verifying_at = None; | ||||||
|     user.login_verify_count = 0; |     user.login_verify_count = 0; | ||||||
|     if let Err(e) = user.save(&mut conn).await { |     if let Err(e) = user.save(&mut conn).await { | ||||||
|         error!("Error saving email verification: {:#?}", e); |         error!("Error saving email verification: {e:#?}"); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| @@ -841,7 +1061,7 @@ async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) -> | |||||||
|     if CONFIG.mail_enabled() { |     if CONFIG.mail_enabled() { | ||||||
|         if let Some(user) = User::find_by_mail(&data.email, &mut conn).await { |         if let Some(user) = User::find_by_mail(&data.email, &mut conn).await { | ||||||
|             if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await { |             if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await { | ||||||
|                 error!("Error sending delete account email: {:#?}", e); |                 error!("Error sending delete account email: {e:#?}"); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|         Ok(()) |         Ok(()) | ||||||
| @@ -975,23 +1195,38 @@ pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value | |||||||
|     })) |     })) | ||||||
| } | } | ||||||
|  |  | ||||||
| // https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs | // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Auth/Models/Request/Accounts/SecretVerificationRequestModel.cs | ||||||
| #[derive(Deserialize)] | #[derive(Deserialize)] | ||||||
| #[serde(rename_all = "camelCase")] | #[serde(rename_all = "camelCase")] | ||||||
| struct SecretVerificationRequest { | struct SecretVerificationRequest { | ||||||
|     master_password_hash: String, |     master_password_hash: String, | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // Change the KDF Iterations if necessary | ||||||
|  | pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) -> ApiResult<()> { | ||||||
|  |     if user.password_iterations < CONFIG.password_iterations() { | ||||||
|  |         user.password_iterations = CONFIG.password_iterations(); | ||||||
|  |         user.set_password(pwd_hash, None, false, None); | ||||||
|  |  | ||||||
|  |         if let Err(e) = user.save(conn).await { | ||||||
|  |             error!("Error updating user: {e:#?}"); | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |     Ok(()) | ||||||
|  | } | ||||||
|  |  | ||||||
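kdf_upgrade above silently raises the stored password_iterations to the configured value the next time the master password hash is verified. Below is a simplified, dependency-free sketch of that upgrade decision; the real function also re-hashes via user.set_password and persists the user.

```rust
// Sketch of the server-side iteration upgrade performed by kdf_upgrade above,
// with vaultwarden's User and CONFIG replaced by plain values.
struct UserSketch {
    password_iterations: i32,
}

fn kdf_upgrade_sketch(user: &mut UserSketch, configured_iterations: i32) -> bool {
    if user.password_iterations < configured_iterations {
        user.password_iterations = configured_iterations;
        // in the real code: user.set_password(pwd_hash, None, false, None) + user.save(conn)
        return true;
    }
    false
}

fn main() {
    let mut u = UserSketch { password_iterations: 100_000 };
    assert!(kdf_upgrade_sketch(&mut u, 600_000));
    assert_eq!(u.password_iterations, 600_000);
    assert!(!kdf_upgrade_sketch(&mut u, 600_000)); // already up to date
    println!("kdf upgrade sketch ok");
}
```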
| #[post("/accounts/verify-password", data = "<data>")] | #[post("/accounts/verify-password", data = "<data>")] | ||||||
| fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers) -> EmptyResult { | async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||||
|     let data: SecretVerificationRequest = data.into_inner(); |     let data: SecretVerificationRequest = data.into_inner(); | ||||||
|     let user = headers.user; |     let mut user = headers.user; | ||||||
|  |  | ||||||
|     if !user.check_valid_password(&data.master_password_hash) { |     if !user.check_valid_password(&data.master_password_hash) { | ||||||
|         err!("Invalid password") |         err!("Invalid password") | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(()) |     kdf_upgrade(&mut user, &data.master_password_hash, &mut conn).await?; | ||||||
|  |  | ||||||
|  |     Ok(Json(master_password_policy(&user, &conn).await)) | ||||||
| } | } | ||||||
|  |  | ||||||
| async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult { | async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||||
| @@ -1116,19 +1351,14 @@ async fn put_device_token( | |||||||
|         err!(format!("Error: device {device_id} should be present before a token can be assigned")) |         err!(format!("Error: device {device_id} should be present before a token can be assigned")) | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     // if the device already has been registered |     // Check if the new token is the same as the registered token | ||||||
|     if device.is_registered() { |     // Although upstream seems to always register a device on login, we do not. | ||||||
|         // check if the new token is the same as the registered token |     // Unless this causes issues, let's keep it this way; otherwise we might need to also register on every login. | ||||||
|         if device.push_token.is_some() && device.push_token.unwrap() == token.clone() { |     if device.push_token.as_ref() == Some(&token) { | ||||||
|             debug!("Device {} is already registered and token is the same", device_id); |         debug!("Device {device_id} for user {} is already registered and token is identical", headers.user.uuid); | ||||||
|             return Ok(()); |         return Ok(()); | ||||||
|         } else { |  | ||||||
|             // Try to unregister already registered device |  | ||||||
|             unregister_push_device(device.push_uuid).await.ok(); |  | ||||||
|         } |  | ||||||
|         // clear the push_uuid |  | ||||||
|         device.push_uuid = None; |  | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     device.push_token = Some(token); |     device.push_token = Some(token); | ||||||
|     if let Err(e) = device.save(&mut conn).await { |     if let Err(e) = device.save(&mut conn).await { | ||||||
|         err!(format!("An error occurred while trying to save the device push token: {e}")); |         err!(format!("An error occurred while trying to save the device push token: {e}")); | ||||||
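The rewritten put_device_token above replaces the is_registered/unwrap/clone dance with a single Option comparison. A small sketch of why device.push_token.as_ref() == Some(&token) is enough to detect "already registered with the same token" without moving or cloning anything:

```rust
// Sketch of the Option comparison used in put_device_token above; as_deref()
// is used here to compare against a &str, while the handler compares
// Option<&String> with Some(&token). Neither form unwraps or clones.
fn already_registered(push_token: &Option<String>, token: &str) -> bool {
    push_token.as_deref() == Some(token)
}

fn main() {
    let stored = Some(String::from("fcm-token-abc"));
    assert!(already_registered(&stored, "fcm-token-abc"));
    assert!(!already_registered(&stored, "fcm-token-new"));
    assert!(!already_registered(&None, "fcm-token-abc"));
    println!("push token comparison sketch ok");
}
```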
| @@ -1142,16 +1372,19 @@ async fn put_device_token( | |||||||
| #[put("/devices/identifier/<device_id>/clear-token")] | #[put("/devices/identifier/<device_id>/clear-token")] | ||||||
| async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult { | async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult { | ||||||
|     // This only clears push token |     // This only clears push token | ||||||
|     // https://github.com/bitwarden/core/blob/master/src/Api/Controllers/DevicesController.cs#L109 |     // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Controllers/DevicesController.cs#L215 | ||||||
|     // https://github.com/bitwarden/core/blob/master/src/Core/Services/Implementations/DeviceService.cs#L37 |     // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Services/Implementations/DeviceService.cs#L37 | ||||||
|     // This is somehow not implemented in any app; added in case it is required |     // This is somehow not implemented in any app; added in case it is required | ||||||
|  |     // 2025: Also, it looks like it only clears the first found device upstream, which is probably faulty. | ||||||
|  |     //       This is because currently multiple accounts could be on the same device/app and that would cause issues. | ||||||
|  |     //       Vaultwarden removes the push-token for all devices, but this probably means we should also unregister all these devices. | ||||||
|     if !CONFIG.push_enabled() { |     if !CONFIG.push_enabled() { | ||||||
|         return Ok(()); |         return Ok(()); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await { |     if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await { | ||||||
|         Device::clear_push_token_by_uuid(&device_id, &mut conn).await?; |         Device::clear_push_token_by_uuid(&device_id, &mut conn).await?; | ||||||
|         unregister_push_device(device.push_uuid).await?; |         unregister_push_device(&device.push_uuid).await?; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| @@ -1189,10 +1422,10 @@ async fn post_auth_request( | |||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     // Validate device uuid and type |     // Validate device uuid and type | ||||||
|     match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await { |     let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await { | ||||||
|         Some(device) if device.atype == client_headers.device_type => {} |         Some(device) if device.atype == client_headers.device_type => device, | ||||||
|         _ => err!("AuthRequest doesn't exist", "Device verification failed"), |         _ => err!("AuthRequest doesn't exist", "Device verification failed"), | ||||||
|     } |     }; | ||||||
|  |  | ||||||
|     let mut auth_request = AuthRequest::new( |     let mut auth_request = AuthRequest::new( | ||||||
|         user.uuid.clone(), |         user.uuid.clone(), | ||||||
| @@ -1204,7 +1437,7 @@ async fn post_auth_request( | |||||||
|     ); |     ); | ||||||
|     auth_request.save(&mut conn).await?; |     auth_request.save(&mut conn).await?; | ||||||
|  |  | ||||||
|     nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await; |     nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &mut conn).await; | ||||||
|  |  | ||||||
|     log_user_event( |     log_user_event( | ||||||
|         EventType::UserRequestedDeviceApproval as i32, |         EventType::UserRequestedDeviceApproval as i32, | ||||||
| @@ -1279,6 +1512,10 @@ async fn put_auth_request( | |||||||
|         err!("AuthRequest doesn't exist", "Record not found or user uuid does not match") |         err!("AuthRequest doesn't exist", "Record not found or user uuid does not match") | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|  |     if headers.device.uuid != data.device_identifier { | ||||||
|  |         err!("AuthRequest doesn't exist", "Device verification failed") | ||||||
|  |     } | ||||||
|  |  | ||||||
|     if auth_request.approved.is_some() { |     if auth_request.approved.is_some() { | ||||||
|         err!("An authentication request with the same device already exists") |         err!("An authentication request with the same device already exists") | ||||||
|     } |     } | ||||||
| @@ -1295,7 +1532,7 @@ async fn put_auth_request( | |||||||
|         auth_request.save(&mut conn).await?; |         auth_request.save(&mut conn).await?; | ||||||
|  |  | ||||||
|         ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await; |         ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await; | ||||||
|         nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await; |         nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &mut conn).await; | ||||||
|  |  | ||||||
|         log_user_event( |         log_user_event( | ||||||
|             EventType::OrganizationUserApprovedAuthRequest as i32, |             EventType::OrganizationUserApprovedAuthRequest as i32, | ||||||
|   | |||||||
| @@ -11,10 +11,11 @@ use rocket::{ | |||||||
| use serde_json::Value; | use serde_json::Value; | ||||||
|  |  | ||||||
| use crate::auth::ClientVersion; | use crate::auth::ClientVersion; | ||||||
| use crate::util::NumberOrString; | use crate::util::{save_temp_file, NumberOrString}; | ||||||
| use crate::{ | use crate::{ | ||||||
|     api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType}, |     api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType}, | ||||||
|     auth::Headers, |     auth::Headers, | ||||||
|  |     config::PathType, | ||||||
|     crypto, |     crypto, | ||||||
|     db::{models::*, DbConn, DbPool}, |     db::{models::*, DbConn, DbPool}, | ||||||
|     CONFIG, |     CONFIG, | ||||||
| @@ -77,6 +78,7 @@ pub fn routes() -> Vec<Route> { | |||||||
|         restore_cipher_put, |         restore_cipher_put, | ||||||
|         restore_cipher_put_admin, |         restore_cipher_put_admin, | ||||||
|         restore_cipher_selected, |         restore_cipher_selected, | ||||||
|  |         restore_cipher_selected_admin, | ||||||
|         delete_all, |         delete_all, | ||||||
|         move_cipher_selected, |         move_cipher_selected, | ||||||
|         move_cipher_selected_put, |         move_cipher_selected_put, | ||||||
| @@ -105,12 +107,7 @@ struct SyncData { | |||||||
| } | } | ||||||
|  |  | ||||||
| #[get("/sync?<data..>")] | #[get("/sync?<data..>")] | ||||||
| async fn sync( | async fn sync(data: SyncData, headers: Headers, client_version: Option<ClientVersion>, mut conn: DbConn) -> JsonResult { | ||||||
|     data: SyncData, |  | ||||||
|     headers: Headers, |  | ||||||
|     client_version: Option<ClientVersion>, |  | ||||||
|     mut conn: DbConn, |  | ||||||
| ) -> Json<Value> { |  | ||||||
|     let user_json = headers.user.to_json(&mut conn).await; |     let user_json = headers.user.to_json(&mut conn).await; | ||||||
|  |  | ||||||
|     // Get all ciphers which are visible by the user |     // Get all ciphers which are visible by the user | ||||||
| @@ -134,7 +131,7 @@ async fn sync( | |||||||
|     for c in ciphers { |     for c in ciphers { | ||||||
|         ciphers_json.push( |         ciphers_json.push( | ||||||
|             c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn) |             c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn) | ||||||
|                 .await, |                 .await?, | ||||||
|         ); |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
| @@ -159,7 +156,7 @@ async fn sync( | |||||||
|         api::core::_get_eq_domains(headers, true).into_inner() |         api::core::_get_eq_domains(headers, true).into_inner() | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     Json(json!({ |     Ok(Json(json!({ | ||||||
|         "profile": user_json, |         "profile": user_json, | ||||||
|         "folders": folders_json, |         "folders": folders_json, | ||||||
|         "collections": collections_json, |         "collections": collections_json, | ||||||
| @@ -168,11 +165,11 @@ async fn sync( | |||||||
|         "domains": domains_json, |         "domains": domains_json, | ||||||
|         "sends": sends_json, |         "sends": sends_json, | ||||||
|         "object": "sync" |         "object": "sync" | ||||||
|     })) |     }))) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[get("/ciphers")] | #[get("/ciphers")] | ||||||
| async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> { | async fn get_ciphers(headers: Headers, mut conn: DbConn) -> JsonResult { | ||||||
|     let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await; |     let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await; | ||||||
|     let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, CipherSyncType::User, &mut conn).await; |     let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, CipherSyncType::User, &mut conn).await; | ||||||
|  |  | ||||||
| @@ -180,15 +177,15 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> { | |||||||
|     for c in ciphers { |     for c in ciphers { | ||||||
|         ciphers_json.push( |         ciphers_json.push( | ||||||
|             c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn) |             c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn) | ||||||
|                 .await, |                 .await?, | ||||||
|         ); |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Json(json!({ |     Ok(Json(json!({ | ||||||
|       "data": ciphers_json, |       "data": ciphers_json, | ||||||
|       "object": "list", |       "object": "list", | ||||||
|       "continuationToken": null |       "continuationToken": null | ||||||
|     })) |     }))) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[get("/ciphers/<cipher_id>")] | #[get("/ciphers/<cipher_id>")] | ||||||
| @@ -201,7 +198,7 @@ async fn get_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn) -> | |||||||
|         err!("Cipher is not owned by user") |         err!("Cipher is not owned by user") | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[get("/ciphers/<cipher_id>/admin")] | #[get("/ciphers/<cipher_id>/admin")] | ||||||
| @@ -322,7 +319,7 @@ async fn post_ciphers_create( | |||||||
|     // or otherwise), we can just ignore this field entirely. |     // or otherwise), we can just ignore this field entirely. | ||||||
|     data.cipher.last_known_revision_date = None; |     data.cipher.last_known_revision_date = None; | ||||||
|  |  | ||||||
|     share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await |     share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt, None).await | ||||||
| } | } | ||||||
|  |  | ||||||
| /// Called when creating a new user-owned cipher. | /// Called when creating a new user-owned cipher. | ||||||
| @@ -339,7 +336,7 @@ async fn post_ciphers(data: Json<CipherData>, headers: Headers, mut conn: DbConn | |||||||
|     let mut cipher = Cipher::new(data.r#type, data.name.clone()); |     let mut cipher = Cipher::new(data.r#type, data.name.clone()); | ||||||
|     update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?; |     update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| /// Enforces the personal ownership policy on user-owned ciphers, if applicable. | /// Enforces the personal ownership policy on user-owned ciphers, if applicable. | ||||||
| @@ -381,7 +378,7 @@ pub async fn update_cipher_from_data( | |||||||
|         if let Some(dt) = data.last_known_revision_date { |         if let Some(dt) = data.last_known_revision_date { | ||||||
|             match NaiveDateTime::parse_from_str(&dt, "%+") { |             match NaiveDateTime::parse_from_str(&dt, "%+") { | ||||||
|                 // ISO 8601 format |                 // ISO 8601 format | ||||||
|                 Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), |                 Err(err) => warn!("Error parsing LastKnownRevisionDate '{dt}': {err}"), | ||||||
|                 Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => { |                 Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => { | ||||||
|                     err!("The client copy of this cipher is out of date. Resync the client and try again.") |                     err!("The client copy of this cipher is out of date. Resync the client and try again.") | ||||||
|                 } |                 } | ||||||
| @@ -535,7 +532,7 @@ pub async fn update_cipher_from_data( | |||||||
|             ut, |             ut, | ||||||
|             cipher, |             cipher, | ||||||
|             &cipher.update_users_revision(conn).await, |             &cipher.update_users_revision(conn).await, | ||||||
|             &headers.device.uuid, |             &headers.device, | ||||||
|             shared_to_collections, |             shared_to_collections, | ||||||
|             conn, |             conn, | ||||||
|         ) |         ) | ||||||
| @@ -612,7 +609,7 @@ async fn post_ciphers_import( | |||||||
|  |  | ||||||
|     let mut user = headers.user; |     let mut user = headers.user; | ||||||
|     user.update_revision(&mut conn).await?; |     user.update_revision(&mut conn).await?; | ||||||
|     nt.send_user_update(UpdateType::SyncVault, &user).await; |     nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
| @@ -676,7 +673,7 @@ async fn put_cipher( | |||||||
|  |  | ||||||
|     update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherUpdate).await?; |     update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherUpdate).await?; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/ciphers/<cipher_id>/partial", data = "<data>")] | #[post("/ciphers/<cipher_id>/partial", data = "<data>")] | ||||||
| @@ -714,7 +711,7 @@ async fn put_cipher_partial( | |||||||
|     // Update favorite |     // Update favorite | ||||||
|     cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?; |     cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Deserialize)] | #[derive(Deserialize)] | ||||||
| @@ -808,7 +805,7 @@ async fn post_collections_update( | |||||||
|         UpdateType::SyncCipherUpdate, |         UpdateType::SyncCipherUpdate, | ||||||
|         &cipher, |         &cipher, | ||||||
|         &cipher.update_users_revision(&mut conn).await, |         &cipher.update_users_revision(&mut conn).await, | ||||||
|         &headers.device.uuid, |         &headers.device, | ||||||
|         Some(Vec::from_iter(posted_collections)), |         Some(Vec::from_iter(posted_collections)), | ||||||
|         &mut conn, |         &mut conn, | ||||||
|     ) |     ) | ||||||
| @@ -825,7 +822,7 @@ async fn post_collections_update( | |||||||
|     ) |     ) | ||||||
|     .await; |     .await; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/<cipher_id>/collections-admin", data = "<data>")] | #[put("/ciphers/<cipher_id>/collections-admin", data = "<data>")] | ||||||
| @@ -885,7 +882,7 @@ async fn post_collections_admin( | |||||||
|         UpdateType::SyncCipherUpdate, |         UpdateType::SyncCipherUpdate, | ||||||
|         &cipher, |         &cipher, | ||||||
|         &cipher.update_users_revision(&mut conn).await, |         &cipher.update_users_revision(&mut conn).await, | ||||||
|         &headers.device.uuid, |         &headers.device, | ||||||
|         Some(Vec::from_iter(posted_collections)), |         Some(Vec::from_iter(posted_collections)), | ||||||
|         &mut conn, |         &mut conn, | ||||||
|     ) |     ) | ||||||
| @@ -924,7 +921,7 @@ async fn post_cipher_share( | |||||||
| ) -> JsonResult { | ) -> JsonResult { | ||||||
|     let data: ShareCipherData = data.into_inner(); |     let data: ShareCipherData = data.into_inner(); | ||||||
|  |  | ||||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await |     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/<cipher_id>/share", data = "<data>")] | #[put("/ciphers/<cipher_id>/share", data = "<data>")] | ||||||
| @@ -937,7 +934,7 @@ async fn put_cipher_share( | |||||||
| ) -> JsonResult { | ) -> JsonResult { | ||||||
|     let data: ShareCipherData = data.into_inner(); |     let data: ShareCipherData = data.into_inner(); | ||||||
|  |  | ||||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await |     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Deserialize)] | #[derive(Deserialize)] | ||||||
| @@ -977,11 +974,16 @@ async fn put_cipher_share_selected( | |||||||
|         }; |         }; | ||||||
|  |  | ||||||
|         match shared_cipher_data.cipher.id.take() { |         match shared_cipher_data.cipher.id.take() { | ||||||
|             Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?, |             Some(id) => { | ||||||
|  |                 share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt, Some(UpdateType::None)).await? | ||||||
|  |             } | ||||||
|             None => err!("Request missing ids field"), |             None => err!("Request missing ids field"), | ||||||
|         }; |         }; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     // Multi-share actions do not send out a push for each cipher, so we need to send a general sync here | ||||||
|  |     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -991,6 +993,7 @@ async fn share_cipher_by_uuid( | |||||||
|     headers: &Headers, |     headers: &Headers, | ||||||
|     conn: &mut DbConn, |     conn: &mut DbConn, | ||||||
|     nt: &Notify<'_>, |     nt: &Notify<'_>, | ||||||
|  |     override_ut: Option<UpdateType>, | ||||||
| ) -> JsonResult { | ) -> JsonResult { | ||||||
|     let mut cipher = match Cipher::find_by_uuid(cipher_id, conn).await { |     let mut cipher = match Cipher::find_by_uuid(cipher_id, conn).await { | ||||||
|         Some(cipher) => { |         Some(cipher) => { | ||||||
| @@ -1022,7 +1025,10 @@ async fn share_cipher_by_uuid( | |||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     // When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate. |     // When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate. | ||||||
|     let ut = if data.cipher.last_known_revision_date.is_some() { |     // If there is an override, like when handling multiple items, we want to prevent a push notification for every single item | ||||||
|  |     let ut = if let Some(ut) = override_ut { | ||||||
|  |         ut | ||||||
|  |     } else if data.cipher.last_known_revision_date.is_some() { | ||||||
|         UpdateType::SyncCipherUpdate |         UpdateType::SyncCipherUpdate | ||||||
|     } else { |     } else { | ||||||
|         UpdateType::SyncCipherCreate |         UpdateType::SyncCipherCreate | ||||||
| @@ -1030,7 +1036,7 @@ async fn share_cipher_by_uuid( | |||||||
|  |  | ||||||
|     update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?; |     update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| /// v2 API for downloading an attachment. This just redirects the client to | /// v2 API for downloading an attachment. This just redirects the client to | ||||||
| @@ -1055,7 +1061,7 @@ async fn get_attachment( | |||||||
|     } |     } | ||||||
|  |  | ||||||
|     match Attachment::find_by_id(&attachment_id, &mut conn).await { |     match Attachment::find_by_id(&attachment_id, &mut conn).await { | ||||||
|         Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))), |         Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host).await?)), | ||||||
|         Some(_) => err!("Attachment doesn't belong to cipher"), |         Some(_) => err!("Attachment doesn't belong to cipher"), | ||||||
|         None => err!("Attachment doesn't exist"), |         None => err!("Attachment doesn't exist"), | ||||||
|     } |     } | ||||||
| @@ -1105,7 +1111,7 @@ async fn post_attachment_v2( | |||||||
|         Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key)); |         Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key)); | ||||||
|     attachment.save(&mut conn).await.expect("Error saving attachment"); |     attachment.save(&mut conn).await.expect("Error saving attachment"); | ||||||
|  |  | ||||||
|     let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id); |     let url = format!("/ciphers/{}/attachment/{attachment_id}", cipher.uuid); | ||||||
|     let response_key = match data.admin_request { |     let response_key = match data.admin_request { | ||||||
|         Some(b) if b => "cipherMiniResponse", |         Some(b) if b => "cipherMiniResponse", | ||||||
|         _ => "cipherResponse", |         _ => "cipherResponse", | ||||||
| @@ -1116,7 +1122,7 @@ async fn post_attachment_v2( | |||||||
|         "attachmentId": attachment_id, |         "attachmentId": attachment_id, | ||||||
|         "url": url, |         "url": url, | ||||||
|         "fileUploadType": FileUploadType::Direct as i32, |         "fileUploadType": FileUploadType::Direct as i32, | ||||||
|         response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await, |         response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?, | ||||||
|     }))) |     }))) | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1142,7 +1148,7 @@ async fn save_attachment( | |||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> Result<(Cipher, DbConn), crate::error::Error> { | ) -> Result<(Cipher, DbConn), crate::error::Error> { | ||||||
|     let mut data = data.into_inner(); |     let data = data.into_inner(); | ||||||
|  |  | ||||||
|     let Some(size) = data.data.len().to_i64() else { |     let Some(size) = data.data.len().to_i64() else { | ||||||
|         err!("Attachment data size overflow"); |         err!("Attachment data size overflow"); | ||||||
| @@ -1269,19 +1275,13 @@ async fn save_attachment( | |||||||
|         attachment.save(&mut conn).await.expect("Error saving attachment"); |         attachment.save(&mut conn).await.expect("Error saving attachment"); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     let folder_path = tokio::fs::canonicalize(&CONFIG.attachments_folder()).await?.join(cipher_id.as_ref()); |     save_temp_file(PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?; | ||||||
|     let file_path = folder_path.join(file_id.as_ref()); |  | ||||||
|     tokio::fs::create_dir_all(&folder_path).await?; |  | ||||||
|  |  | ||||||
|     if let Err(_err) = data.data.persist_to(&file_path).await { |  | ||||||
|         data.data.move_copy_to(file_path).await? |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     nt.send_cipher_update( |     nt.send_cipher_update( | ||||||
|         UpdateType::SyncCipherUpdate, |         UpdateType::SyncCipherUpdate, | ||||||
|         &cipher, |         &cipher, | ||||||
|         &cipher.update_users_revision(&mut conn).await, |         &cipher.update_users_revision(&mut conn).await, | ||||||
|         &headers.device.uuid, |         &headers.device, | ||||||
|         None, |         None, | ||||||
|         &mut conn, |         &mut conn, | ||||||
|     ) |     ) | ||||||
| @@ -1342,7 +1342,7 @@ async fn post_attachment( | |||||||
|  |  | ||||||
|     let (cipher, mut conn) = save_attachment(attachment, cipher_id, data, &headers, conn, nt).await?; |     let (cipher, mut conn) = save_attachment(attachment, cipher_id, data, &headers, conn, nt).await?; | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/ciphers/<cipher_id>/attachment-admin", format = "multipart/form-data", data = "<data>")] | #[post("/ciphers/<cipher_id>/attachment-admin", format = "multipart/form-data", data = "<data>")] | ||||||
| @@ -1376,7 +1376,7 @@ async fn delete_attachment_post_admin( | |||||||
|     headers: Headers, |     headers: Headers, | ||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> JsonResult { | ||||||
|     delete_attachment(cipher_id, attachment_id, headers, conn, nt).await |     delete_attachment(cipher_id, attachment_id, headers, conn, nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1387,7 +1387,7 @@ async fn delete_attachment_post( | |||||||
|     headers: Headers, |     headers: Headers, | ||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> JsonResult { | ||||||
|     delete_attachment(cipher_id, attachment_id, headers, conn, nt).await |     delete_attachment(cipher_id, attachment_id, headers, conn, nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1398,7 +1398,7 @@ async fn delete_attachment( | |||||||
|     headers: Headers, |     headers: Headers, | ||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> JsonResult { | ||||||
|     _delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await |     _delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1409,13 +1409,13 @@ async fn delete_attachment_admin( | |||||||
|     headers: Headers, |     headers: Headers, | ||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> JsonResult { | ||||||
|     _delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await |     _delete_cipher_attachment_by_id(&cipher_id, &attachment_id, &headers, &mut conn, &nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/ciphers/<cipher_id>/delete")] | #[post("/ciphers/<cipher_id>/delete")] | ||||||
| async fn delete_cipher_post(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn delete_cipher_post(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||||
|     // permanent delete |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1426,13 +1426,13 @@ async fn delete_cipher_post_admin( | |||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||||
|     // permanent delete |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/<cipher_id>/delete")] | #[put("/ciphers/<cipher_id>/delete")] | ||||||
| async fn delete_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn delete_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await | ||||||
|     // soft delete |     // soft delete | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1443,18 +1443,19 @@ async fn delete_cipher_put_admin( | |||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await | ||||||
|  |     // soft delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[delete("/ciphers/<cipher_id>")] | #[delete("/ciphers/<cipher_id>")] | ||||||
| async fn delete_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn delete_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||||
|     // permanent delete |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[delete("/ciphers/<cipher_id>/admin")] | #[delete("/ciphers/<cipher_id>/admin")] | ||||||
| async fn delete_cipher_admin(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | async fn delete_cipher_admin(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await |     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||||
|     // permanent delete |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -1465,7 +1466,8 @@ async fn delete_cipher_selected( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||||
|  |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/ciphers/delete", data = "<data>")] | #[post("/ciphers/delete", data = "<data>")] | ||||||
| @@ -1475,7 +1477,8 @@ async fn delete_cipher_selected_post( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||||
|  |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/delete", data = "<data>")] | #[put("/ciphers/delete", data = "<data>")] | ||||||
| @@ -1485,7 +1488,8 @@ async fn delete_cipher_selected_put( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await | ||||||
|  |     // soft delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[delete("/ciphers/admin", data = "<data>")] | #[delete("/ciphers/admin", data = "<data>")] | ||||||
| @@ -1495,7 +1499,8 @@ async fn delete_cipher_selected_admin( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||||
|  |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[post("/ciphers/delete-admin", data = "<data>")] | #[post("/ciphers/delete-admin", data = "<data>")] | ||||||
| @@ -1505,7 +1510,8 @@ async fn delete_cipher_selected_post_admin( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||||
|  |     // permanent delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/delete-admin", data = "<data>")] | #[put("/ciphers/delete-admin", data = "<data>")] | ||||||
| @@ -1515,12 +1521,13 @@ async fn delete_cipher_selected_put_admin( | |||||||
|     conn: DbConn, |     conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete |     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await | ||||||
|  |     // soft delete | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/<cipher_id>/restore")] | #[put("/ciphers/<cipher_id>/restore")] | ||||||
| async fn restore_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { | async fn restore_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { | ||||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await |     _restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/<cipher_id>/restore-admin")] | #[put("/ciphers/<cipher_id>/restore-admin")] | ||||||
| @@ -1530,7 +1537,17 @@ async fn restore_cipher_put_admin( | |||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> JsonResult { | ) -> JsonResult { | ||||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await |     _restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await | ||||||
|  | } | ||||||
|  |  | ||||||
|  | #[put("/ciphers/restore-admin", data = "<data>")] | ||||||
|  | async fn restore_cipher_selected_admin( | ||||||
|  |     data: Json<CipherIdsData>, | ||||||
|  |     headers: Headers, | ||||||
|  |     mut conn: DbConn, | ||||||
|  |     nt: Notify<'_>, | ||||||
|  | ) -> JsonResult { | ||||||
|  |     _restore_multiple_ciphers(data, &headers, &mut conn, &nt).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[put("/ciphers/restore", data = "<data>")] | #[put("/ciphers/restore", data = "<data>")] | ||||||
| @@ -1558,35 +1575,47 @@ async fn move_cipher_selected( | |||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     let data = data.into_inner(); |     let data = data.into_inner(); | ||||||
|     let user_id = headers.user.uuid; |     let user_id = &headers.user.uuid; | ||||||
|  |  | ||||||
|     if let Some(ref folder_id) = data.folder_id { |     if let Some(ref folder_id) = data.folder_id { | ||||||
|         if Folder::find_by_uuid_and_user(folder_id, &user_id, &mut conn).await.is_none() { |         if Folder::find_by_uuid_and_user(folder_id, user_id, &mut conn).await.is_none() { | ||||||
|             err!("Invalid folder", "Folder does not exist or belongs to another user"); |             err!("Invalid folder", "Folder does not exist or belongs to another user"); | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     for cipher_id in data.ids { |     let cipher_count = data.ids.len(); | ||||||
|         let Some(cipher) = Cipher::find_by_uuid(&cipher_id, &mut conn).await else { |     let mut single_cipher: Option<Cipher> = None; | ||||||
|             err!("Cipher doesn't exist") |  | ||||||
|         }; |  | ||||||
|  |  | ||||||
|         if !cipher.is_accessible_to_user(&user_id, &mut conn).await { |     // TODO: Convert this to use a single query (or at least fewer) to update all items | ||||||
|             err!("Cipher is not accessible by user") |     // Find all ciphers a user has access to; all others will be ignored | ||||||
|  |     let accessible_ciphers = Cipher::find_by_user_and_ciphers(user_id, &data.ids, &mut conn).await; | ||||||
|  |     let accessible_ciphers_count = accessible_ciphers.len(); | ||||||
|  |     for cipher in accessible_ciphers { | ||||||
|  |         cipher.move_to_folder(data.folder_id.clone(), user_id, &mut conn).await?; | ||||||
|  |         if cipher_count == 1 { | ||||||
|  |             single_cipher = Some(cipher); | ||||||
|         } |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|         // Move cipher |     if let Some(cipher) = single_cipher { | ||||||
|         cipher.move_to_folder(data.folder_id.clone(), &user_id, &mut conn).await?; |  | ||||||
|  |  | ||||||
|         nt.send_cipher_update( |         nt.send_cipher_update( | ||||||
|             UpdateType::SyncCipherUpdate, |             UpdateType::SyncCipherUpdate, | ||||||
|             &cipher, |             &cipher, | ||||||
|             &[user_id.clone()], |             std::slice::from_ref(user_id), | ||||||
|             &headers.device.uuid, |             &headers.device, | ||||||
|             None, |             None, | ||||||
|             &mut conn, |             &mut conn, | ||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
|  |     } else { | ||||||
|         // Move cipher |         // Multi move actions do not send out a push for each cipher, so we need to send a general sync here | ||||||
|  |         nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if cipher_count != accessible_ciphers_count { | ||||||
|  |         err!(format!( | ||||||
|  |             "Not all ciphers were moved! {accessible_ciphers_count} of the selected {cipher_count} were moved." | ||||||
|  |         )) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| @@ -1629,7 +1658,7 @@ async fn delete_all( | |||||||
|                 Some(member) => { |                 Some(member) => { | ||||||
|                     if member.atype == MembershipType::Owner { |                     if member.atype == MembershipType::Owner { | ||||||
|                         Cipher::delete_all_by_organization(&org_data.org_id, &mut conn).await?; |                         Cipher::delete_all_by_organization(&org_data.org_id, &mut conn).await?; | ||||||
|                         nt.send_user_update(UpdateType::SyncVault, &user).await; |                         nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|                         log_event( |                         log_event( | ||||||
|                             EventType::OrganizationPurgedVault as i32, |                             EventType::OrganizationPurgedVault as i32, | ||||||
| @@ -1662,18 +1691,26 @@ async fn delete_all( | |||||||
|             } |             } | ||||||
|  |  | ||||||
|             user.update_revision(&mut conn).await?; |             user.update_revision(&mut conn).await?; | ||||||
|             nt.send_user_update(UpdateType::SyncVault, &user).await; |             nt.send_user_update(UpdateType::SyncVault, &user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|             Ok(()) |             Ok(()) | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
| } | } | ||||||
|  |  | ||||||
|  | #[derive(PartialEq)] | ||||||
|  | pub enum CipherDeleteOptions { | ||||||
|  |     SoftSingle, | ||||||
|  |     SoftMulti, | ||||||
|  |     HardSingle, | ||||||
|  |     HardMulti, | ||||||
|  | } | ||||||
|  |  | ||||||
| async fn _delete_cipher_by_uuid( | async fn _delete_cipher_by_uuid( | ||||||
|     cipher_id: &CipherId, |     cipher_id: &CipherId, | ||||||
|     headers: &Headers, |     headers: &Headers, | ||||||
|     conn: &mut DbConn, |     conn: &mut DbConn, | ||||||
|     soft_delete: bool, |     delete_options: &CipherDeleteOptions, | ||||||
|     nt: &Notify<'_>, |     nt: &Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     let Some(mut cipher) = Cipher::find_by_uuid(cipher_id, conn).await else { |     let Some(mut cipher) = Cipher::find_by_uuid(cipher_id, conn).await else { | ||||||
| @@ -1684,35 +1721,42 @@ async fn _delete_cipher_by_uuid( | |||||||
|         err!("Cipher can't be deleted by user") |         err!("Cipher can't be deleted by user") | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     if soft_delete { |     if *delete_options == CipherDeleteOptions::SoftSingle || *delete_options == CipherDeleteOptions::SoftMulti { | ||||||
|         cipher.deleted_at = Some(Utc::now().naive_utc()); |         cipher.deleted_at = Some(Utc::now().naive_utc()); | ||||||
|         cipher.save(conn).await?; |         cipher.save(conn).await?; | ||||||
|         nt.send_cipher_update( |         if *delete_options == CipherDeleteOptions::SoftSingle { | ||||||
|             UpdateType::SyncCipherUpdate, |             nt.send_cipher_update( | ||||||
|             &cipher, |                 UpdateType::SyncCipherUpdate, | ||||||
|             &cipher.update_users_revision(conn).await, |                 &cipher, | ||||||
|             &headers.device.uuid, |                 &cipher.update_users_revision(conn).await, | ||||||
|             None, |                 &headers.device, | ||||||
|             conn, |                 None, | ||||||
|         ) |                 conn, | ||||||
|         .await; |             ) | ||||||
|  |             .await; | ||||||
|  |         } | ||||||
|     } else { |     } else { | ||||||
|         cipher.delete(conn).await?; |         cipher.delete(conn).await?; | ||||||
|         nt.send_cipher_update( |         if *delete_options == CipherDeleteOptions::HardSingle { | ||||||
|             UpdateType::SyncCipherDelete, |             nt.send_cipher_update( | ||||||
|             &cipher, |                 UpdateType::SyncLoginDelete, | ||||||
|             &cipher.update_users_revision(conn).await, |                 &cipher, | ||||||
|             &headers.device.uuid, |                 &cipher.update_users_revision(conn).await, | ||||||
|             None, |                 &headers.device, | ||||||
|             conn, |                 None, | ||||||
|         ) |                 conn, | ||||||
|         .await; |             ) | ||||||
|  |             .await; | ||||||
|  |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     if let Some(org_id) = cipher.organization_uuid { |     if let Some(org_id) = cipher.organization_uuid { | ||||||
|         let event_type = match soft_delete { |         let event_type = if *delete_options == CipherDeleteOptions::SoftSingle | ||||||
|             true => EventType::CipherSoftDeleted as i32, |             || *delete_options == CipherDeleteOptions::SoftMulti | ||||||
|             false => EventType::CipherDeleted as i32, |         { | ||||||
|  |             EventType::CipherSoftDeleted as i32 | ||||||
|  |         } else { | ||||||
|  |             EventType::CipherDeleted as i32 | ||||||
|         }; |         }; | ||||||
|  |  | ||||||
|         log_event(event_type, &cipher.uuid, &org_id, &headers.user.uuid, headers.device.atype, &headers.ip.ip, conn) |         log_event(event_type, &cipher.uuid, &org_id, &headers.user.uuid, headers.device.atype, &headers.ip.ip, conn) | ||||||
| @@ -1732,23 +1776,27 @@ async fn _delete_multiple_ciphers( | |||||||
|     data: Json<CipherIdsData>, |     data: Json<CipherIdsData>, | ||||||
|     headers: Headers, |     headers: Headers, | ||||||
|     mut conn: DbConn, |     mut conn: DbConn, | ||||||
|     soft_delete: bool, |     delete_options: CipherDeleteOptions, | ||||||
|     nt: Notify<'_>, |     nt: Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|     let data = data.into_inner(); |     let data = data.into_inner(); | ||||||
|  |  | ||||||
|     for cipher_id in data.ids { |     for cipher_id in data.ids { | ||||||
|         if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, soft_delete, &nt).await { |         if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &delete_options, &nt).await { | ||||||
|             return error; |             return error; | ||||||
|         }; |         }; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     // Multi delete actions do not send out a push for each cipher, so we need to send a general sync here | ||||||
|  |     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
|  |  | ||||||
| async fn _restore_cipher_by_uuid( | async fn _restore_cipher_by_uuid( | ||||||
|     cipher_id: &CipherId, |     cipher_id: &CipherId, | ||||||
|     headers: &Headers, |     headers: &Headers, | ||||||
|  |     multi_restore: bool, | ||||||
|     conn: &mut DbConn, |     conn: &mut DbConn, | ||||||
|     nt: &Notify<'_>, |     nt: &Notify<'_>, | ||||||
| ) -> JsonResult { | ) -> JsonResult { | ||||||
| @@ -1763,15 +1811,17 @@ async fn _restore_cipher_by_uuid( | |||||||
|     cipher.deleted_at = None; |     cipher.deleted_at = None; | ||||||
|     cipher.save(conn).await?; |     cipher.save(conn).await?; | ||||||
|  |  | ||||||
|     nt.send_cipher_update( |     if !multi_restore { | ||||||
|         UpdateType::SyncCipherUpdate, |         nt.send_cipher_update( | ||||||
|         &cipher, |             UpdateType::SyncCipherUpdate, | ||||||
|         &cipher.update_users_revision(conn).await, |             &cipher, | ||||||
|         &headers.device.uuid, |             &cipher.update_users_revision(conn).await, | ||||||
|         None, |             &headers.device, | ||||||
|         conn, |             None, | ||||||
|     ) |             conn, | ||||||
|     .await; |         ) | ||||||
|  |         .await; | ||||||
|  |     } | ||||||
|  |  | ||||||
|     if let Some(org_id) = &cipher.organization_uuid { |     if let Some(org_id) = &cipher.organization_uuid { | ||||||
|         log_event( |         log_event( | ||||||
| @@ -1786,7 +1836,7 @@ async fn _restore_cipher_by_uuid( | |||||||
|         .await; |         .await; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await)) |     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?)) | ||||||
| } | } | ||||||
|  |  | ||||||
| async fn _restore_multiple_ciphers( | async fn _restore_multiple_ciphers( | ||||||
| @@ -1799,12 +1849,15 @@ async fn _restore_multiple_ciphers( | |||||||
|  |  | ||||||
|     let mut ciphers: Vec<Value> = Vec::new(); |     let mut ciphers: Vec<Value> = Vec::new(); | ||||||
|     for cipher_id in data.ids { |     for cipher_id in data.ids { | ||||||
|         match _restore_cipher_by_uuid(&cipher_id, headers, conn, nt).await { |         match _restore_cipher_by_uuid(&cipher_id, headers, true, conn, nt).await { | ||||||
|             Ok(json) => ciphers.push(json.into_inner()), |             Ok(json) => ciphers.push(json.into_inner()), | ||||||
|             err => return err, |             err => return err, | ||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     // Multi restore actions do not send out a push for each cipher, so we need to send a general sync here | ||||||
|  |     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, conn).await; | ||||||
|  |  | ||||||
|     Ok(Json(json!({ |     Ok(Json(json!({ | ||||||
|       "data": ciphers, |       "data": ciphers, | ||||||
|       "object": "list", |       "object": "list", | ||||||
| @@ -1818,7 +1871,7 @@ async fn _delete_cipher_attachment_by_id( | |||||||
|     headers: &Headers, |     headers: &Headers, | ||||||
|     conn: &mut DbConn, |     conn: &mut DbConn, | ||||||
|     nt: &Notify<'_>, |     nt: &Notify<'_>, | ||||||
| ) -> EmptyResult { | ) -> JsonResult { | ||||||
|     let Some(attachment) = Attachment::find_by_id(attachment_id, conn).await else { |     let Some(attachment) = Attachment::find_by_id(attachment_id, conn).await else { | ||||||
|         err!("Attachment doesn't exist") |         err!("Attachment doesn't exist") | ||||||
|     }; |     }; | ||||||
| @@ -1841,17 +1894,17 @@ async fn _delete_cipher_attachment_by_id( | |||||||
|         UpdateType::SyncCipherUpdate, |         UpdateType::SyncCipherUpdate, | ||||||
|         &cipher, |         &cipher, | ||||||
|         &cipher.update_users_revision(conn).await, |         &cipher.update_users_revision(conn).await, | ||||||
|         &headers.device.uuid, |         &headers.device, | ||||||
|         None, |         None, | ||||||
|         conn, |         conn, | ||||||
|     ) |     ) | ||||||
|     .await; |     .await; | ||||||
|  |  | ||||||
|     if let Some(org_id) = cipher.organization_uuid { |     if let Some(ref org_id) = cipher.organization_uuid { | ||||||
|         log_event( |         log_event( | ||||||
|             EventType::CipherAttachmentDeleted as i32, |             EventType::CipherAttachmentDeleted as i32, | ||||||
|             &cipher.uuid, |             &cipher.uuid, | ||||||
|             &org_id, |             org_id, | ||||||
|             &headers.user.uuid, |             &headers.user.uuid, | ||||||
|             headers.device.atype, |             headers.device.atype, | ||||||
|             &headers.ip.ip, |             &headers.ip.ip, | ||||||
| @@ -1859,7 +1912,8 @@ async fn _delete_cipher_attachment_by_id( | |||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
|     } |     } | ||||||
|     Ok(()) |     let cipher_json = cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?; | ||||||
|  |     Ok(Json(json!({"cipher":cipher_json}))) | ||||||
| } | } | ||||||
|  |  | ||||||
| /// This will hold all the necessary data to improve a full sync of all the ciphers | /// This will hold all the necessary data to improve a full sync of all the ciphers | ||||||
| @@ -1933,11 +1987,21 @@ impl CipherSyncData { | |||||||
|  |  | ||||||
|         // Generate a HashMap with the collections_uuid as key and the CollectionGroup record |         // Generate a HashMap with the collections_uuid as key and the CollectionGroup record | ||||||
|         let user_collections_groups: HashMap<CollectionId, CollectionGroup> = if CONFIG.org_groups_enabled() { |         let user_collections_groups: HashMap<CollectionId, CollectionGroup> = if CONFIG.org_groups_enabled() { | ||||||
|             CollectionGroup::find_by_user(user_id, conn) |             CollectionGroup::find_by_user(user_id, conn).await.into_iter().fold( | ||||||
|                 .await |                 HashMap::new(), | ||||||
|                 .into_iter() |                 |mut combined_permissions, cg| { | ||||||
|                 .map(|collection_group| (collection_group.collections_uuid.clone(), collection_group)) |                     combined_permissions | ||||||
|                 .collect() |                         .entry(cg.collections_uuid.clone()) | ||||||
|  |                         .and_modify(|existing| { | ||||||
|  |                             // Combine permissions: take the most permissive settings. | ||||||
|  |                             existing.read_only &= cg.read_only; // false if ANY group allows write | ||||||
|  |                             existing.hide_passwords &= cg.hide_passwords; // false if ANY group allows password view | ||||||
|  |                             existing.manage |= cg.manage; // true if ANY group allows manage | ||||||
|  |                         }) | ||||||
|  |                         .or_insert(cg); | ||||||
|  |                     combined_permissions | ||||||
|  |                 }, | ||||||
|  |             ) | ||||||
|         } else { |         } else { | ||||||
|             HashMap::new() |             HashMap::new() | ||||||
|         }; |         }; | ||||||
|   | |||||||
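The CipherSyncData hunk above replaces a plain collect() into a HashMap, where a later CollectionGroup row for the same collection silently overwrote an earlier one, with a fold that merges all of a user's group rows into the most permissive combination. Below is a minimal standalone sketch of that folding rule; the CollectionGroup struct is a simplified stand-in (only the fields visible in the diff, plain String instead of the project's UUID newtypes), not the real vaultwarden model.

    use std::collections::HashMap;

    // Simplified stand-in for the CollectionGroup model, with only the fields the fold touches.
    #[derive(Debug, Clone)]
    struct CollectionGroup {
        collections_uuid: String,
        read_only: bool,
        hide_passwords: bool,
        manage: bool,
    }

    // Fold every group row into one entry per collection, keeping the most permissive flags.
    fn combine(groups: Vec<CollectionGroup>) -> HashMap<String, CollectionGroup> {
        groups.into_iter().fold(HashMap::new(), |mut combined, cg| {
            combined
                .entry(cg.collections_uuid.clone())
                .and_modify(|existing| {
                    existing.read_only &= cg.read_only; // false if ANY group allows write
                    existing.hide_passwords &= cg.hide_passwords; // false if ANY group shows passwords
                    existing.manage |= cg.manage; // true if ANY group allows manage
                })
                .or_insert(cg);
            combined
        })
    }

    fn main() {
        let merged = combine(vec![
            CollectionGroup { collections_uuid: "col-1".into(), read_only: true, hide_passwords: true, manage: false },
            CollectionGroup { collections_uuid: "col-1".into(), read_only: false, hide_passwords: true, manage: true },
        ]);
        // Prints one entry with read_only = false, hide_passwords = true, manage = true.
        println!("{:?}", merged.get("col-1"));
    }

With the old collect() the effective permissions depended on which row happened to come last; the fold makes the result order-independent: read_only and hide_passwords end up false as soon as any group clears them, and manage ends up true as soon as any group sets it.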
| @@ -227,7 +227,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu | |||||||
|     let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await { |     let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await { | ||||||
|         None => { |         None => { | ||||||
|             if !CONFIG.invitations_allowed() { |             if !CONFIG.invitations_allowed() { | ||||||
|                 err!(format!("Grantee user does not exist: {}", &email)) |                 err!(format!("Grantee user does not exist: {email}")) | ||||||
|             } |             } | ||||||
|  |  | ||||||
|             if !CONFIG.is_email_domain_allowed(&email) { |             if !CONFIG.is_email_domain_allowed(&email) { | ||||||
| @@ -239,7 +239,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu | |||||||
|                 invitation.save(&mut conn).await?; |                 invitation.save(&mut conn).await?; | ||||||
|             } |             } | ||||||
|  |  | ||||||
|             let mut user = User::new(email.clone()); |             let mut user = User::new(email.clone(), None); | ||||||
|             user.save(&mut conn).await?; |             user.save(&mut conn).await?; | ||||||
|             (user, true) |             (user, true) | ||||||
|         } |         } | ||||||
| @@ -582,7 +582,7 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut | |||||||
|                 CipherSyncType::User, |                 CipherSyncType::User, | ||||||
|                 &mut conn, |                 &mut conn, | ||||||
|             ) |             ) | ||||||
|             .await, |             .await?, | ||||||
|         ); |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -29,7 +29,7 @@ struct EventRange { | |||||||
|     continuation_token: Option<String>, |     continuation_token: Option<String>, | ||||||
| } | } | ||||||
|  |  | ||||||
| // Upstream: https://github.com/bitwarden/server/blob/9ecf69d9cabce732cf2c57976dd9afa5728578fb/src/Api/Controllers/EventsController.cs#LL84C35-L84C41 | // Upstream: https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/EventsController.cs#L87 | ||||||
| #[get("/organizations/<org_id>/events?<data..>")] | #[get("/organizations/<org_id>/events?<data..>")] | ||||||
| async fn get_org_events( | async fn get_org_events( | ||||||
|     org_id: OrganizationId, |     org_id: OrganizationId, | ||||||
| @@ -169,8 +169,8 @@ struct EventCollection { | |||||||
| } | } | ||||||
|  |  | ||||||
| // Upstream: | // Upstream: | ||||||
| // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs | // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Events/Controllers/CollectController.cs | ||||||
| // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs | // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/EventService.cs | ||||||
| #[post("/collect", format = "application/json", data = "<data>")] | #[post("/collect", format = "application/json", data = "<data>")] | ||||||
| async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult { | async fn post_events_collect(data: Json<Vec<EventCollection>>, headers: Headers, mut conn: DbConn) -> EmptyResult { | ||||||
|     if !CONFIG.org_events_enabled() { |     if !CONFIG.org_events_enabled() { | ||||||
|   | |||||||
| @@ -45,7 +45,7 @@ async fn post_folders(data: Json<FolderData>, headers: Headers, mut conn: DbConn | |||||||
|     let mut folder = Folder::new(headers.user.uuid, data.name); |     let mut folder = Folder::new(headers.user.uuid, data.name); | ||||||
|  |  | ||||||
|     folder.save(&mut conn).await?; |     folder.save(&mut conn).await?; | ||||||
|     nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await; |     nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(Json(folder.to_json())) |     Ok(Json(folder.to_json())) | ||||||
| } | } | ||||||
| @@ -78,7 +78,7 @@ async fn put_folder( | |||||||
|     folder.name = data.name; |     folder.name = data.name; | ||||||
|  |  | ||||||
|     folder.save(&mut conn).await?; |     folder.save(&mut conn).await?; | ||||||
|     nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await; |     nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(Json(folder.to_json())) |     Ok(Json(folder.to_json())) | ||||||
| } | } | ||||||
| @@ -97,6 +97,6 @@ async fn delete_folder(folder_id: FolderId, headers: Headers, mut conn: DbConn, | |||||||
|     // Delete the actual folder entry |     // Delete the actual folder entry | ||||||
|     folder.delete(&mut conn).await?; |     folder.delete(&mut conn).await?; | ||||||
|  |  | ||||||
|     nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device.uuid, &mut conn).await; |     nt.send_folder_update(UpdateType::SyncFolderDelete, &folder, &headers.device, &mut conn).await; | ||||||
|     Ok(()) |     Ok(()) | ||||||
| } | } | ||||||
|   | |||||||
| @@ -50,11 +50,12 @@ pub fn events_routes() -> Vec<Route> { | |||||||
| use rocket::{serde::json::Json, serde::json::Value, Catcher, Route}; | use rocket::{serde::json::Json, serde::json::Value, Catcher, Route}; | ||||||
|  |  | ||||||
| use crate::{ | use crate::{ | ||||||
|     api::{JsonResult, Notify, UpdateType}, |     api::{EmptyResult, JsonResult, Notify, UpdateType}, | ||||||
|     auth::Headers, |     auth::Headers, | ||||||
|     db::DbConn, |     db::{models::*, DbConn}, | ||||||
|     error::Error, |     error::Error, | ||||||
|     http_client::make_http_request, |     http_client::make_http_request, | ||||||
|  |     mail, | ||||||
|     util::parse_experimental_client_feature_flags, |     util::parse_experimental_client_feature_flags, | ||||||
| }; | }; | ||||||
|  |  | ||||||
| @@ -124,7 +125,7 @@ async fn post_eq_domains( | |||||||
|  |  | ||||||
|     user.save(&mut conn).await?; |     user.save(&mut conn).await?; | ||||||
|  |  | ||||||
|     nt.send_user_update(UpdateType::SyncSettings, &user).await; |     nt.send_user_update(UpdateType::SyncSettings, &user, &headers.device.push_uuid, &mut conn).await; | ||||||
|  |  | ||||||
|     Ok(Json(json!({}))) |     Ok(Json(json!({}))) | ||||||
| } | } | ||||||
| @@ -199,11 +200,18 @@ fn get_api_webauthn(_headers: Headers) -> Json<Value> { | |||||||
| #[get("/config")] | #[get("/config")] | ||||||
| fn config() -> Json<Value> { | fn config() -> Json<Value> { | ||||||
|     let domain = crate::CONFIG.domain(); |     let domain = crate::CONFIG.domain(); | ||||||
|  |     // Officially available feature flags can be found here: | ||||||
|  |     // Server (v2025.6.2): https://github.com/bitwarden/server/blob/d094be3267f2030bd0dc62106bc6871cf82682f5/src/Core/Constants.cs#L103 | ||||||
|  |     // Client (web-v2025.6.1): https://github.com/bitwarden/clients/blob/747c2fd6a1c348a57a76e4a7de8128466ffd3c01/libs/common/src/enums/feature-flag.enum.ts#L12 | ||||||
|  |     // Android (v2025.6.0): https://github.com/bitwarden/android/blob/b5b022caaad33390c31b3021b2c1205925b0e1a2/app/src/main/kotlin/com/x8bit/bitwarden/data/platform/manager/model/FlagKey.kt#L22 | ||||||
|  |     // iOS (v2025.6.0): https://github.com/bitwarden/ios/blob/ff06d9c6cc8da89f78f37f376495800201d7261a/BitwardenShared/Core/Platform/Models/Enum/FeatureFlag.swift#L7 | ||||||
|     let mut feature_states = |     let mut feature_states = | ||||||
|         parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags()); |         parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags()); | ||||||
|     // Force the new key rotation feature |     feature_states.insert("duo-redirect".to_string(), true); | ||||||
|     feature_states.insert("key-rotation-improvements".to_string(), true); |     feature_states.insert("email-verification".to_string(), true); | ||||||
|     feature_states.insert("flexible-collections-v-1".to_string(), false); |     feature_states.insert("unauth-ui-refresh".to_string(), true); | ||||||
|  |     feature_states.insert("enable-pm-flight-recorder".to_string(), true); | ||||||
|  |     feature_states.insert("mobile-error-reporting".to_string(), true); | ||||||
|  |  | ||||||
|     Json(json!({ |     Json(json!({ | ||||||
|         // Note: The clients use this version to handle backwards compatibility concerns |         // Note: The clients use this version to handle backwards compatibility concerns | ||||||
| @@ -211,14 +219,14 @@ fn config() -> Json<Value> { | |||||||
|         // We should make sure that we keep this updated when we support the new server features |         // We should make sure that we keep this updated when we support the new server features | ||||||
|         // Version history: |         // Version history: | ||||||
|         // - Individual cipher key encryption: 2024.2.0 |         // - Individual cipher key encryption: 2024.2.0 | ||||||
|         "version": "2025.1.0", |         "version": "2025.6.0", | ||||||
|         "gitHash": option_env!("GIT_REV"), |         "gitHash": option_env!("GIT_REV"), | ||||||
|         "server": { |         "server": { | ||||||
|           "name": "Vaultwarden", |           "name": "Vaultwarden", | ||||||
|           "url": "https://github.com/dani-garcia/vaultwarden" |           "url": "https://github.com/dani-garcia/vaultwarden" | ||||||
|         }, |         }, | ||||||
|         "settings": { |         "settings": { | ||||||
|             "disableUserRegistration": !crate::CONFIG.signups_allowed() && crate::CONFIG.signups_domains_whitelist().is_empty(), |             "disableUserRegistration": crate::CONFIG.is_signup_disabled() | ||||||
|         }, |         }, | ||||||
|         "environment": { |         "environment": { | ||||||
|           "vault": domain, |           "vault": domain, | ||||||
| @@ -226,6 +234,12 @@ fn config() -> Json<Value> { | |||||||
|           "identity": format!("{domain}/identity"), |           "identity": format!("{domain}/identity"), | ||||||
|           "notifications": format!("{domain}/notifications"), |           "notifications": format!("{domain}/notifications"), | ||||||
|           "sso": "", |           "sso": "", | ||||||
|  |           "cloudRegion": null, | ||||||
|  |         }, | ||||||
|  |         // Bitwarden uses this for the self-hosted servers to indicate the default push technology | ||||||
|  |         "push": { | ||||||
|  |           "pushTechnology": 0, | ||||||
|  |           "vapidPublicKey": null | ||||||
|         }, |         }, | ||||||
|         "featureStates": feature_states, |         "featureStates": feature_states, | ||||||
|         "object": "config", |         "object": "config", | ||||||
| @@ -246,3 +260,49 @@ fn api_not_found() -> Json<Value> { | |||||||
|         } |         } | ||||||
|     })) |     })) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | async fn accept_org_invite( | ||||||
|  |     user: &User, | ||||||
|  |     mut member: Membership, | ||||||
|  |     reset_password_key: Option<String>, | ||||||
|  |     conn: &mut DbConn, | ||||||
|  | ) -> EmptyResult { | ||||||
|  |     if member.status != MembershipStatus::Invited as i32 { | ||||||
|  |         err!("User already accepted the invitation"); | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type | ||||||
|  |     // It returns different error messages per function. | ||||||
|  |     if member.atype < MembershipType::Admin { | ||||||
|  |         match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await { | ||||||
|  |             Ok(_) => {} | ||||||
|  |             Err(OrgPolicyErr::TwoFactorMissing) => { | ||||||
|  |                 if crate::CONFIG.email_2fa_auto_fallback() { | ||||||
|  |                     two_factor::email::activate_email_2fa(user, conn).await?; | ||||||
|  |                 } else { | ||||||
|  |                     err!("You cannot join this organization until you enable two-step login on your user account"); | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |             Err(OrgPolicyErr::SingleOrgEnforced) => { | ||||||
|  |                 err!("You cannot join this organization because you are a member of an organization which forbids it"); | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     member.status = MembershipStatus::Accepted as i32; | ||||||
|  |     member.reset_password_key = reset_password_key; | ||||||
|  |  | ||||||
|  |     member.save(conn).await?; | ||||||
|  |  | ||||||
|  |     if crate::CONFIG.mail_enabled() { | ||||||
|  |         let org = match Organization::find_by_uuid(&member.org_uuid, conn).await { | ||||||
|  |             Some(org) => org, | ||||||
|  |             None => err!("Organization not found."), | ||||||
|  |         }; | ||||||
|  |         // User was invited to an organization, so they must be confirmed manually after acceptance | ||||||
|  |         mail::send_invite_accepted(&user.email, &member.invited_by_email.unwrap_or(org.billing_email), &org.name) | ||||||
|  |             .await?; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     Ok(()) | ||||||
|  | } | ||||||
|   | |||||||
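In the /config hunk above, feature_states is first filled from the admin-configured experimental flags and a fixed set of flags is then inserted on top, so those keys are always reported to the clients as enabled regardless of the configured value. A small sketch of that override order follows; parse_flags is a hypothetical stand-in for parse_experimental_client_feature_flags, whose real implementation is not part of this diff.

    use std::collections::HashMap;

    // Hypothetical stand-in: turns "flag-a, flag-b" into {"flag-a": true, "flag-b": true}.
    fn parse_flags(raw: &str) -> HashMap<String, bool> {
        raw.split(',')
            .map(str::trim)
            .filter(|s| !s.is_empty())
            .map(|s| (s.to_string(), true))
            .collect()
    }

    fn main() {
        // Whatever the admin configured...
        let mut feature_states = parse_flags("example-configured-flag");

        // ...these flags from the diff are inserted afterwards; HashMap::insert overwrites
        // any existing value, so they always reach the clients as true.
        for forced in ["duo-redirect", "email-verification", "unauth-ui-refresh",
                       "enable-pm-flight-recorder", "mobile-error-reporting"] {
            feature_states.insert(forced.to_string(), true);
        }

        println!("{feature_states:?}");
    }

The left-hand side of the same hunk shows the previous variant of this pattern, which forced key-rotation-improvements on and flexible-collections-v-1 off.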
| @@ -7,16 +7,13 @@ use std::collections::{HashMap, HashSet}; | |||||||
| use crate::api::admin::FAKE_ADMIN_UUID; | use crate::api::admin::FAKE_ADMIN_UUID; | ||||||
| use crate::{ | use crate::{ | ||||||
|     api::{ |     api::{ | ||||||
|         core::{log_event, two_factor, CipherSyncData, CipherSyncType}, |         core::{accept_org_invite, log_event, two_factor, CipherSyncData, CipherSyncType}, | ||||||
|         EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, |         EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, | ||||||
|     }, |     }, | ||||||
|     auth::{ |     auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OrgMemberHeaders, OwnerHeaders}, | ||||||
|         decode_invite, AdminHeaders, ClientVersion, Headers, ManagerHeaders, ManagerHeadersLoose, OrgMemberHeaders, |  | ||||||
|         OwnerHeaders, |  | ||||||
|     }, |  | ||||||
|     db::{models::*, DbConn}, |     db::{models::*, DbConn}, | ||||||
|     mail, |     mail, | ||||||
|     util::{convert_json_key_lcase_first, NumberOrString}, |     util::{convert_json_key_lcase_first, get_uuid, NumberOrString}, | ||||||
|     CONFIG, |     CONFIG, | ||||||
| }; | }; | ||||||
|  |  | ||||||
| @@ -46,6 +43,7 @@ pub fn routes() -> Vec<Route> { | |||||||
|         bulk_delete_organization_collections, |         bulk_delete_organization_collections, | ||||||
|         post_bulk_collections, |         post_bulk_collections, | ||||||
|         get_org_details, |         get_org_details, | ||||||
|  |         get_org_domain_sso_verified, | ||||||
|         get_members, |         get_members, | ||||||
|         send_invite, |         send_invite, | ||||||
|         reinvite_member, |         reinvite_member, | ||||||
| @@ -63,6 +61,7 @@ pub fn routes() -> Vec<Route> { | |||||||
|         post_org_import, |         post_org_import, | ||||||
|         list_policies, |         list_policies, | ||||||
|         list_policies_token, |         list_policies_token, | ||||||
|  |         get_master_password_policy, | ||||||
|         get_policy, |         get_policy, | ||||||
|         put_policy, |         put_policy, | ||||||
|         get_organization_tax, |         get_organization_tax, | ||||||
| @@ -106,6 +105,7 @@ pub fn routes() -> Vec<Route> { | |||||||
|         api_key, |         api_key, | ||||||
|         rotate_api_key, |         rotate_api_key, | ||||||
|         get_billing_metadata, |         get_billing_metadata, | ||||||
|  |         get_auto_enroll_status, | ||||||
|     ] |     ] | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -195,7 +195,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, mut conn: Db | |||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     let org = Organization::new(data.name, data.billing_email, private_key, public_key); |     let org = Organization::new(data.name, data.billing_email, private_key, public_key); | ||||||
|     let mut member = Membership::new(headers.user.uuid, org.uuid.clone()); |     let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None); | ||||||
|     let collection = Collection::new(org.uuid.clone(), data.collection_name, None); |     let collection = Collection::new(org.uuid.clone(), data.collection_name, None); | ||||||
|  |  | ||||||
|     member.akey = data.key; |     member.akey = data.key; | ||||||
| @@ -338,6 +338,34 @@ async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json<Value> | |||||||
|     })) |     })) | ||||||
| } | } | ||||||
|  |  | ||||||
|  | // Called during the SSO enrollment | ||||||
|  | // The `identifier` should be the value returned by `get_org_domain_sso_details` | ||||||
|  | // The returned `Id` will then be passed to `get_master_password_policy` which will mainly ignore it | ||||||
|  | #[get("/organizations/<identifier>/auto-enroll-status")] | ||||||
|  | async fn get_auto_enroll_status(identifier: &str, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||||
|  |     let org = if identifier == crate::sso::FAKE_IDENTIFIER { | ||||||
|  |         match Membership::find_main_user_org(&headers.user.uuid, &mut conn).await { | ||||||
|  |             Some(member) => Organization::find_by_uuid(&member.org_uuid, &mut conn).await, | ||||||
|  |             None => None, | ||||||
|  |         } | ||||||
|  |     } else { | ||||||
|  |         Organization::find_by_name(identifier, &mut conn).await | ||||||
|  |     }; | ||||||
|  |  | ||||||
|  |     let (id, identifier, rp_auto_enroll) = match org { | ||||||
|  |         None => (get_uuid(), identifier.to_string(), false), | ||||||
|  |         Some(org) => { | ||||||
|  |             (org.uuid.to_string(), org.name, OrgPolicy::org_is_reset_password_auto_enroll(&org.uuid, &mut conn).await) | ||||||
|  |         } | ||||||
|  |     }; | ||||||
|  |  | ||||||
|  |     Ok(Json(json!({ | ||||||
|  |         "Id": id, | ||||||
|  |         "Identifier": identifier, | ||||||
|  |         "ResetPasswordEnabled": rp_auto_enroll, | ||||||
|  |     }))) | ||||||
|  | } | ||||||
|  |  | ||||||
| #[get("/organizations/<org_id>/collections")] | #[get("/organizations/<org_id>/collections")] | ||||||
| async fn get_org_collections(org_id: OrganizationId, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult { | async fn get_org_collections(org_id: OrganizationId, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult { | ||||||
|     if org_id != headers.membership.org_uuid { |     if org_id != headers.membership.org_uuid { | ||||||
| @@ -377,6 +405,21 @@ async fn get_org_collections_details( | |||||||
|         || (CONFIG.org_groups_enabled() |         || (CONFIG.org_groups_enabled() | ||||||
|             && GroupUser::has_full_access_by_member(&org_id, &member.uuid, &mut conn).await); |             && GroupUser::has_full_access_by_member(&org_id, &member.uuid, &mut conn).await); | ||||||
|  |  | ||||||
|  |     // Get all admins, owners and managers who can manage/access all | ||||||
|  |     // Those are currently not listed in the col_users but need to be listed too. | ||||||
|  |     let manage_all_members: Vec<Value> = Membership::find_confirmed_and_manage_all_by_org(&org_id, &mut conn) | ||||||
|  |         .await | ||||||
|  |         .into_iter() | ||||||
|  |         .map(|member| { | ||||||
|  |             json!({ | ||||||
|  |                 "id": member.uuid, | ||||||
|  |                 "readOnly": false, | ||||||
|  |                 "hidePasswords": false, | ||||||
|  |                 "manage": true, | ||||||
|  |             }) | ||||||
|  |         }) | ||||||
|  |         .collect(); | ||||||
|  |  | ||||||
|     for col in Collection::find_by_organization(&org_id, &mut conn).await { |     for col in Collection::find_by_organization(&org_id, &mut conn).await { | ||||||
|         // check whether the current user has access to the given collection |         // check whether the current user has access to the given collection | ||||||
|         let assigned = has_full_access_to_org |         let assigned = has_full_access_to_org | ||||||
| @@ -385,7 +428,7 @@ async fn get_org_collections_details( | |||||||
|                 && GroupUser::has_access_to_collection_by_member(&col.uuid, &member.uuid, &mut conn).await); |                 && GroupUser::has_access_to_collection_by_member(&col.uuid, &member.uuid, &mut conn).await); | ||||||
|  |  | ||||||
|         // get the users assigned directly to the given collection |         // get the users assigned directly to the given collection | ||||||
|         let users: Vec<Value> = col_users |         let mut users: Vec<Value> = col_users | ||||||
|             .iter() |             .iter() | ||||||
|             .filter(|collection_member| collection_member.collection_uuid == col.uuid) |             .filter(|collection_member| collection_member.collection_uuid == col.uuid) | ||||||
|             .map(|collection_member| { |             .map(|collection_member| { | ||||||
| @@ -394,6 +437,7 @@ async fn get_org_collections_details( | |||||||
|                 ) |                 ) | ||||||
|             }) |             }) | ||||||
|             .collect(); |             .collect(); | ||||||
|  |         users.extend_from_slice(&manage_all_members); | ||||||
|  |  | ||||||
|         // get the group details for the given collection |         // get the group details for the given collection | ||||||
|         let groups: Vec<Value> = if CONFIG.org_groups_enabled() { |         let groups: Vec<Value> = if CONFIG.org_groups_enabled() { | ||||||
| @@ -684,6 +728,9 @@ async fn _delete_organization_collection( | |||||||
|     headers: &ManagerHeaders, |     headers: &ManagerHeaders, | ||||||
|     conn: &mut DbConn, |     conn: &mut DbConn, | ||||||
| ) -> EmptyResult { | ) -> EmptyResult { | ||||||
|  |     if org_id != &headers.org_id { | ||||||
|  |         err!("Organization not found", "Organization id's do not match"); | ||||||
|  |     } | ||||||
|     let Some(collection) = Collection::find_by_uuid_and_org(col_id, org_id, conn).await else { |     let Some(collection) = Collection::find_by_uuid_and_org(col_id, org_id, conn).await else { | ||||||
|         err!("Collection not found", "Collection does not exist or does not belong to this organization") |         err!("Collection not found", "Collection does not exist or does not belong to this organization") | ||||||
|     }; |     }; | ||||||
| @@ -710,15 +757,6 @@ async fn delete_organization_collection( | |||||||
|     _delete_organization_collection(&org_id, &col_id, &headers, &mut conn).await |     _delete_organization_collection(&org_id, &col_id, &headers, &mut conn).await | ||||||
| } | } | ||||||
|  |  | ||||||
| #[derive(Deserialize, Debug)] |  | ||||||
| #[serde(rename_all = "camelCase")] |  | ||||||
| struct DeleteCollectionData { |  | ||||||
|     #[allow(dead_code)] |  | ||||||
|     id: String, |  | ||||||
|     #[allow(dead_code)] |  | ||||||
|     org_id: OrganizationId, |  | ||||||
| } |  | ||||||
|  |  | ||||||
| #[post("/organizations/<org_id>/collections/<col_id>/delete")] | #[post("/organizations/<org_id>/collections/<col_id>/delete")] | ||||||
| async fn post_organization_collection_delete( | async fn post_organization_collection_delete( | ||||||
|     org_id: OrganizationId, |     org_id: OrganizationId, | ||||||
@@ -896,26 +934,64 @@ struct OrgIdData {

 #[get("/ciphers/organization-details?<data..>")]
 async fn get_org_details(data: OrgIdData, headers: OrgMemberHeaders, mut conn: DbConn) -> JsonResult {
-    if data.organization_id != headers.org_id {
+    if data.organization_id != headers.membership.org_uuid {
         err_code!("Resource not found.", "Organization id's do not match", rocket::http::Status::NotFound.code);
     }

     Ok(Json(json!({
-        "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await,
+        "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await?,
         "object": "list",
         "continuationToken": null,
     })))
 }

-async fn _get_org_details(org_id: &OrganizationId, host: &str, user_id: &UserId, conn: &mut DbConn) -> Value {
+async fn _get_org_details(
+    org_id: &OrganizationId,
+    host: &str,
+    user_id: &UserId,
+    conn: &mut DbConn,
+) -> Result<Value, crate::Error> {
     let ciphers = Cipher::find_by_org(org_id, conn).await;
     let cipher_sync_data = CipherSyncData::new(user_id, CipherSyncType::Organization, conn).await;

     let mut ciphers_json = Vec::with_capacity(ciphers.len());
     for c in ciphers {
-        ciphers_json.push(c.to_json(host, user_id, Some(&cipher_sync_data), CipherSyncType::Organization, conn).await);
+        ciphers_json.push(c.to_json(host, user_id, Some(&cipher_sync_data), CipherSyncType::Organization, conn).await?);
     }
-    json!(ciphers_json)
+    Ok(json!(ciphers_json))
+}
+
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct OrgDomainDetails {
+    email: String,
+}
+
+// Returning a Domain/Organization here allow to prefill it and prevent prompting the user
+// So we either return an Org name associated to the user or a dummy value.
+// In use since `v2025.6.0`, appears to use only the first `organizationIdentifier`
+#[post("/organizations/domain/sso/verified", data = "<data>")]
+async fn get_org_domain_sso_verified(data: Json<OrgDomainDetails>, mut conn: DbConn) -> JsonResult {
+    let data: OrgDomainDetails = data.into_inner();
+
+    let identifiers = match Organization::find_org_user_email(&data.email, &mut conn)
+        .await
+        .into_iter()
+        .map(|o| o.name)
+        .collect::<Vec<String>>()
+    {
+        v if !v.is_empty() => v,
+        _ => vec![crate::sso::FAKE_IDENTIFIER.to_string()],
+    };
+
+    Ok(Json(json!({
+        "object": "list",
+        "data": identifiers.into_iter().map(|identifier| json!({
+            "organizationName": identifier,     // appear unused
+            "organizationIdentifier": identifier,
+            "domainName": CONFIG.domain(),      // appear unused
+        })).collect::<Vec<Value>>()
+    })))
 }

 #[derive(FromForm)]
@@ -997,8 +1073,6 @@ struct InviteData {
     r#type: NumberOrString,
     collections: Option<Vec<CollectionData>>,
     #[serde(default)]
-    access_all: bool,
-    #[serde(default)]
     permissions: HashMap<String, Value>,
 }

@@ -1012,7 +1086,7 @@ async fn send_invite(
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }
-    let mut data: InviteData = data.into_inner();
+    let data: InviteData = data.into_inner();

     // HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission
     // The from_str() will convert the custom role type into a manager role type
@@ -1030,13 +1104,11 @@ async fn send_invite(
     // HACK: This converts the Custom role which has the `Manage all collections` box checked into an access_all flag
     // Since the parent checkbox is not sent to the server we need to check and verify the child checkboxes
     // If the box is not checked, the user will still be a manager, but not with the access_all permission
-    if raw_type.eq("4")
-        && data.permissions.get("editAnyCollection") == Some(&json!(true))
-        && data.permissions.get("deleteAnyCollection") == Some(&json!(true))
-        && data.permissions.get("createNewCollections") == Some(&json!(true))
-    {
-        data.access_all = true;
-    }
+    let access_all = new_type >= MembershipType::Admin
+        || (raw_type.eq("4")
+            && data.permissions.get("editAnyCollection") == Some(&json!(true))
+            && data.permissions.get("deleteAnyCollection") == Some(&json!(true))
+            && data.permissions.get("createNewCollections") == Some(&json!(true)));

     let mut user_created: bool = false;
     for email in data.emails.iter() {
@@ -1055,7 +1127,7 @@ async fn send_invite(
                     Invitation::new(email).save(&mut conn).await?;
                 }

-                let mut new_user = User::new(email.clone());
+                let mut new_user = User::new(email.clone(), None);
                 new_user.save(&mut conn).await?;
                 user_created = true;
                 new_user
@@ -1073,8 +1145,7 @@ async fn send_invite(
             }
         };

-        let mut new_member = Membership::new(user.uuid.clone(), org_id.clone());
-        let access_all = data.access_all;
+        let mut new_member = Membership::new(user.uuid.clone(), org_id.clone(), Some(headers.user.email.clone()));
         new_member.access_all = access_all;
         new_member.atype = new_type;
         new_member.status = member_status;
@@ -1188,6 +1259,9 @@ async fn reinvite_member(
     headers: AdminHeaders,
     mut conn: DbConn,
 ) -> EmptyResult {
+    if org_id != headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     _reinvite_member(&org_id, &member_id, &headers.user.email, &mut conn).await
 }

@@ -1257,71 +1331,39 @@ async fn accept_invite(
         err!("Invitation was issued to a different account", "Claim does not match user_id")
     }

+    // If a claim org_id does not match the one in from the URI, something is wrong.
+    if !claims.org_id.eq(&org_id) {
+        err!("Error accepting the invitation", "Claim does not match the org_id")
+    }
+
     // If a claim does not have a member_id or it does not match the one in from the URI, something is wrong.
     if !claims.member_id.eq(&member_id) {
         err!("Error accepting the invitation", "Claim does not match the member_id")
     }

-    let member = &claims.member_id;
-    let org = &claims.org_id;
+    let member_id = &claims.member_id;

     Invitation::take(&claims.email, &mut conn).await;

     // skip invitation logic when we were invited via the /admin panel
-    if **member != FAKE_ADMIN_UUID {
-        let Some(mut member) = Membership::find_by_uuid_and_org(member, org, &mut conn).await else {
+    if **member_id != FAKE_ADMIN_UUID {
+        let Some(mut member) = Membership::find_by_uuid_and_org(member_id, &claims.org_id, &mut conn).await else {
             err!("Error accepting the invitation")
         };

-        if member.status != MembershipStatus::Invited as i32 {
-            err!("User already accepted the invitation")
-        }
+        let reset_password_key = match OrgPolicy::org_is_reset_password_auto_enroll(&member.org_uuid, &mut conn).await {
+            true if data.reset_password_key.is_none() => err!("Reset password key is required, but not provided."),
+            true => data.reset_password_key,
+            false => None,
+        };

-        let master_password_required = OrgPolicy::org_is_reset_password_auto_enroll(org, &mut conn).await;
-        if data.reset_password_key.is_none() && master_password_required {
-            err!("Reset password key is required, but not provided.");
-        }
+        // In case the user was invited before the mail was saved in db.
+        member.invited_by_email = member.invited_by_email.or(claims.invited_by_email);

-        // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
-        // It returns different error messages per function.
-        if member.atype < MembershipType::Admin {
-            match OrgPolicy::is_user_allowed(&member.user_uuid, &org_id, false, &mut conn).await {
-                Ok(_) => {}
-                Err(OrgPolicyErr::TwoFactorMissing) => {
-                    if CONFIG.email_2fa_auto_fallback() {
-                        two_factor::email::activate_email_2fa(&headers.user, &mut conn).await?;
-                    } else {
-                        err!("You cannot join this organization until you enable two-step login on your user account");
-                    }
-                }
-                Err(OrgPolicyErr::SingleOrgEnforced) => {
-                    err!("You cannot join this organization because you are a member of an organization which forbids it");
-                }
-            }
-        }
-
-        member.status = MembershipStatus::Accepted as i32;
-
-        if master_password_required {
-            member.reset_password_key = data.reset_password_key;
-        }
-
-        member.save(&mut conn).await?;
-    }
-
-    if CONFIG.mail_enabled() {
-        if let Some(invited_by_email) = &claims.invited_by_email {
-            let org_name = match Organization::find_by_uuid(&claims.org_id, &mut conn).await {
-                Some(org) => org.name,
-                None => err!("Organization not found."),
-            };
-            // User was invited to an organization, so they must be confirmed manually after acceptance
-            mail::send_invite_accepted(&claims.email, invited_by_email, &org_name).await?;
-        } else {
-            // User was invited from /admin, so they are automatically confirmed
-            let org_name = CONFIG.invitation_org_name();
-            mail::send_invite_confirmed(&claims.email, &org_name).await?;
-        }
+        accept_org_invite(&headers.user, member, reset_password_key, &mut conn).await?;
+    } else if CONFIG.mail_enabled() {
+        // User was invited from /admin, so they are automatically confirmed
+        let org_name = CONFIG.invitation_org_name();
+        mail::send_invite_confirmed(&claims.email, &org_name).await?;
     }

     Ok(())
@@ -1405,6 +1447,9 @@ async fn _confirm_invite(
     conn: &mut DbConn,
     nt: &Notify<'_>,
 ) -> EmptyResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if key.is_empty() || member_id.is_empty() {
         err!("Key or UserId is not set, unable to process request");
     }
@@ -1468,7 +1513,7 @@ async fn _confirm_invite(
     let save_result = member_to_confirm.save(conn).await;

     if let Some(user) = User::find_by_uuid(&member_to_confirm.user_uuid, conn).await {
-        nt.send_user_update(UpdateType::SyncOrgKeys, &user).await;
+        nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await;
     }

     save_result
@@ -1525,8 +1570,6 @@ struct EditUserData {
     collections: Option<Vec<CollectionData>>,
     groups: Option<Vec<GroupId>>,
     #[serde(default)]
-    access_all: bool,
-    #[serde(default)]
     permissions: HashMap<String, Value>,
 }

@@ -1552,7 +1595,7 @@ async fn edit_member(
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }
-    let mut data: EditUserData = data.into_inner();
+    let data: EditUserData = data.into_inner();

     // HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission
     // The from_str() will convert the custom role type into a manager role type
@@ -1565,13 +1608,11 @@ async fn edit_member(
     // HACK: This converts the Custom role which has the `Manage all collections` box checked into an access_all flag
     // Since the parent checkbox is not sent to the server we need to check and verify the child checkboxes
     // If the box is not checked, the user will still be a manager, but not with the access_all permission
-    if raw_type.eq("4")
-        && data.permissions.get("editAnyCollection") == Some(&json!(true))
-        && data.permissions.get("deleteAnyCollection") == Some(&json!(true))
-        && data.permissions.get("createNewCollections") == Some(&json!(true))
-    {
-        data.access_all = true;
-    }
+    let access_all = new_type >= MembershipType::Admin
+        || (raw_type.eq("4")
+            && data.permissions.get("editAnyCollection") == Some(&json!(true))
+            && data.permissions.get("deleteAnyCollection") == Some(&json!(true))
+            && data.permissions.get("createNewCollections") == Some(&json!(true)));

     let mut member_to_edit = match Membership::find_by_uuid_and_org(&member_id, &org_id, &mut conn).await {
         Some(member) => member,
@@ -1617,7 +1658,7 @@ async fn edit_member(
         }
     }

-    member_to_edit.access_all = data.access_all;
+    member_to_edit.access_all = access_all;
     member_to_edit.atype = new_type as i32;

     // Delete all the odd collections
@@ -1626,7 +1667,7 @@ async fn edit_member(
     }

     // If no accessAll, add the collections received
-    if !data.access_all {
+    if !access_all {
         for col in data.collections.iter().flatten() {
             match Collection::find_by_uuid_and_org(&col.id, &org_id, &mut conn).await {
                 None => err!("Collection not found in Organization"),
@@ -1731,6 +1772,9 @@ async fn _delete_member(
     conn: &mut DbConn,
     nt: &Notify<'_>,
 ) -> EmptyResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     let Some(member_to_delete) = Membership::find_by_uuid_and_org(member_id, org_id, conn).await else {
         err!("User to delete isn't member of the organization")
     };
@@ -1759,7 +1803,7 @@ async fn _delete_member(
     .await;

     if let Some(user) = User::find_by_uuid(&member_to_delete.user_uuid, conn).await {
-        nt.send_user_update(UpdateType::SyncOrgKeys, &user).await;
+        nt.send_user_update(UpdateType::SyncOrgKeys, &user, &headers.device.push_uuid, conn).await;
     }

     member_to_delete.delete(conn).await
@@ -1825,16 +1869,20 @@ struct RelationsData {
     value: usize,
 }

+// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/ImportCiphersController.cs#L62
 #[post("/ciphers/import-organization?<query..>", data = "<data>")]
 async fn post_org_import(
     query: OrgIdData,
     data: Json<ImportData>,
-    headers: AdminHeaders,
+    headers: OrgMemberHeaders,
     mut conn: DbConn,
     nt: Notify<'_>,
 ) -> EmptyResult {
-    let data: ImportData = data.into_inner();
     let org_id = query.organization_id;
+    if org_id != headers.membership.org_uuid {
+        err!("Organization not found", "Organization id's do not match");
+    }
+    let data: ImportData = data.into_inner();

     // Validate the import before continuing
     // Bitwarden does not process the import if there is one item invalid.
@@ -1847,8 +1895,20 @@ async fn post_org_import(
     let mut collections: Vec<CollectionId> = Vec::with_capacity(data.collections.len());
     for col in data.collections {
         let collection_uuid = if existing_collections.contains(&col.id) {
-            col.id.unwrap()
+            let col_id = col.id.unwrap();
+            // When not an Owner or Admin, check if the member is allowed to access the collection.
+            if headers.membership.atype < MembershipType::Admin
+                && !Collection::can_access_collection(&headers.membership, &col_id, &mut conn).await
+            {
+                err!(Compact, "The current user isn't allowed to manage this collection")
+            }
+            col_id
         } else {
+            // We do not allow users or managers which can not manage all collections to create new collections
+            // If there is any collection other than an existing import collection, abort the import.
+            if headers.membership.atype <= MembershipType::Manager && !headers.membership.has_full_access() {
+                err!(Compact, "The current user isn't allowed to create new collections")
+            }
             let new_collection = Collection::new(org_id.clone(), col.name, col.external_id);
             new_collection.save(&mut conn).await?;
             new_collection.uuid
@@ -1871,7 +1931,17 @@ async fn post_org_import(
         // Always clear folder_id's via an organization import
         cipher_data.folder_id = None;
         let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
-        update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok();
+        update_cipher_from_data(
+            &mut cipher,
+            cipher_data,
+            &headers,
+            Some(collections.clone()),
+            &mut conn,
+            &nt,
+            UpdateType::None,
+        )
+        .await
+        .ok();
         ciphers.push(cipher.uuid);
     }

@@ -1902,12 +1972,6 @@ struct BulkCollectionsData {
 async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
     let data: BulkCollectionsData = data.into_inner();

-    // This feature does not seem to be active on all the clients
-    // To prevent future issues, add a check to block a call when this is set to true
-    if data.remove_collections {
-        err!("Bulk removing of collections is not yet implemented")
-    }
-
     // Get all the collection available to the user in one query
     // Also filter based upon the provided collections
     let user_collections: HashMap<CollectionId, Collection> =
@@ -1936,8 +2000,16 @@ async fn post_bulk_collections(data: Json<BulkCollectionsData>, headers: Headers
         // Do not abort the operation just ignore it, it could be a cipher was just deleted for example
         if let Some(cipher) = Cipher::find_by_uuid_and_org(cipher_id, &data.organization_id, &mut conn).await {
             if cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await {
-                for collection in &data.collection_ids {
-                    CollectionCipher::save(&cipher.uuid, collection, &mut conn).await?;
+                // When selecting a specific collection from the left filter list, and use the bulk option, you can remove an item from that collection
+                // In these cases the client will call this endpoint twice, once for adding the new collections and a second for deleting.
+                if data.remove_collections {
+                    for collection in &data.collection_ids {
+                        CollectionCipher::delete(&cipher.uuid, collection, &mut conn).await?;
+                    }
+                } else {
+                    for collection in &data.collection_ids {
+                        CollectionCipher::save(&cipher.uuid, collection, &mut conn).await?;
+                    }
                 }
             }
         };
@@ -1985,18 +2057,36 @@ async fn list_policies_token(org_id: OrganizationId, token: &str, mut conn: DbCo
     })))
 }

-#[get("/organizations/<org_id>/policies/<pol_type>")]
+// Called during the SSO enrollment.
+// Return the org policy if it exists, otherwise use the default one.
+#[get("/organizations/<org_id>/policies/master-password", rank = 1)]
+async fn get_master_password_policy(org_id: OrganizationId, _headers: Headers, mut conn: DbConn) -> JsonResult {
+    let policy =
+        OrgPolicy::find_by_org_and_type(&org_id, OrgPolicyType::MasterPassword, &mut conn).await.unwrap_or_else(|| {
+            let (enabled, data) = match CONFIG.sso_master_password_policy_value() {
+                Some(policy) if CONFIG.sso_enabled() => (true, policy.to_string()),
+                _ => (false, "null".to_string()),
+            };
+
+            OrgPolicy::new(org_id, OrgPolicyType::MasterPassword, enabled, data)
+        });
+
+    Ok(Json(policy.to_json()))
+}
+
+#[get("/organizations/<org_id>/policies/<pol_type>", rank = 2)]
 async fn get_policy(org_id: OrganizationId, pol_type: i32, headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }

     let Some(pol_type_enum) = OrgPolicyType::from_i32(pol_type) else {
         err!("Invalid or unsupported policy type")
     };

     let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type_enum, &mut conn).await {
         Some(p) => p,
-        None => OrgPolicy::new(org_id.clone(), pol_type_enum, "null".to_string()),
+        None => OrgPolicy::new(org_id.clone(), pol_type_enum, false, "null".to_string()),
     };

     Ok(Json(policy.to_json()))
@@ -2107,7 +2197,7 @@ async fn put_policy(

     let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type_enum, &mut conn).await {
         Some(p) => p,
-        None => OrgPolicy::new(org_id.clone(), pol_type_enum, "{}".to_string()),
+        None => OrgPolicy::new(org_id.clone(), pol_type_enum, false, "{}".to_string()),
     };

     policy.enabled = data.enabled;
@@ -2220,7 +2310,7 @@ struct OrgImportData {
     users: Vec<OrgImportUserData>,
 }

-/// This function seems to be deprected
+/// This function seems to be deprecated
 /// It is only used with older directory connectors
 /// TODO: Cleanup Tech debt
 #[post("/organizations/<org_id>/import", data = "<data>")]
@@ -2266,7 +2356,8 @@ async fn import(org_id: OrganizationId, data: Json<OrgImportData>, headers: Head
                     MembershipStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
                 };

-                let mut new_member = Membership::new(user.uuid.clone(), org_id.clone());
+                let mut new_member =
+                    Membership::new(user.uuid.clone(), org_id.clone(), Some(headers.user.email.clone()));
                 new_member.access_all = false;
                 new_member.atype = MembershipType::User as i32;
                 new_member.status = member_status;
@@ -2414,6 +2505,9 @@ async fn _revoke_member(
     headers: &AdminHeaders,
     conn: &mut DbConn,
 ) -> EmptyResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     match Membership::find_by_uuid_and_org(member_id, org_id, conn).await {
         Some(mut member) if member.status > MembershipStatus::Revoked as i32 => {
             if member.user_uuid == headers.user.uuid {
@@ -2521,6 +2615,9 @@ async fn _restore_member(
     headers: &AdminHeaders,
     conn: &mut DbConn,
 ) -> EmptyResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     match Membership::find_by_uuid_and_org(member_id, org_id, conn).await {
         Some(mut member) if member.status < MembershipStatus::Accepted as i32 => {
             if member.user_uuid == headers.user.uuid {
@@ -2568,18 +2665,27 @@ async fn _restore_member(
     Ok(())
 }

-#[get("/organizations/<org_id>/groups")]
-async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult {
+async fn get_groups_data(
+    details: bool,
+    org_id: OrganizationId,
+    headers: ManagerHeadersLoose,
+    mut conn: DbConn,
+) -> JsonResult {
     if org_id != headers.membership.org_uuid {
         err!("Organization not found", "Organization id's do not match");
     }
     let groups: Vec<Value> = if CONFIG.org_groups_enabled() {
-        // Group::find_by_organization(&org_id, &mut conn).await.iter().map(Group::to_json).collect::<Value>()
         let groups = Group::find_by_organization(&org_id, &mut conn).await;
         let mut groups_json = Vec::with_capacity(groups.len());

-        for g in groups {
-            groups_json.push(g.to_json_details(&mut conn).await)
+        if details {
+            for g in groups {
+                groups_json.push(g.to_json_details(&mut conn).await)
+            }
+        } else {
+            for g in groups {
+                groups_json.push(g.to_json())
+            }
         }
         groups_json
     } else {
@@ -2595,9 +2701,14 @@ async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, mut co
     })))
 }

+#[get("/organizations/<org_id>/groups")]
+async fn get_groups(org_id: OrganizationId, headers: ManagerHeadersLoose, conn: DbConn) -> JsonResult {
+    get_groups_data(false, org_id, headers, conn).await
+}
+
 #[get("/organizations/<org_id>/groups/details", rank = 1)]
 async fn get_groups_details(org_id: OrganizationId, headers: ManagerHeadersLoose, conn: DbConn) -> JsonResult {
-    get_groups(org_id, headers, conn).await
+    get_groups_data(true, org_id, headers, conn).await
 }

 #[derive(Deserialize)]
@@ -2659,6 +2770,9 @@ async fn post_groups(
     data: Json<GroupRequest>,
     mut conn: DbConn,
 ) -> JsonResult {
+    if org_id != headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if !CONFIG.org_groups_enabled() {
         err!("Group support is disabled");
     }
@@ -2688,6 +2802,9 @@ async fn put_group(
     headers: AdminHeaders,
     mut conn: DbConn,
 ) -> JsonResult {
+    if org_id != headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if !CONFIG.org_groups_enabled() {
         err!("Group support is disabled");
     }
@@ -2752,7 +2869,8 @@ async fn add_update_group(
         "organizationId": group.organizations_uuid,
         "name": group.name,
         "accessAll": group.access_all,
-        "externalId": group.external_id
+        "externalId": group.external_id,
+        "object": "group"
     })))
 }

@@ -2803,6 +2921,9 @@ async fn _delete_group(
     headers: &AdminHeaders,
     conn: &mut DbConn,
 ) -> EmptyResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if !CONFIG.org_groups_enabled() {
         err!("Group support is disabled");
     }
@@ -2832,6 +2953,9 @@ async fn bulk_delete_groups(
     headers: AdminHeaders,
     mut conn: DbConn,
 ) -> EmptyResult {
+    if org_id != headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if !CONFIG.org_groups_enabled() {
         err!("Group support is disabled");
     }
@@ -2895,6 +3019,9 @@ async fn put_group_members(
     data: Json<Vec<MembershipId>>,
     mut conn: DbConn,
 ) -> EmptyResult {
+    if org_id != headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     if !CONFIG.org_groups_enabled() {
         err!("Group support is disabled");
     }
@@ -3079,7 +3206,7 @@ async fn get_organization_public_key(
     headers: OrgMemberHeaders,
     mut conn: DbConn,
 ) -> JsonResult {
-    if org_id != headers.org_id {
+    if org_id != headers.membership.org_uuid {
         err!("Organization not found", "Organization id's do not match");
     }
     let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else {
@@ -3093,7 +3220,7 @@ async fn get_organization_public_key(
 }

 // Obsolete - Renamed to public-key (2023.8), left for backwards compatibility with older clients
-// https://github.com/bitwarden/server/blob/25dc0c9178e3e3584074bbef0d4be827b7c89415/src/Api/AdminConsole/Controllers/OrganizationsController.cs#L463-L468
+// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Controllers/OrganizationsController.cs#L487-L492
 #[get("/organizations/<org_id>/keys")]
 async fn get_organization_keys(org_id: OrganizationId, headers: OrgMemberHeaders, conn: DbConn) -> JsonResult {
     get_organization_public_key(org_id, headers, conn).await
@@ -3144,7 +3271,7 @@ async fn put_reset_password(
     user.set_password(reset_request.new_master_password_hash.as_str(), Some(reset_request.key), true, None);
     user.save(&mut conn).await?;

-    nt.send_logout(&user, None).await;
+    nt.send_logout(&user, None, &mut conn).await;

     log_event(
         EventType::OrganizationUserAdminResetPassword as i32,
@@ -3184,16 +3311,16 @@ async fn get_reset_password_details(

     check_reset_password_applicable_and_permissions(&org_id, &member_id, &headers, &mut conn).await?;

-    // https://github.com/bitwarden/server/blob/3b50ccb9f804efaacdc46bed5b60e5b28eddefcf/src/Api/Models/Response/Organizations/OrganizationUserResponseModel.cs#L111
+    // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/AdminConsole/Models/Response/Organizations/OrganizationUserResponseModel.cs#L190
     Ok(Json(json!({
         "object": "organizationUserResetPasswordDetails",
-        "kdf":user.client_kdf_type,
-        "kdfIterations":user.client_kdf_iter,
-        "kdfMemory":user.client_kdf_memory,
-        "kdfParallelism":user.client_kdf_parallelism,
-        "resetPasswordKey":member.reset_password_key,
-        "encryptedPrivateKey":org.private_key,
+        "organizationUserId": member_id,
+        "kdf": user.client_kdf_type,
+        "kdfIterations": user.client_kdf_iter,
+        "kdfMemory": user.client_kdf_memory,
+        "kdfParallelism": user.client_kdf_parallelism,
+        "resetPasswordKey": member.reset_password_key,
+        "encryptedPrivateKey": org.private_key,
     })))
 }

@@ -3249,13 +3376,17 @@ async fn put_reset_password_enrollment(

     let reset_request = data.into_inner();

-    if reset_request.reset_password_key.is_none()
-        && OrgPolicy::org_is_reset_password_auto_enroll(&org_id, &mut conn).await
-    {
+    let reset_password_key = match reset_request.reset_password_key {
+        None => None,
+        Some(ref key) if key.is_empty() => None,
+        Some(key) => Some(key),
+    };
+
+    if reset_password_key.is_none() && OrgPolicy::org_is_reset_password_auto_enroll(&org_id, &mut conn).await {
         err!("Reset password can't be withdrawn due to an enterprise policy");
     }

-    if reset_request.reset_password_key.is_some() {
+    if reset_password_key.is_some() {
         PasswordOrOtpData {
             master_password_hash: reset_request.master_password_hash,
             otp: reset_request.otp,
@@ -3264,7 +3395,7 @@ async fn put_reset_password_enrollment(
         .await?;
     }

-    member.reset_password_key = reset_request.reset_password_key;
+    member.reset_password_key = reset_password_key;
     member.save(&mut conn).await?;

     let log_id = if member.reset_password_key.is_some() {
@@ -3278,57 +3409,22 @@ async fn put_reset_password_enrollment(
     Ok(())
 }

-// This is a new function active since the v2022.9.x clients.
-// It combines the previous two calls done before.
-// We call those two functions here and combine them ourselves.
-//
 // NOTE: It seems clients can't handle uppercase-first keys!!
 //       We need to convert all keys so they have the first character to be a lowercase.
 //       Else the export will be just an empty JSON file.
+// We currently only support exports by members of the Admin or Owner status.
+// Vaultwarden does not yet support exporting only managed collections!
+// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Tools/Controllers/OrganizationExportController.cs#L52
 #[get("/organizations/<org_id>/export")]
-async fn get_org_export(
-    org_id: OrganizationId,
-    headers: AdminHeaders,
-    client_version: Option<ClientVersion>,
-    mut conn: DbConn,
-) -> JsonResult {
+async fn get_org_export(org_id: OrganizationId, headers: AdminHeaders, mut conn: DbConn) -> JsonResult {
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }
-    // Since version v2023.1.0 the format of the export is different.
-    // Also, this endpoint was created since v2022.9.0.
-    // Therefore, we will check for any version smaller then v2023.1.0 and return a different response.
-    // If we can't determine the version, we will use the latest default v2023.1.0 and higher.
-    // https://github.com/bitwarden/server/blob/9ca93381ce416454734418c3a9f99ab49747f1b6/src/Api/Controllers/OrganizationExportController.cs#L44
-    let use_list_response_model = if let Some(client_version) = client_version {
-        let ver_match = semver::VersionReq::parse("<2023.1.0").unwrap();
-        ver_match.matches(&client_version.0)
-    } else {
-        false
-    };

-    // Also both main keys here need to be lowercase, else the export will fail.
-    if use_list_response_model {
-        // Backwards compatible pre v2023.1.0 response
-        Ok(Json(json!({
-            "collections": {
-                "data": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
-                "object": "list",
-                "continuationToken": null,
-            },
-            "ciphers": {
-                "data": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
-                "object": "list",
-                "continuationToken": null,
-            }
-        })))
-    } else {
-        // v2023.1.0 and newer response
-        Ok(Json(json!({
-            "collections": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
-            "ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
-        })))
-    }
+    Ok(Json(json!({
+        "collections": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
+        "ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await?),
+    })))
 }

 async fn _api_key(
@@ -3338,6 +3434,9 @@ async fn _api_key(
     headers: AdminHeaders,
     mut conn: DbConn,
 ) -> JsonResult {
+    if org_id != &headers.org_id {
+        err!("Organization not found", "Organization id's do not match");
+    }
     let data: PasswordOrOtpData = data.into_inner();
     let user = headers.user;

@@ -46,7 +46,7 @@ struct OrgImportData {
 #[post("/public/organization/import", data = "<data>")]
 async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: DbConn) -> EmptyResult {
     // Most of the logic for this function can be found here
-    // https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797
+    // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Services/Implementations/OrganizationService.cs#L1203

     let org_id = token.0;
     let data = data.into_inner();
@@ -89,7 +89,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db
                 Some(user) => user, // exists in vaultwarden
                 None => {
                     // User does not exist yet
-                    let mut new_user = User::new(user_data.email.clone());
+                    let mut new_user = User::new(user_data.email.clone(), None);
                     new_user.save(&mut conn).await?;

                     if !CONFIG.mail_enabled() {
@@ -105,7 +105,12 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db
                 MembershipStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
             };

-            let mut new_member = Membership::new(user.uuid.clone(), org_id.clone());
+            let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await {
+                Some(org) => (org.name, org.billing_email),
+                None => err!("Error looking up organization"),
+            };
+
+            let mut new_member = Membership::new(user.uuid.clone(), org_id.clone(), Some(org_email.clone()));
             new_member.set_external_id(Some(user_data.external_id.clone()));
             new_member.access_all = false;
             new_member.atype = MembershipType::User as i32;
@@ -114,11 +119,6 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db
             new_member.save(&mut conn).await?;

             if CONFIG.mail_enabled() {
-                let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await {
-                    Some(org) => (org.name, org.billing_email),
-                    None => err!("Error looking up organization"),
-                };
-
                 if let Err(e) =
                     mail::send_invite(&user, org_id.clone(), new_member.uuid.clone(), &org_name, Some(org_email)).await
                 {
Some files were not shown because too many files have changed in this diff.