Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-10-31 18:28:20 +02:00)

Compare commits: 1.34.2 ... 7161f612a1 (24 commits)

| SHA1 |
|---|
| 7161f612a1 |
| 5ee908517f |
| 55577fa4eb |
| 843c063649 |
| 550b670dba |
| de808c5ad9 |
| 1f73630136 |
| 77008a91e9 |
| 7f386d38ae |
| 8e7eeab293 |
| e35c6f8705 |
| ae7b725c0f |
| 2a5489a4b2 |
| 8fd0ee4211 |
| 4a5516e150 |
| 7fc94516ce |
| 5ea0779d6b |
| a133d4e90c |
| 49eff787de |
| cff6c2b3af |
| a0c76284fd |
| 318653b0e5 |
| 5d84f17600 |
| 0db4b00007 |

.env.template

| @@ -80,8 +80,16 @@ | ||||
| ## Timeout when acquiring database connection | ||||
| # DATABASE_TIMEOUT=30 | ||||
|  | ||||
| ## Database idle timeout | ||||
| ## Timeout in seconds before idle connections to the database are closed. | ||||
| # DATABASE_IDLE_TIMEOUT=600 | ||||
|  | ||||
| ## Database min connections | ||||
| ## Define the minimum size of the connection pool used for connecting to the database. | ||||
| # DATABASE_MIN_CONNS=2 | ||||
|  | ||||
| ## Database max connections | ||||
| ## Define the size of the connection pool used for connecting to the database. | ||||
| ## Define the maximum size of the connection pool used for connecting to the database. | ||||
| # DATABASE_MAX_CONNS=10 | ||||
|  | ||||
| ## Database connection initialization | ||||
| @@ -174,6 +182,10 @@ | ||||
| ## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. | ||||
| ## Defaults to every minute. Set blank to disable this job. | ||||
| # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *" | ||||
| # | ||||
| ## Cron schedule of the job that cleans sso nonce from incomplete flow | ||||
| ## Defaults to daily (20 minutes after midnight). Set blank to disable this job. | ||||
| # PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *" | ||||
|  | ||||
| ######################## | ||||
| ### General settings ### | ||||
| @@ -459,6 +471,55 @@ | ||||
| ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. | ||||
| # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false | ||||
|  | ||||
| ##################################### | ||||
| ### SSO settings (OpenID Connect) ### | ||||
| ##################################### | ||||
|  | ||||
| ## Controls whether users can login using an OpenID Connect identity provider | ||||
| # SSO_ENABLED=false | ||||
|  | ||||
| ## Prevent users from logging in directly without going through SSO | ||||
| # SSO_ONLY=false | ||||
|  | ||||
| ## On SSO Signup, if a user with a matching email already exists, make the association | ||||
| # SSO_SIGNUPS_MATCH_EMAIL=true | ||||
|  | ||||
| ## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` opens up potential account takeover. | ||||
| # SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false | ||||
|  | ||||
| ## Base URL of the OIDC server (auto-discovery is used) | ||||
| ##  - Should not include the `/.well-known/openid-configuration` part and no trailing `/` | ||||
| ##  - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse | ||||
| # SSO_AUTHORITY=https://auth.example.com | ||||
|  | ||||
| ## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit). | ||||
| # SSO_SCOPES="email profile" | ||||
|  | ||||
| ## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth). | ||||
| # SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent" | ||||
|  | ||||
| ## Activate PKCE for the Auth Code flow. | ||||
| # SSO_PKCE=true | ||||
|  | ||||
| ## Regex for additional trusted Id token audience (by default only the client_id is trusted). | ||||
| # SSO_AUDIENCE_TRUSTED='^$' | ||||
|  | ||||
| ## Set your Client ID and Client Key | ||||
| # SSO_CLIENT_ID=11111 | ||||
| # SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA | ||||
|  | ||||
| ## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment. | ||||
| # SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' | ||||
|  | ||||
| ## Use sso only for authentication not the session lifecycle | ||||
| # SSO_AUTH_ONLY_NOT_SESSION=false | ||||
|  | ||||
| ## Client cache for discovery endpoint. Duration in seconds (0 to disable). | ||||
| # SSO_CLIENT_CACHE_EXPIRATION=0 | ||||
|  | ||||
| ## Log all the tokens, LOG_LEVEL=debug is required | ||||
| # SSO_DEBUG_TOKENS=false | ||||
|  | ||||
| ######################## | ||||
| ### MFA/2FA settings ### | ||||
| ######################## | ||||
| @@ -518,7 +579,7 @@ | ||||
| ## | ||||
| ## According to the RFC6238 (https://tools.ietf.org/html/rfc6238), | ||||
| ## we allow by default the TOTP code which was valid one step back and one in the future. | ||||
| ## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes. | ||||
| ## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes. | ||||
| ## You can disable this, so that only the current TOTP Code is allowed. | ||||
| ## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid. | ||||
| ## In any case, if a code has been used it can not be used again; also, codes which predate it will be invalid. | ||||
| @@ -558,7 +619,7 @@ | ||||
| # SMTP_AUTH_MECHANISM= | ||||
|  | ||||
| ## Server name sent during the SMTP HELO | ||||
| ## By default this value should be is on the machine's hostname, | ||||
| ## By default this value should be the machine's hostname, | ||||
| ## but might need to be changed in case it trips some anti-spam filters | ||||
| # HELO_NAME= | ||||
|  | ||||
|   | ||||
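
The new SSO block above introduces quite a few knobs, but only a handful are needed for a basic OpenID Connect login. The following is a minimal sketch rather than a definitive configuration; the authority URL, client ID and client secret are illustrative placeholders that must match whatever your identity provider issues.

```bash
# Minimal SSO sketch using the new .env.template options.
# The authority, client ID and secret below are placeholders.
SSO_ENABLED=true
# Keep master-password login available alongside SSO
SSO_ONLY=false
# Must serve ${SSO_AUTHORITY}/.well-known/openid-configuration
SSO_AUTHORITY=https://auth.example.com/realms/test
SSO_CLIENT_ID=vaultwarden
SSO_CLIENT_SECRET=change-me
# PKCE for the Auth Code flow (enabled by default)
SSO_PKCE=true
```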
							
								
								
									
.github/workflows/release.yml (37 changes, vendored)

							| @@ -10,33 +10,16 @@ on: | ||||
|       # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet | ||||
|       - '[1-2].[0-9]+.[0-9]+' | ||||
|  | ||||
| concurrency: | ||||
|   # Apply concurrency control only on the upstream repo | ||||
|   group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }} | ||||
|   # Don't cancel other runs when creating a tag | ||||
|   cancel-in-progress: ${{ github.ref_type == 'branch' }} | ||||
|  | ||||
| jobs: | ||||
|   # https://github.com/marketplace/actions/skip-duplicate-actions | ||||
|   # Some checks to determine if we need to continue with building a new docker. | ||||
|   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already. | ||||
|   skip_check: | ||||
|     # Only run this in the upstream repo and not on forks | ||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     name: Cancel older jobs when running | ||||
|     permissions: | ||||
|       actions: write | ||||
|     runs-on: ubuntu-24.04 | ||||
|     outputs: | ||||
|       should_skip: ${{ steps.skip_check.outputs.should_skip }} | ||||
|  | ||||
|     steps: | ||||
|       - name: Skip Duplicates Actions | ||||
|         id: skip_check | ||||
|         uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1 | ||||
|         with: | ||||
|           cancel_others: 'true' | ||||
|         # Only run this when not creating a tag | ||||
|         if: ${{ github.ref_type == 'branch' }} | ||||
|  | ||||
|   docker-build: | ||||
|     needs: skip_check | ||||
|     if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     name: Build Vaultwarden containers | ||||
|     if: ${{ github.repository == 'dani-garcia/vaultwarden' }} | ||||
|     permissions: | ||||
|       packages: write | ||||
|       contents: read | ||||
| @@ -120,7 +103,7 @@ jobs: | ||||
|  | ||||
|       # Login to Docker Hub | ||||
|       - name: Login to Docker Hub | ||||
|         uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
| @@ -136,7 +119,7 @@ jobs: | ||||
|  | ||||
|       # Login to GitHub Container Registry | ||||
|       - name: Login to GitHub Container Registry | ||||
|         uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: ghcr.io | ||||
|           username: ${{ github.repository_owner }} | ||||
| @@ -153,7 +136,7 @@ jobs: | ||||
|  | ||||
|       # Login to Quay.io | ||||
|       - name: Login to Quay.io | ||||
|         uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 | ||||
|         uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 | ||||
|         with: | ||||
|           registry: quay.io | ||||
|           username: ${{ secrets.QUAY_USERNAME }} | ||||
|   | ||||
							
								
								
									
.github/workflows/trivy.yml (2 changes, vendored)

							| @@ -48,6 +48,6 @@ jobs: | ||||
|           severity: CRITICAL,HIGH | ||||
|  | ||||
|       - name: Upload Trivy scan results to GitHub Security tab | ||||
|         uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4 | ||||
|         uses: github/codeql-action/upload-sarif@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9 | ||||
|         with: | ||||
|           sarif_file: 'trivy-results.sarif' | ||||
|   | ||||
							
								
								
									
.github/workflows/zizmor.yml (4 changes, vendored)

							| @@ -16,12 +16,12 @@ jobs: | ||||
|       security-events: write | ||||
|     steps: | ||||
|       - name: Checkout repository | ||||
|         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||||
|         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 | ||||
|         with: | ||||
|           persist-credentials: false | ||||
|  | ||||
|       - name: Run zizmor | ||||
|         uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1 | ||||
|         uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2 | ||||
|         with: | ||||
|           # intentionally not scanning the entire repository, | ||||
|           # since it contains integration tests. | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| --- | ||||
| repos: | ||||
| -   repo: https://github.com/pre-commit/pre-commit-hooks | ||||
|     rev: v5.0.0 | ||||
|     rev: v6.0.0 | ||||
|     hooks: | ||||
|     - id: check-yaml | ||||
|     - id: check-json | ||||
|   | ||||
							
								
								
									
Cargo.lock (995 changes, generated): file diff suppressed because it is too large

Cargo.toml (46 changes)

							| @@ -6,7 +6,7 @@ name = "vaultwarden" | ||||
| version = "1.0.0" | ||||
| authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] | ||||
| edition = "2021" | ||||
| rust-version = "1.86.0" | ||||
| rust-version = "1.87.0" | ||||
| resolver = "2" | ||||
|  | ||||
| repository = "https://github.com/dani-garcia/vaultwarden" | ||||
| @@ -34,6 +34,10 @@ enable_mimalloc = ["dep:mimalloc"] | ||||
| query_logger = ["dep:diesel_logger"] | ||||
| s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"] | ||||
|  | ||||
| # OIDC specific features | ||||
| oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"] | ||||
| oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"] | ||||
|  | ||||
| # Enable unstable features, requires nightly | ||||
| # Currently only used to enable rusts official ip support | ||||
| unstable = [] | ||||
| @@ -73,12 +77,12 @@ dashmap = "6.1.0" | ||||
|  | ||||
| # Async futures | ||||
| futures = "0.3.31" | ||||
| tokio = { version = "1.46.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | ||||
| tokio-util = { version = "0.7.15", features = ["compat"]} | ||||
| tokio = { version = "1.47.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } | ||||
| tokio-util = { version = "0.7.16", features = ["compat"]} | ||||
|  | ||||
| # A generic serialization/deserialization framework | ||||
| serde = { version = "1.0.219", features = ["derive"] } | ||||
| serde_json = "1.0.141" | ||||
| serde_json = "1.0.142" | ||||
|  | ||||
| # A safe, extensible ORM and Query builder | ||||
| diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] } | ||||
| @@ -97,7 +101,7 @@ ring = "0.17.14" | ||||
| subtle = "2.6.1" | ||||
|  | ||||
| # UUID generation | ||||
| uuid = { version = "1.17.0", features = ["v4"] } | ||||
| uuid = { version = "1.18.0", features = ["v4"] } | ||||
|  | ||||
| # Date and time libraries | ||||
| chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false } | ||||
| @@ -105,7 +109,7 @@ chrono-tz = "0.10.4" | ||||
| time = "0.3.41" | ||||
|  | ||||
| # Job scheduler | ||||
| job_scheduler_ng = "2.2.0" | ||||
| job_scheduler_ng = "2.3.0" | ||||
|  | ||||
| # Data encoding library Hex/Base32/Base64 | ||||
| data-encoding = "2.9.0" | ||||
| @@ -117,16 +121,20 @@ jsonwebtoken = "9.3.1" | ||||
| totp-lite = "2.0.1" | ||||
|  | ||||
| # Yubico Library | ||||
| yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false } | ||||
| yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false } | ||||
|  | ||||
| # WebAuthn libraries | ||||
| webauthn-rs = "0.3.2" | ||||
| # danger-allow-state-serialisation is needed to save the state in the db | ||||
| # danger-credential-internals is needed to support U2F to Webauthn migration | ||||
| webauthn-rs = { version = "0.5.2", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } | ||||
| webauthn-rs-proto = "0.5.2" | ||||
| webauthn-rs-core = "0.5.2" | ||||
|  | ||||
| # Handling of URL's for WebAuthn and favicons | ||||
| url = "2.5.4" | ||||
|  | ||||
| # Email libraries | ||||
| lettre = { version = "0.11.17", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false } | ||||
| lettre = { version = "0.11.18", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false } | ||||
| percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails | ||||
| email_address = "0.2.9" | ||||
|  | ||||
| @@ -134,7 +142,7 @@ email_address = "0.2.9" | ||||
| handlebars = { version = "6.3.2", features = ["dir_source"] } | ||||
|  | ||||
| # HTTP client (Used for favicons, version check, DUO and HIBP API) | ||||
| reqwest = { version = "0.12.22", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} | ||||
| reqwest = { version = "0.12.23", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} | ||||
| hickory-resolver = "0.25.2" | ||||
|  | ||||
| # Favicon extraction libraries | ||||
| @@ -158,8 +166,12 @@ openssl = "0.10.73" | ||||
| pico-args = "0.5.0" | ||||
|  | ||||
| # Macro ident concatenation | ||||
| pastey = "0.1.0" | ||||
| governor = "0.10.0" | ||||
| pastey = "0.1.1" | ||||
| governor = "0.10.1" | ||||
|  | ||||
| # OIDC for SSO | ||||
| openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] } | ||||
| mini-moka = "0.10.3" | ||||
|  | ||||
| # Check client versions for specific features. | ||||
| semver = "1.0.26" | ||||
| @@ -183,10 +195,10 @@ grass_compiler = { version = "0.13.4", default-features = false } | ||||
| opendal = { version = "0.54.0", features = ["services-fs"], default-features = false } | ||||
|  | ||||
| # For retrieving AWS credentials, including temporary SSO credentials | ||||
| anyhow = { version = "1.0.98", optional = true } | ||||
| aws-config = { version = "1.8.3", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } | ||||
| aws-credential-types = { version = "1.2.4", optional = true } | ||||
| aws-smithy-runtime-api = { version = "1.8.5", optional = true } | ||||
| anyhow = { version = "1.0.99", optional = true } | ||||
| aws-config = { version = "1.8.5", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } | ||||
| aws-credential-types = { version = "1.2.5", optional = true } | ||||
| aws-smithy-runtime-api = { version = "1.8.7", optional = true } | ||||
| http = { version = "1.3.1", optional = true } | ||||
| reqsign = { version = "0.16.5", optional = true } | ||||
|  | ||||
| @@ -271,6 +283,7 @@ clone_on_ref_ptr = "deny" | ||||
| equatable_if_let = "deny" | ||||
| filter_map_next = "deny" | ||||
| float_cmp_const = "deny" | ||||
| implicit_clone = "deny" | ||||
| inefficient_to_string = "deny" | ||||
| iter_on_empty_collections = "deny" | ||||
| iter_on_single_items = "deny" | ||||
| @@ -285,7 +298,6 @@ needless_continue = "deny" | ||||
| needless_lifetimes = "deny" | ||||
| option_option = "deny" | ||||
| string_add_assign = "deny" | ||||
| string_to_string = "deny" | ||||
| unnecessary_join = "deny" | ||||
| unnecessary_self_imports = "deny" | ||||
| unnested_or_patterns = "deny" | ||||
|   | ||||
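
The `oidc-accept-rfc3339-timestamps` and `oidc-accept-string-booleans` features added above are opt-in pass-throughs to the `openidconnect` crate for identity providers with non-standard token formats. A hedged example of a source build enabling them (the `sqlite` feature stands in for whichever database backend you normally select):

```bash
# Sketch: enable the new opt-in OIDC compatibility features at build time.
cargo build --release --features sqlite,oidc-accept-rfc3339-timestamps,oidc-accept-string-booleans
```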
| @@ -1,12 +1,12 @@ | ||||
| --- | ||||
| vault_version: "v2025.7.0" | ||||
| vault_image_digest: "sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e" | ||||
| vault_version: "v2025.7.2" | ||||
| vault_image_digest: "sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9" | ||||
| # Cross Compile Docker Helper Scripts v1.6.1 | ||||
| # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts | ||||
| # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags | ||||
| xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" | ||||
| rust_version: 1.88.0 # Rust version to be used | ||||
| debian_version: bookworm # Debian release name to be used | ||||
| rust_version: 1.89.0 # Rust version to be used | ||||
| debian_version: trixie # Debian release name to be used | ||||
| alpine_version: "3.22" # Alpine version to be used | ||||
| # For which platforms/architectures will we try to build images | ||||
| platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] | ||||
|   | ||||
| @@ -19,23 +19,23 @@ | ||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||
| #   click the tag name to view the digest of the image it currently points to. | ||||
| # - From the command line: | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e] | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] | ||||
| # | ||||
| # - Conversely, to get the tag name from the digest: | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.0] | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.2] | ||||
| # | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault | ||||
|  | ||||
| ########################## ALPINE BUILD IMAGES ########################## | ||||
| ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 | ||||
| ## And for Alpine we define all build images here, they will only be loaded when actually used | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.88.0 AS build_amd64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.88.0 AS build_arm64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.88.0 AS build_armv7 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.88.0 AS build_armv6 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7 | ||||
| FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6 | ||||
|  | ||||
| ########################## BUILD IMAGE ########################## | ||||
| # hadolint ignore=DL3006 | ||||
|   | ||||
| @@ -19,15 +19,15 @@ | ||||
| # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, | ||||
| #   click the tag name to view the digest of the image it currently points to. | ||||
| # - From the command line: | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e] | ||||
| #     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.2 | ||||
| #     [docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9] | ||||
| # | ||||
| # - Conversely, to get the tag name from the digest: | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.0] | ||||
| #     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 | ||||
| #     [docker.io/vaultwarden/web-vault:v2025.7.2] | ||||
| # | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault | ||||
| FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:e40b20eeffbcccb27db6c08c3aaa1cf7d3c92333f634dec26a077590e910e1c9 AS vault | ||||
|  | ||||
| ########################## Cross Compile Docker Helper Scripts ########################## | ||||
| ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts | ||||
| @@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd | ||||
|  | ||||
| ########################## BUILD IMAGE ########################## | ||||
| # hadolint ignore=DL3006 | ||||
| FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.88.0-slim-bookworm AS build | ||||
| FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build | ||||
| COPY --from=xx / / | ||||
| ARG TARGETARCH | ||||
| ARG TARGETVARIANT | ||||
| @@ -68,15 +68,11 @@ RUN apt-get update && \ | ||||
|     xx-apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         gcc \ | ||||
|         libmariadb3 \ | ||||
|         libpq-dev \ | ||||
|         libpq5 \ | ||||
|         libssl-dev \ | ||||
|         libmariadb-dev \ | ||||
|         zlib1g-dev && \ | ||||
|     # Force install arch dependend mariadb dev packages | ||||
|     # Installing them the normal way breaks several other packages (again) | ||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ | ||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ | ||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||
|  | ||||
| @@ -166,7 +162,7 @@ RUN source /env-cargo && \ | ||||
| # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' | ||||
| # | ||||
| # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim | ||||
| FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim | ||||
|  | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
| @@ -179,7 +175,7 @@ RUN mkdir /data && \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     apt-get clean && \ | ||||
|   | ||||
| @@ -86,15 +86,11 @@ RUN apt-get update && \ | ||||
|     xx-apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         gcc \ | ||||
|         libmariadb3 \ | ||||
|         libpq-dev \ | ||||
|         libpq5 \ | ||||
|         libssl-dev \ | ||||
|         libmariadb-dev \ | ||||
|         zlib1g-dev && \ | ||||
|     # Force install arch dependend mariadb dev packages | ||||
|     # Installing them the normal way breaks several other packages (again) | ||||
|     apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ | ||||
|     dpkg --force-all -i ./libmariadb-dev*.deb && \ | ||||
|     # Run xx-cargo early, since it sometimes seems to break when run at a later stage | ||||
|     echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo | ||||
| {% endif %} | ||||
| @@ -216,7 +212,7 @@ RUN mkdir /data && \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     apt-get clean && \ | ||||
|   | ||||
| @@ -10,7 +10,7 @@ proc-macro = true | ||||
|  | ||||
| [dependencies] | ||||
| quote = "1.0.40" | ||||
| syn = "2.0.104" | ||||
| syn = "2.0.105" | ||||
|  | ||||
| [lints] | ||||
| workspace = true | ||||
|   | ||||
							
								
								
									
migrations/mysql/2023-09-10-133000_add_sso/down.sql (1 change, new file)

							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
							
								
								
									
migrations/mysql/2023-09-10-133000_add_sso/up.sql (4 changes, new file)

							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
| 	state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|   	nonce               TEXT NOT NULL, | ||||
|   	redirect_uri 		TEXT NOT NULL, | ||||
|   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               VARCHAR(512) NOT NULL PRIMARY KEY, | ||||
|   	nonce               TEXT NOT NULL, | ||||
|     verifier            TEXT, | ||||
|   	redirect_uri 		TEXT NOT NULL, | ||||
|   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
							
								
								
									
migrations/mysql/2024-03-06-170000_add_sso_users/up.sql (7 changes, new file)

							| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          VARCHAR(768) NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,2 @@ | ||||
| ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; | ||||
| ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||
							
								
								
									
migrations/postgresql/2023-09-10-133000_add_sso/down.sql (1 change, new file)

							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
							
								
								
									
migrations/postgresql/2023-09-10-133000_add_sso/up.sql (4 changes, new file)

							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
| 	state               TEXT NOT NULL PRIMARY KEY, | ||||
|   	nonce               TEXT NOT NULL, | ||||
|   	redirect_uri 		TEXT NOT NULL, | ||||
|   	created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               TEXT NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|     state               TEXT NOT NULL PRIMARY KEY, | ||||
|     nonce               TEXT NOT NULL, | ||||
|     verifier            TEXT, | ||||
|     redirect_uri        TEXT NOT NULL, | ||||
|     created_at          TIMESTAMP NOT NULL DEFAULT now() | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT now(), | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,3 @@ | ||||
| ALTER TABLE sso_users | ||||
|   DROP CONSTRAINT "sso_users_user_uuid_fkey", | ||||
|   ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; | ||||
							
								
								
									
migrations/sqlite/2023-09-10-133000_add_sso/down.sql (1 change, new file)

							| @@ -0,0 +1 @@ | ||||
| DROP TABLE sso_nonce; | ||||
							
								
								
									
migrations/sqlite/2023-09-10-133000_add_sso/up.sql (4 changes, new file)

							| @@ -0,0 +1,4 @@ | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations DROP COLUMN invited_by_email; | ||||
| @@ -0,0 +1 @@ | ||||
| ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; | ||||
| @@ -0,0 +1,6 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   nonce               CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,8 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_nonce; | ||||
|  | ||||
| CREATE TABLE sso_nonce ( | ||||
|   state               TEXT NOT NULL PRIMARY KEY, | ||||
|   nonce               TEXT NOT NULL, | ||||
|   verifier            TEXT, | ||||
|   redirect_uri        TEXT NOT NULL, | ||||
|   created_at          DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP | ||||
| ); | ||||
| @@ -0,0 +1 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
							
								
								
									
migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql (7 changes, new file)

							| @@ -0,0 +1,7 @@ | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) | ||||
| ); | ||||
| @@ -0,0 +1,9 @@ | ||||
| DROP TABLE IF EXISTS sso_users; | ||||
|  | ||||
| CREATE TABLE sso_users ( | ||||
|   user_uuid           CHAR(36) NOT NULL PRIMARY KEY, | ||||
|   identifier          TEXT NOT NULL UNIQUE, | ||||
|   created_at          TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, | ||||
|  | ||||
|   FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE | ||||
| ); | ||||
							
								
								
									
playwright/.env.template (64 changes, new file)

							| @@ -0,0 +1,64 @@ | ||||
| ################################# | ||||
| ### Conf to run dev instances ### | ||||
| ################################# | ||||
| ENV=dev | ||||
| DC_ENV_FILE=.env | ||||
| COMPOSE_IGNORE_ORPHANS=True | ||||
| DOCKER_BUILDKIT=1 | ||||
|  | ||||
| ################ | ||||
| # Users Config # | ||||
| ################ | ||||
| TEST_USER=test | ||||
| TEST_USER_PASSWORD=${TEST_USER} | ||||
| TEST_USER_MAIL=${TEST_USER}@yopmail.com | ||||
|  | ||||
| TEST_USER2=test2 | ||||
| TEST_USER2_PASSWORD=${TEST_USER2} | ||||
| TEST_USER2_MAIL=${TEST_USER2}@yopmail.com | ||||
|  | ||||
| TEST_USER3=test3 | ||||
| TEST_USER3_PASSWORD=${TEST_USER3} | ||||
| TEST_USER3_MAIL=${TEST_USER3}@yopmail.com | ||||
|  | ||||
| ################### | ||||
| # Keycloak Config # | ||||
| ################### | ||||
| KEYCLOAK_ADMIN=admin | ||||
| KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||
| KC_HTTP_HOST=127.0.0.1 | ||||
| KC_HTTP_PORT=8080 | ||||
|  | ||||
| # Script parameters (use Keycloak and Vaultwarden config too) | ||||
| TEST_REALM=test | ||||
| DUMMY_REALM=dummy | ||||
| DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||
|  | ||||
| ###################### | ||||
| # Vaultwarden Config # | ||||
| ###################### | ||||
| ROCKET_ADDRESS=0.0.0.0 | ||||
| ROCKET_PORT=8000 | ||||
| DOMAIN=http://localhost:${ROCKET_PORT} | ||||
| LOG_LEVEL=info,oidcwarden::sso=debug | ||||
| I_REALLY_WANT_VOLATILE_STORAGE=true | ||||
|  | ||||
| SSO_ENABLED=true | ||||
| SSO_ONLY=false | ||||
| SSO_CLIENT_ID=warden | ||||
| SSO_CLIENT_SECRET=warden | ||||
| SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||
|  | ||||
| SMTP_HOST=127.0.0.1 | ||||
| SMTP_PORT=1025 | ||||
| SMTP_SECURITY=off | ||||
| SMTP_TIMEOUT=5 | ||||
| SMTP_FROM=vaultwarden@test | ||||
| SMTP_FROM_NAME=Vaultwarden | ||||
|  | ||||
| ######################################################## | ||||
| # DUMMY values for docker-compose to stop bothering us # | ||||
| ######################################################## | ||||
| MARIADB_PORT=3305 | ||||
| MYSQL_PORT=3307 | ||||
| POSTGRES_PORT=5432 | ||||
							
								
								
									
playwright/.gitignore (6 changes, vendored, new file)

							| @@ -0,0 +1,6 @@ | ||||
| logs | ||||
| node_modules/ | ||||
| /test-results/ | ||||
| /playwright-report/ | ||||
| /playwright/.cache/ | ||||
| temp | ||||
							
								
								
									
playwright/README.md (177 changes, new file)

							| @@ -0,0 +1,177 @@ | ||||
| # Integration tests | ||||
|  | ||||
| This allows running integration tests using [Playwright](https://playwright.dev/). | ||||
|  | ||||
| It uses its own `test.env` with different ports to not collide with a running dev instance. | ||||
|  | ||||
| ## Install | ||||
|  | ||||
| This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||
| Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers. | ||||
|  | ||||
| ### Running Playwright outside docker | ||||
|  | ||||
| It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change. | ||||
| You will additionally need `nodejs`; then run: | ||||
|  | ||||
| ```bash | ||||
| npm install | ||||
| npx playwright install-deps | ||||
| npx playwright install firefox | ||||
| ``` | ||||
|  | ||||
| ## Usage | ||||
|  | ||||
| To run all the tests: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright | ||||
| ``` | ||||
|  | ||||
| To force a rebuild of the Playwright image: | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright | ||||
| ``` | ||||
|  | ||||
| To access the UI, which lets you easily run tests individually and debug them if needed (this will not work in docker): | ||||
|  | ||||
| ```bash | ||||
| npx playwright test --ui | ||||
| ``` | ||||
|  | ||||
| ### DB | ||||
|  | ||||
| Projects are configured to allow running tests against a specific database only. | ||||
|  | ||||
| You can use: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite | ||||
| ``` | ||||
|  | ||||
| ### SSO | ||||
|  | ||||
| To run the SSO tests: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite | ||||
| ``` | ||||
|  | ||||
| ### Keep services running | ||||
|  | ||||
| If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests): | ||||
|  | ||||
| ```bash | ||||
| PW_KEEP_SERVICE_RUNNNING=true npx playwright test | ||||
| ``` | ||||
|  | ||||
| ### Running specific tests | ||||
|  | ||||
| To run a whole file you can: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login | ||||
| ``` | ||||
|  | ||||
| To run only a specific test (it might fail if it has dependencies): | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation" | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16 | ||||
| ``` | ||||
|  | ||||
| ## Writing scenario | ||||
|  | ||||
| When creating a new scenario, use the recorder to more easily identify elements | ||||
| (in general, try to rely on visible hints to identify elements and not hidden IDs). | ||||
| This does not start the server; you will need to start it manually. | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||
| npx playwright codegen "http://127.0.0.1:8003" | ||||
| ``` | ||||
|  | ||||
| ## Override web-vault | ||||
|  | ||||
| It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit. | ||||
|  | ||||
| The simplest way is to uncomment and set `PW_WV_REPO_URL` and `PW_WV_COMMIT_HASH` in `test.env`. | ||||
| Ensure that the image is built with: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Vaultwarden | ||||
| ``` | ||||
|  | ||||
| You can check the result by running: | ||||
|  | ||||
| ```bash | ||||
| DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden | ||||
| ``` | ||||
|  | ||||
| # OpenID Connect test setup | ||||
|  | ||||
| Additionally, this `docker-compose` template allows running Vaultwarden, | ||||
| [Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC. | ||||
|  | ||||
| ## Setup | ||||
|  | ||||
| This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/). | ||||
| First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`). | ||||
|  | ||||
| ## Usage | ||||
|  | ||||
| Then start the stack (the `profile` is required to run `Vaultwarden`): | ||||
|  | ||||
| ```bash | ||||
| > docker compose --profile vaultwarden --env-file .env up | ||||
| .... | ||||
| keycloakSetup_1  | Logging into http://127.0.0.1:8080 as user admin of realm master | ||||
| keycloakSetup_1  | Created new realm with id 'test' | ||||
| keycloakSetup_1  | 74af4933-e386-4e64-ba15-a7b61212c45e | ||||
| oidc_keycloakSetup_1 exited with code 0 | ||||
| ``` | ||||
|  | ||||
| Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates the correct setup of the Keycloak realm, client and user | ||||
| (it is normal for this container to stop once the configuration is done). | ||||
|  | ||||
| Then you can access: | ||||
|  | ||||
| - `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`. | ||||
| - `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin` | ||||
| - `Maildev` on http://0.0.0.0:1080 | ||||
|  | ||||
| To proceed with an SSO login, after you enter the email, the SSO button should be visible on the screen prompting for the `Master Password`. | ||||
| To use your computer's external IP (for example when testing with a phone), you will have to configure `KC_HTTP_HOST` and `DOMAIN`; see the sketch after this file's diff. | ||||
|  | ||||
| ## Running only Keycloak | ||||
|  | ||||
| You can run just `Keycloak` with `--profile keycloak`: | ||||
|  | ||||
| ```bash | ||||
| > docker compose --profile keycloak --env-file .env up | ||||
| ``` | ||||
| When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases). | ||||
|  | ||||
| ## Rebuilding the Vaultwarden image | ||||
|  | ||||
| To force rebuilding the Vaultwarden image you can run: | ||||
|  | ||||
| ```bash | ||||
| docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden | ||||
| ``` | ||||
|  | ||||
| ## Configuration | ||||
|  | ||||
| All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template). | ||||
| The content of the file will be loaded as environment variables in all containers. | ||||
|  | ||||
| - `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)). | ||||
| - All `Vaultwarden` configuration can be set (EX: `SMTP_*`) | ||||
|  | ||||
| ## Cleanup | ||||
|  | ||||
| Use `docker compose --profile vaultwarden down`. | ||||
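
As the README above notes for external-IP testing, `KC_HTTP_HOST` and `DOMAIN` have to be changed together. A hypothetical `.env` override (the address is a placeholder for your machine's LAN IP) could look like this:

```bash
# Hypothetical .env override to reach Keycloak and Vaultwarden from another device.
# Replace 192.168.1.50 with your machine's LAN address.
KC_HTTP_HOST=192.168.1.50
DOMAIN=http://192.168.1.50:${ROCKET_PORT}
```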
							
								
								
									
playwright/compose/keycloak/Dockerfile (40 changes, new file)

							| @@ -0,0 +1,40 @@ | ||||
| FROM docker.io/library/debian:bookworm-slim as build | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
| ARG KEYCLOAK_VERSION | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl wget \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
| RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz | ||||
|  | ||||
| FROM docker.io/library/debian:bookworm-slim | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
| ARG KEYCLOAK_VERSION | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl wget \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| ARG JAVA_URL | ||||
| ARG JAVA_VERSION | ||||
|  | ||||
| ENV JAVA_VERSION=${JAVA_VERSION} | ||||
|  | ||||
| RUN mkdir -p /opt/openjdk && cd /opt/openjdk \ | ||||
|     && wget -c "${JAVA_URL}"  -O - | tar -xz | ||||
|  | ||||
| WORKDIR / | ||||
|  | ||||
| COPY setup.sh /setup.sh | ||||
| COPY --from=build /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin | ||||
|  | ||||
| CMD "/setup.sh" | ||||
							
								
								
									
playwright/compose/keycloak/setup.sh (36 changes, new executable file)

							| @@ -0,0 +1,36 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| export PATH=/opt/keycloak/bin:/opt/openjdk/jdk-${JAVA_VERSION}/bin:$PATH | ||||
| export JAVA_HOME=/opt/openjdk/jdk-${JAVA_VERSION} | ||||
|  | ||||
| STATUS_CODE=0 | ||||
| while [[ "$STATUS_CODE" != "404" ]] ; do | ||||
|     echo "Will retry in 2 seconds" | ||||
|     sleep 2 | ||||
|  | ||||
|     STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}"  "$DUMMY_AUTHORITY") | ||||
|  | ||||
|     if [[ "$STATUS_CODE" = "200" ]]; then | ||||
|         echo "Setup should already be done. Will not run." | ||||
|         exit 0 | ||||
|     fi | ||||
| done | ||||
|  | ||||
| set -e | ||||
|  | ||||
| kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli | ||||
|  | ||||
| kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||
| kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i | ||||
|  | ||||
| TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n | ||||
|  | ||||
| TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n | ||||
|  | ||||
| TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL"  -s emailVerified=true -s enabled=true -i) | ||||
| kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n | ||||
|  | ||||
| # Dummy realm to mark end of setup | ||||
| kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600" | ||||
							
								
								
									
playwright/compose/playwright/Dockerfile (40 changes, new file)

							| @@ -0,0 +1,40 @@ | ||||
| FROM docker.io/library/debian:bookworm-slim | ||||
|  | ||||
| SHELL ["/bin/bash", "-o", "pipefail", "-c"] | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
|  | ||||
| RUN apt-get update \ | ||||
|     && apt-get install -y ca-certificates curl \ | ||||
|     && curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \ | ||||
|     && chmod a+r /etc/apt/keyrings/docker.asc \ | ||||
|     && echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian bookworm stable" | tee /etc/apt/sources.list.d/docker.list \ | ||||
|     && apt-get update \ | ||||
|     && apt-get install -y --no-install-recommends \ | ||||
|         containerd.io \ | ||||
|         docker-buildx-plugin \ | ||||
|         docker-ce \ | ||||
|         docker-ce-cli \ | ||||
|         docker-compose-plugin \ | ||||
|         git \ | ||||
|         libmariadb-dev-compat \ | ||||
|         libpq5 \ | ||||
|         nodejs \ | ||||
|         npm \ | ||||
|         openssl \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| RUN mkdir /playwright | ||||
| WORKDIR /playwright | ||||
|  | ||||
| COPY package.json . | ||||
| RUN npm install && npx playwright install-deps && npx playwright install firefox | ||||
|  | ||||
| COPY docker-compose.yml test.env ./ | ||||
| COPY compose ./compose | ||||
|  | ||||
| COPY *.ts test.env ./ | ||||
| COPY tests ./tests | ||||
|  | ||||
| ENTRYPOINT ["/usr/bin/npx", "playwright"] | ||||
| CMD ["test"] | ||||
							
								
								
									
playwright/compose/warden/Dockerfile (new file, 40 lines)
							| @@ -0,0 +1,40 @@ | ||||
| FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt | ||||
|  | ||||
| FROM node:22-trixie AS build | ||||
|  | ||||
| ARG REPO_URL | ||||
| ARG COMMIT_HASH | ||||
|  | ||||
| ENV REPO_URL=$REPO_URL | ||||
| ENV COMMIT_HASH=$COMMIT_HASH | ||||
|  | ||||
| COPY --from=prebuilt /web-vault /web-vault | ||||
|  | ||||
| COPY build.sh /build.sh | ||||
| RUN /build.sh | ||||
|  | ||||
| ######################## RUNTIME IMAGE  ######################## | ||||
| FROM docker.io/library/debian:trixie-slim | ||||
|  | ||||
| ENV DEBIAN_FRONTEND=noninteractive | ||||
|  | ||||
| # Create the data folder and install the needed libraries | ||||
| RUN mkdir /data && \ | ||||
|     apt-get update && apt-get install -y \ | ||||
|         --no-install-recommends \ | ||||
|         ca-certificates \ | ||||
|         curl \ | ||||
|         libmariadb-dev \ | ||||
|         libpq5 \ | ||||
|         openssl && \ | ||||
|     rm -rf /var/lib/apt/lists/* | ||||
|  | ||||
| # Copy the start script and binary from the prebuilt image, | ||||
| # and the freshly built web-vault from the "build" stage, into the current stage | ||||
| WORKDIR / | ||||
|  | ||||
| COPY --from=prebuilt /start.sh . | ||||
| COPY --from=prebuilt /vaultwarden . | ||||
| COPY --from=build /web-vault ./web-vault | ||||
|  | ||||
| ENTRYPOINT ["/start.sh"] | ||||
							
								
								
									
playwright/compose/warden/build.sh (new executable file, 23 lines)
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| echo $REPO_URL | ||||
| echo $COMMIT_HASH | ||||
|  | ||||
| if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then | ||||
|     rm -rf /web-vault | ||||
|  | ||||
|     mkdir bw_web_builds; | ||||
|     cd bw_web_builds; | ||||
|  | ||||
|     git -c init.defaultBranch=main init | ||||
|     git remote add origin "$REPO_URL" | ||||
|     git fetch --depth 1 origin "$COMMIT_HASH" | ||||
|     git -c advice.detachedHead=false checkout FETCH_HEAD | ||||
|  | ||||
|     export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2) | ||||
|     ./scripts/checkout_web_vault.sh | ||||
|     ./scripts/build_web_vault.sh | ||||
|     printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json | ||||
|  | ||||
|     mv ./web-vault/apps/web/build /web-vault | ||||
| fi | ||||
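
The script rebuilds the web-vault only when both REPO_URL and COMMIT_HASH are set; otherwise the prebuilt /web-vault from the base image is kept. The two values are wired in through docker-compose.yml from PW_WV_REPO_URL and PW_WV_COMMIT_HASH (see the commented example in test.env). A sketch of forcing a custom build, with a placeholder commit hash:

    # sketch: build the test Vaultwarden image against a specific bw_web_builds commit
    export PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git
    export PW_WV_COMMIT_HASH="<commit-sha>"   # placeholder, substitute a real commit
    docker compose --profile playwright --env-file test.env build VaultwardenPrebuild Vaultwarden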
							
								
								
									
playwright/docker-compose.yml (new file, 124 lines)
							| @@ -0,0 +1,124 @@ | ||||
| services: | ||||
|   VaultwardenPrebuild: | ||||
|     profiles: ["playwright", "vaultwarden"] | ||||
|     container_name: playwright_oidc_vaultwarden_prebuilt | ||||
|     image: playwright_oidc_vaultwarden_prebuilt | ||||
|     build: | ||||
|       context: .. | ||||
|       dockerfile: Dockerfile | ||||
|     entrypoint: /bin/bash | ||||
|     restart: "no" | ||||
|  | ||||
|   Vaultwarden: | ||||
|     profiles: ["playwright", "vaultwarden"] | ||||
|     container_name: playwright_oidc_vaultwarden-${ENV:-dev} | ||||
|     image: playwright_oidc_vaultwarden-${ENV:-dev} | ||||
|     network_mode: "host" | ||||
|     build: | ||||
|       context: compose/warden | ||||
|       dockerfile: Dockerfile | ||||
|       args: | ||||
|         REPO_URL: ${PW_WV_REPO_URL:-} | ||||
|         COMMIT_HASH: ${PW_WV_COMMIT_HASH:-} | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
|     environment: | ||||
|       - DATABASE_URL | ||||
|       - I_REALLY_WANT_VOLATILE_STORAGE | ||||
|       - LOG_LEVEL | ||||
|       - LOGIN_RATELIMIT_MAX_BURST | ||||
|       - SMTP_HOST | ||||
|       - SMTP_FROM | ||||
|       - SMTP_DEBUG | ||||
|       - SSO_DEBUG_TOKENS | ||||
|       - SSO_FRONTEND | ||||
|       - SSO_ENABLED | ||||
|       - SSO_ONLY | ||||
|     restart: "no" | ||||
|     depends_on: | ||||
|       - VaultwardenPrebuild | ||||
|  | ||||
|   Playwright: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_oidc_playwright | ||||
|     image: playwright_oidc_playwright | ||||
|     network_mode: "host" | ||||
|     build: | ||||
|       context: . | ||||
|       dockerfile: compose/playwright/Dockerfile | ||||
|     environment: | ||||
|       - PW_WV_REPO_URL | ||||
|       - PW_WV_COMMIT_HASH | ||||
|     restart: "no" | ||||
|     volumes: | ||||
|       - /var/run/docker.sock:/var/run/docker.sock | ||||
|       - ..:/project | ||||
|  | ||||
|   Mariadb: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_mariadb | ||||
|     image: mariadb:11.2.4 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] | ||||
|       start_period: 10s | ||||
|       interval: 10s | ||||
|     ports: | ||||
|       - ${MARIADB_PORT}:3306 | ||||
|  | ||||
|   Mysql: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_mysql | ||||
|     image: mysql:8.4.1 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] | ||||
|       start_period: 10s | ||||
|       interval: 10s | ||||
|     ports: | ||||
|       - ${MYSQL_PORT}:3306 | ||||
|  | ||||
|   Postgres: | ||||
|     profiles: ["playwright"] | ||||
|     container_name: playwright_postgres | ||||
|     image: postgres:16.3 | ||||
|     env_file: test.env | ||||
|     healthcheck: | ||||
|       test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"] | ||||
|       start_period: 20s | ||||
|       interval: 30s | ||||
|     ports: | ||||
|       - ${POSTGRES_PORT}:5432 | ||||
|  | ||||
|   Maildev: | ||||
|     profiles: ["vaultwarden", "maildev"] | ||||
|     container_name: maildev | ||||
|     image: timshel/maildev:3.0.4 | ||||
|     ports: | ||||
|       - ${SMTP_PORT}:1025 | ||||
|       - 1080:1080 | ||||
|  | ||||
|   Keycloak: | ||||
|     profiles: ["keycloak", "vaultwarden"] | ||||
|     container_name: keycloak-${ENV:-dev} | ||||
|     image: quay.io/keycloak/keycloak:25.0.4 | ||||
|     network_mode: "host" | ||||
|     command: | ||||
|       - start-dev | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
|  | ||||
|   KeycloakSetup: | ||||
|     profiles: ["keycloak", "vaultwarden"] | ||||
|     container_name: keycloakSetup-${ENV:-dev} | ||||
|     image: keycloak_setup-${ENV:-dev} | ||||
|     build: | ||||
|       context: compose/keycloak | ||||
|       dockerfile: Dockerfile | ||||
|       args: | ||||
|         KEYCLOAK_VERSION: 25.0.4 | ||||
|         JAVA_URL: https://download.java.net/java/GA/jdk21.0.2/f2283984656d49d69e91c558476027ac/13/GPL/openjdk-21.0.2_linux-x64_bin.tar.gz | ||||
|         JAVA_VERSION: 21.0.2 | ||||
|     network_mode: "host" | ||||
|     depends_on: | ||||
|       - Keycloak | ||||
|     restart: "no" | ||||
|     env_file: ${DC_ENV_FILE:-.env} | ||||
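
The services are grouped by compose profile: playwright brings up the full test matrix, while vaultwarden (together with keycloak and maildev) is meant as a manual development stack. A sketch of starting the manual stack, assuming the pass-through variables the Vaultwarden service expects are supplied inline:

    # sketch: manual SSO stack (Vaultwarden + Keycloak + Maildev) with volatile SQLite storage
    I_REALLY_WANT_VOLATILE_STORAGE=true SSO_ENABLED=true docker compose --profile vaultwarden --env-file test.env up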
							
								
								
									
playwright/global-setup.ts (new file, 22 lines)
							| @@ -0,0 +1,22 @@ | ||||
| import { firefox, type FullConfig } from '@playwright/test'; | ||||
| import { execSync } from 'node:child_process'; | ||||
| import fs from 'fs'; | ||||
|  | ||||
| const utils = require('./global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| async function globalSetup(config: FullConfig) { | ||||
|     // Are we running inside Docker with the project mounted? | ||||
|     const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : "."); | ||||
|     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, { | ||||
|         env: { ...process.env }, | ||||
|         stdio: "inherit" | ||||
|     }); | ||||
|     execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, { | ||||
|         env: { ...process.env }, | ||||
|         stdio: "inherit" | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export default globalSetup; | ||||
							
								
								
									
playwright/global-utils.ts (new file, 246 lines)
							| @@ -0,0 +1,246 @@ | ||||
| import { expect, type Browser, type Page, type TestInfo } from '@playwright/test'; | ||||
| import { EventEmitter } from "events"; | ||||
| import { type Mail, MailServer } from 'maildev'; | ||||
| import { execSync } from 'node:child_process'; | ||||
|  | ||||
| import dotenv from 'dotenv'; | ||||
| import dotenvExpand from 'dotenv-expand'; | ||||
|  | ||||
| const fs = require("fs"); | ||||
| const { spawn } = require('node:child_process'); | ||||
|  | ||||
| export function loadEnv(){ | ||||
|     var myEnv = dotenv.config({ path: 'test.env' }); | ||||
|     dotenvExpand.expand(myEnv); | ||||
|  | ||||
|     return { | ||||
|         user1: { | ||||
|             email: process.env.TEST_USER_MAIL, | ||||
|             name: process.env.TEST_USER, | ||||
|             password: process.env.TEST_USER_PASSWORD, | ||||
|         }, | ||||
|         user2: { | ||||
|             email: process.env.TEST_USER2_MAIL, | ||||
|             name: process.env.TEST_USER2, | ||||
|             password: process.env.TEST_USER2_PASSWORD, | ||||
|         }, | ||||
|         user3: { | ||||
|             email: process.env.TEST_USER3_MAIL, | ||||
|             name: process.env.TEST_USER3, | ||||
|             password: process.env.TEST_USER3_PASSWORD, | ||||
|         }, | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function waitFor(url: string, browser: Browser) { | ||||
|     var ready = false; | ||||
|     var context; | ||||
|  | ||||
|     do { | ||||
|         try { | ||||
|             context = await browser.newContext(); | ||||
|             const page = await context.newPage(); | ||||
|             await page.waitForTimeout(500); | ||||
|             const result = await page.goto(url); | ||||
|             ready = result.status() === 200; | ||||
|         } catch(e) { | ||||
|             if( !e.message.includes("CONNECTION_REFUSED") ){ | ||||
|                 throw e; | ||||
|             } | ||||
|         } finally { | ||||
|             await context.close(); | ||||
|         } | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| export function startComposeService(serviceName: string){ | ||||
|     console.log(`Starting ${serviceName}`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env  up -d ${serviceName}`); | ||||
| } | ||||
|  | ||||
| export function stopComposeService(serviceName: string){ | ||||
|     console.log(`Stopping ${serviceName}`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env  stop ${serviceName}`); | ||||
| } | ||||
|  | ||||
| function wipeSqlite(){ | ||||
|     console.log(`Delete Vaultwarden container to wipe sqlite`); | ||||
|     execSync(`docker compose --env-file test.env stop Vaultwarden`); | ||||
|     execSync(`docker compose --env-file test.env rm -f Vaultwarden`); | ||||
| } | ||||
|  | ||||
| async function wipeMariaDB(){ | ||||
|     var mysql = require('mysql2/promise'); | ||||
|     var ready = false; | ||||
|     var connection; | ||||
|  | ||||
|     do { | ||||
|         try { | ||||
|             connection = await mysql.createConnection({ | ||||
|                 user: process.env.MARIADB_USER, | ||||
|                 host: "127.0.0.1", | ||||
|                 database: process.env.MARIADB_DATABASE, | ||||
|                 password: process.env.MARIADB_PASSWORD, | ||||
|                 port: process.env.MARIADB_PORT, | ||||
|             }); | ||||
|  | ||||
|             await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`); | ||||
|             await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`); | ||||
|             console.log('Successfully wiped mariadb'); | ||||
|             ready = true; | ||||
|         } catch (err) { | ||||
|             console.log(`Error when wiping mariadb: ${err}`); | ||||
|         } finally { | ||||
|             if( connection ){ | ||||
|                 connection.end(); | ||||
|             } | ||||
|         } | ||||
|         await new Promise(r => setTimeout(r, 1000)); | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| async function wipeMysqlDB(){ | ||||
|     var mysql = require('mysql2/promise'); | ||||
|     var ready = false; | ||||
|     var connection; | ||||
|  | ||||
|     do{ | ||||
|         try { | ||||
|             connection = await mysql.createConnection({ | ||||
|                 user: process.env.MYSQL_USER, | ||||
|                 host: "127.0.0.1", | ||||
|                 database: process.env.MYSQL_DATABASE, | ||||
|                 password: process.env.MYSQL_PASSWORD, | ||||
|                 port: process.env.MYSQL_PORT, | ||||
|             }); | ||||
|  | ||||
|             await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`); | ||||
|             await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`); | ||||
|             console.log('Successfully wiped mysql'); | ||||
|             ready = true; | ||||
|         } catch (err) { | ||||
|             console.log(`Error when wiping mysql: ${err}`); | ||||
|         } finally { | ||||
|             if( connection ){ | ||||
|                 connection.end(); | ||||
|             } | ||||
|         } | ||||
|         await new Promise(r => setTimeout(r, 1000)); | ||||
|     } while(!ready); | ||||
| } | ||||
|  | ||||
| async function wipePostgres(){ | ||||
|     const { Client } = require('pg'); | ||||
|  | ||||
|     const client = new Client({ | ||||
|         user: process.env.POSTGRES_USER, | ||||
|         host: "127.0.0.1", | ||||
|         database: "postgres", | ||||
|         password: process.env.POSTGRES_PASSWORD, | ||||
|         port: process.env.POSTGRES_PORT, | ||||
|     }); | ||||
|  | ||||
|     try { | ||||
|         await client.connect(); | ||||
|         await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`); | ||||
|         await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`); | ||||
|         console.log('Successfully wiped postgres'); | ||||
|     } catch (err) { | ||||
|         console.log(`Error when wiping postgres: ${err}`); | ||||
|     } finally { | ||||
|         client.end(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| function dbConfig(testInfo: TestInfo){ | ||||
|     switch(testInfo.project.name) { | ||||
|         case "postgres": | ||||
|         case "sso-postgres": | ||||
|             return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` }; | ||||
|         case "mariadb": | ||||
|         case "sso-mariadb": | ||||
|             return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` }; | ||||
|         case "mysql": | ||||
|         case "sso-mysql": | ||||
|             return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}`}; | ||||
|         case "sqlite": | ||||
|         case "sso-sqlite": | ||||
|             return { I_REALLY_WANT_VOLATILE_STORAGE: true }; | ||||
|         default: | ||||
|             throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| /** | ||||
|  *  All parameters passed in `env` must also be listed under the Vaultwarden service's `environment:` entries in docker-compose.yml | ||||
|  **/ | ||||
| export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: boolean = true) { | ||||
|     if( resetDB ){ | ||||
|         switch(testInfo.project.name) { | ||||
|             case "postgres": | ||||
|             case "sso-postgres": | ||||
|                 await wipePostgres(); | ||||
|                 break; | ||||
|             case "mariadb": | ||||
|             case "sso-mariadb": | ||||
|                 await wipeMariaDB(); | ||||
|                 break; | ||||
|             case "mysql": | ||||
|             case "sso-mysql": | ||||
|                 await wipeMysqlDB(); | ||||
|                 break; | ||||
|             case "sqlite": | ||||
|             case "sso-sqlite": | ||||
|                 wipeSqlite(); | ||||
|                 break; | ||||
|             default: | ||||
|                 throw new Error(`Unknown database name: ${testInfo.project.name}`); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     console.log(`Starting Vaultwarden`); | ||||
|     execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, { | ||||
|         env: { ...env, ...dbConfig(testInfo) }, | ||||
|     }); | ||||
|     await waitFor("/", browser); | ||||
|     console.log(`Vaultwarden running on: ${process.env.DOMAIN}`); | ||||
| } | ||||
|  | ||||
| export async function stopVault(force: boolean = false) { | ||||
|     if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||
|         console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`); | ||||
|     } else { | ||||
|         console.log(`Vaultwarden stopping`); | ||||
|         execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: boolean = true) { | ||||
|     stopVault(true); | ||||
|     return startVault(page.context().browser(), testInfo, env, resetDB); | ||||
| } | ||||
|  | ||||
| export async function checkNotification(page: Page, hasText: string) { | ||||
|     await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible(); | ||||
|     await page.locator('bit-toast').filter({ hasText }).getByRole('button').click(); | ||||
|     await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0); | ||||
| } | ||||
|  | ||||
| export async function cleanLanding(page: Page) { | ||||
|     await page.goto('/', { waitUntil: 'domcontentloaded' }); | ||||
|     await expect(page.getByRole('button').nth(0)).toBeVisible(); | ||||
|  | ||||
|     const logged = await page.getByRole('button', { name: 'Log out' }).count(); | ||||
|     if( logged > 0 ){ | ||||
|         await page.getByRole('button', { name: 'Log out' }).click(); | ||||
|         await page.getByRole('button', { name: 'Log out' }).click(); | ||||
|     } | ||||
| } | ||||
|  | ||||
| export async function logout(test: Test, page: Page, user: { name: string }) { | ||||
|     await test.step('logout', async () => { | ||||
|         await page.getByRole('button', { name: user.name, exact: true }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Log out' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
							
								
								
									
playwright/package-lock.json (new generated file, 2594 lines; diff suppressed because it is too large)

playwright/package.json (new file, 21 lines)
							| @@ -0,0 +1,21 @@ | ||||
| { | ||||
|     "name": "scenarios", | ||||
|     "version": "1.0.0", | ||||
|     "description": "", | ||||
|     "main": "index.js", | ||||
|     "scripts": {}, | ||||
|     "keywords": [], | ||||
|     "author": "", | ||||
|     "license": "ISC", | ||||
|     "devDependencies": { | ||||
|         "@playwright/test": "^1.54.2", | ||||
|         "dotenv": "^16.6.1", | ||||
|         "dotenv-expand": "^12.0.2", | ||||
|         "maildev": "npm:@timshel_npm/maildev@^3.2.1" | ||||
|     }, | ||||
|     "dependencies": { | ||||
|         "mysql2": "^3.14.3", | ||||
|         "otpauth": "^9.4.0", | ||||
|         "pg": "^8.16.3" | ||||
|     } | ||||
| } | ||||
							
								
								
									
playwright/playwright.config.ts (new file, 143 lines)
							| @@ -0,0 +1,143 @@ | ||||
| import { defineConfig, devices } from '@playwright/test'; | ||||
| import { exec } from 'node:child_process'; | ||||
|  | ||||
| const utils = require('./global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| /** | ||||
|  * See https://playwright.dev/docs/test-configuration. | ||||
|  */ | ||||
| export default defineConfig({ | ||||
|     testDir: './.', | ||||
|     /* Run tests in files in parallel */ | ||||
|     fullyParallel: false, | ||||
|  | ||||
|     /* Fail the build on CI if you accidentally left test.only in the source code. */ | ||||
|     forbidOnly: !!process.env.CI, | ||||
|  | ||||
|     retries: 0, | ||||
|     workers: 1, | ||||
|  | ||||
|     /* Reporter to use. See https://playwright.dev/docs/test-reporters */ | ||||
|     reporter: 'html', | ||||
|  | ||||
|     /* Long global timeout for complex tests | ||||
|      * but short action/navigation/expect timeouts so that failures point at the specific step (raise them locally if needed). | ||||
|      */ | ||||
|     timeout: 120 * 1000, | ||||
|     actionTimeout: 20 * 1000, | ||||
|     navigationTimeout: 20 * 1000, | ||||
|     expect: { timeout: 20 * 1000 }, | ||||
|  | ||||
|     /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ | ||||
|     use: { | ||||
|         /* Base URL to use in actions like `await page.goto('/')`. */ | ||||
|         baseURL: process.env.DOMAIN, | ||||
|         browserName: 'firefox', | ||||
|         locale: 'en-GB', | ||||
|         timezoneId: 'Europe/London', | ||||
|  | ||||
|         /* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */ | ||||
|         trace: 'on', | ||||
|         viewport: { | ||||
|             width: 1080, | ||||
|             height: 720, | ||||
|         }, | ||||
|         video: "on", | ||||
|     }, | ||||
|  | ||||
|     /* Configure projects for major browsers */ | ||||
|     projects: [ | ||||
|         { | ||||
|             name: 'mariadb-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Mariadb" }, | ||||
|             teardown: 'mariadb-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Mysql" }, | ||||
|             teardown: 'mysql-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres-setup', | ||||
|             testMatch: 'tests/setups/db-setup.ts', | ||||
|             use: { serviceName: "Postgres" }, | ||||
|             teardown: 'postgres-teardown', | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-setup', | ||||
|             testMatch: 'tests/setups/sso-setup.ts', | ||||
|             teardown: 'sso-teardown', | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'mariadb', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['mariadb-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['mysql-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['postgres-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sqlite', | ||||
|             testMatch: 'tests/*.spec.ts', | ||||
|             testIgnore: 'tests/sso_*.spec.ts', | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'sso-mariadb', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'mariadb-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-mysql', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'mysql-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-postgres', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup', 'postgres-setup'], | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-sqlite', | ||||
|             testMatch: 'tests/sso_*.spec.ts', | ||||
|             dependencies: ['sso-setup'], | ||||
|         }, | ||||
|  | ||||
|         { | ||||
|             name: 'mariadb-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Mariadb" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'mysql-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Mysql" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'postgres-teardown', | ||||
|             testMatch: 'tests/setups/db-teardown.ts', | ||||
|             use: { serviceName: "Postgres" }, | ||||
|         }, | ||||
|         { | ||||
|             name: 'sso-teardown', | ||||
|             testMatch: 'tests/setups/sso-teardown.ts', | ||||
|         }, | ||||
|     ], | ||||
|  | ||||
|     globalSetup: require.resolve('./global-setup'), | ||||
| }); | ||||
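
Each database backend is modelled as its own Playwright project with dedicated setup and teardown projects, so a single backend can be exercised in isolation. A hedged example of running locally, assuming npm install has been run in playwright/ and Docker is available for the DB and SSO setup projects:

    # sketch: run the regular suite against MariaDB only, or the SSO suite against SQLite
    npx playwright test --project=mariadb
    npx playwright test --project=sso-sqlite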
							
								
								
									
playwright/test.env (new file, 97 lines)
							| @@ -0,0 +1,97 @@ | ||||
| ################################################################## | ||||
| ### Shared Playwright conf test file Vaultwarden and Databases ### | ||||
| ################################################################## | ||||
|  | ||||
| ENV=test | ||||
| DC_ENV_FILE=test.env | ||||
| COMPOSE_IGNORE_ORPHANS=True | ||||
| DOCKER_BUILDKIT=1 | ||||
|  | ||||
| ##################### | ||||
| # Playwright Config # | ||||
| ##################### | ||||
| PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false} | ||||
| PW_SMTP_FROM=vaultwarden@playwright.test | ||||
|  | ||||
| ##################### | ||||
| # Maildev Config 	# | ||||
| ##################### | ||||
| MAILDEV_HTTP_PORT=1081 | ||||
| MAILDEV_SMTP_PORT=1026 | ||||
| MAILDEV_HOST=127.0.0.1 | ||||
|  | ||||
| ################ | ||||
| # Users Config # | ||||
| ################ | ||||
| TEST_USER=test | ||||
| TEST_USER_PASSWORD=Master Password | ||||
| TEST_USER_MAIL=${TEST_USER}@example.com | ||||
|  | ||||
| TEST_USER2=test2 | ||||
| TEST_USER2_PASSWORD=Master Password | ||||
| TEST_USER2_MAIL=${TEST_USER2}@example.com | ||||
|  | ||||
| TEST_USER3=test3 | ||||
| TEST_USER3_PASSWORD=Master Password | ||||
| TEST_USER3_MAIL=${TEST_USER3}@example.com | ||||
|  | ||||
| ################### | ||||
| # Keycloak Config # | ||||
| ################### | ||||
| KEYCLOAK_ADMIN=admin | ||||
| KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} | ||||
| KC_HTTP_HOST=127.0.0.1 | ||||
| KC_HTTP_PORT=8081 | ||||
|  | ||||
| # Script parameters (the setup script also uses the Keycloak and Vaultwarden config above) | ||||
| TEST_REALM=test | ||||
| DUMMY_REALM=dummy | ||||
| DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM} | ||||
|  | ||||
| ###################### | ||||
| # Vaultwarden Config # | ||||
| ###################### | ||||
| ROCKET_PORT=8003 | ||||
| DOMAIN=http://localhost:${ROCKET_PORT} | ||||
| LOG_LEVEL=info,oidcwarden::sso=debug | ||||
| LOGIN_RATELIMIT_MAX_BURST=100 | ||||
|  | ||||
| SMTP_SECURITY=off | ||||
| SMTP_PORT=${MAILDEV_SMTP_PORT} | ||||
| SMTP_FROM_NAME=Vaultwarden | ||||
| SMTP_TIMEOUT=5 | ||||
|  | ||||
| SSO_CLIENT_ID=warden | ||||
| SSO_CLIENT_SECRET=warden | ||||
| SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} | ||||
| SSO_DEBUG_TOKENS=true | ||||
|  | ||||
| # Custom web-vault build | ||||
| # PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git | ||||
| # PW_WV_COMMIT_HASH=a5f5390895516bce2f48b7baadb6dc399e5fe75a | ||||
|  | ||||
| ########################### | ||||
| # Docker MariaDb container# | ||||
| ########################### | ||||
| MARIADB_PORT=3307 | ||||
| MARIADB_ROOT_PASSWORD=warden | ||||
| MARIADB_USER=warden | ||||
| MARIADB_PASSWORD=warden | ||||
| MARIADB_DATABASE=warden | ||||
|  | ||||
| ########################### | ||||
| # Docker Mysql container# | ||||
| ########################### | ||||
| MYSQL_PORT=3309 | ||||
| MYSQL_ROOT_PASSWORD=warden | ||||
| MYSQL_USER=warden | ||||
| MYSQL_PASSWORD=warden | ||||
| MYSQL_DATABASE=warden | ||||
|  | ||||
| ############################ | ||||
| # Docker Postgres container# | ||||
| ############################ | ||||
| POSTGRES_PORT=5433 | ||||
| POSTGRES_USER=warden | ||||
| POSTGRES_PASSWORD=warden | ||||
| POSTGRES_DB=warden | ||||
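
Because PW_KEEP_SERVICE_RUNNNING defaults via ${PW_KEEP_SERVICE_RUNNNING:-false} and the helpers read it from process.env, it can be overridden from the calling shell for a single run, for example to keep the containers up after the tests for inspection (a sketch):

    # sketch: leave Vaultwarden and the database containers running after the run
    PW_KEEP_SERVICE_RUNNNING=true npx playwright test --project=sso-sqlite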
							
								
								
									
playwright/tests/collection.spec.ts (new file, 37 lines)
							| @@ -0,0 +1,37 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import { createAccount } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Create', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1); | ||||
|  | ||||
|     await test.step('Create Org', async () => { | ||||
|         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||
|         await page.getByLabel('Organisation name (required)').fill('Test'); | ||||
|         await page.getByRole('button', { name: 'Submit' }).click(); | ||||
|         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Organisation created'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Create Collection', async () => { | ||||
|         await page.getByRole('link', { name: 'Collections' }).click(); | ||||
|         await page.getByRole('button', { name: 'New' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Collection' }).click(); | ||||
|         await page.getByLabel('Name (required)').fill('RandomCollec'); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'Created collection RandomCollec'); | ||||
|         await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
							
								
								
									
playwright/tests/login.smtp.spec.ts (new file, 100 lines)
							| @@ -0,0 +1,100 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| const utils = require('../global-utils'); | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
| import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailserver; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailserver = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailserver.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     if( mailserver ){ | ||||
|         await mailserver.close(); | ||||
|     } | ||||
| }); | ||||
|  | ||||
| test('Account creation', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await createAccount(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Login', async ({ context, page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     await test.step('verify email', async () => { | ||||
|         await page.getByText('Verify your account\'s email').click(); | ||||
|         await expect(page.getByText('Verify your account\'s email')).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Send email' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Check your email inbox for a verification link'); | ||||
|  | ||||
|         const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email"); | ||||
|         expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM); | ||||
|  | ||||
|         const page2 = await context.newPage(); | ||||
|         await page2.setContent(verify.html); | ||||
|         const link = await page2.getByTestId("verify").getAttribute("href"); | ||||
|         await page2.close(); | ||||
|  | ||||
|         await page.goto(link); | ||||
|         await utils.checkNotification(page, 'Account email verified'); | ||||
|     }); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Activate 2fa', async ({ page }) => { | ||||
|     const emails = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     await activateEmail(test, page, users.user1, emails); | ||||
|  | ||||
|     emails.close(); | ||||
| }); | ||||
|  | ||||
| test('2fa', async ({ page }) => { | ||||
|     const emails = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await test.step('login', async () => { | ||||
|         await page.goto('/'); | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByLabel('Master password').fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|         const code = await retrieveEmailCode(test, page, emails); | ||||
|         await page.getByLabel(/Verification code/).fill(code); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         await expect(page).toHaveTitle(/Vaults/); | ||||
|     }) | ||||
|  | ||||
|     await disableEmail(test, page, users.user1); | ||||
|  | ||||
|     emails.close(); | ||||
| }); | ||||
							
								
								
									
playwright/tests/login.spec.ts (new file, 51 lines)
							| @@ -0,0 +1,51 @@ | ||||
| import { test, expect, type Page, type TestInfo } from '@playwright/test'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
| import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
| let totp; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, {}); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Account creation', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Master password login', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Authenticator 2fa', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     let totp = await activateTOTP(test, page, users.user1); | ||||
|  | ||||
|     await utils.logout(test, page, users.user1); | ||||
|  | ||||
|     await test.step('login', async () => { | ||||
|         let timestamp = Date.now(); // Needed to use the next token | ||||
|         timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByLabel('Master password').fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|         await page.getByLabel(/Verification code/).fill(totp.generate({timestamp})); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     }); | ||||
|  | ||||
|     await disableTOTP(test, page, users.user1); | ||||
| }); | ||||
							
								
								
									
playwright/tests/organization.smtp.spec.ts (new file, 115 lines)
							| @@ -0,0 +1,115 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from '../global-utils'; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailServer = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailServer.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
|  | ||||
|     mail1Buffer = mailServer.buffer(users.user1.email); | ||||
|     mail2Buffer = mailServer.buffer(users.user2.email); | ||||
|     mail3Buffer = mailServer.buffer(users.user3.email); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}, testInfo: TestInfo) => { | ||||
|     utils.stopVault(); | ||||
|     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||
| }); | ||||
|  | ||||
| test('Create user3', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user3, mail3Buffer); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user1, mail1Buffer); | ||||
|  | ||||
|     await orgs.create(test, page, 'Test'); | ||||
|     await orgs.members(test, page, 'Test'); | ||||
|     await orgs.invite(test, page, 'Test', users.user2.email); | ||||
|     await orgs.invite(test, page, 'Test', users.user3.email, { | ||||
|         navigate: false, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with new account', async ({ page }) => { | ||||
|     const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||
|  | ||||
|     await test.step('Create account', async () => { | ||||
|         await page.setContent(invited.html); | ||||
|         const link = await page.getByTestId('invite').getAttribute('href'); | ||||
|         await page.goto(link); | ||||
|         await expect(page).toHaveTitle(/Create account | Vaultwarden Web/); | ||||
|  | ||||
|         //await page.getByLabel('Name').fill(users.user2.name); | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|         await utils.checkNotification(page, 'Your new account has been created'); | ||||
|  | ||||
|         // Redirected to the vault | ||||
|         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||
|         await utils.checkNotification(page, 'You have been logged in!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail2Buffer.expect((m) => m.subject === 'Welcome'); | ||||
|         await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||
|         await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with existing account', async ({ page }) => { | ||||
|     const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test'); | ||||
|  | ||||
|     await page.setContent(invited.html); | ||||
|     const link = await page.getByTestId('invite').getAttribute('href'); | ||||
|  | ||||
|     await page.goto(link); | ||||
|  | ||||
|     // We should be on login page with email prefilled | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|     // Unlock page | ||||
|     await page.getByLabel('Master password').fill(users.user3.password); | ||||
|     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|     // We are now in the default vault page | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|     await utils.checkNotification(page, 'Invitation accepted'); | ||||
|  | ||||
|     await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); | ||||
|     await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); | ||||
| }); | ||||
|  | ||||
| test('Confirm invited user', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1, mail1Buffer); | ||||
|  | ||||
|     await orgs.members(test, page, 'Test'); | ||||
|     await orgs.confirm(test, page, 'Test', users.user2.email); | ||||
|  | ||||
|     await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed')); | ||||
| }); | ||||
|  | ||||
| test('Organization is visible', async ({ page }) => { | ||||
|     await logUser(test, page, users.user2, mail2Buffer); | ||||
|     await page.getByRole('button', { name: 'vault: Test', exact: true }).click(); | ||||
|     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
| }); | ||||
							
								
								
									
playwright/tests/organization.spec.ts (new file, 54 lines)
							| @@ -0,0 +1,54 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { createAccount, logUser } from './setups/user'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Invite', async ({ page }) => { | ||||
|     await createAccount(test, page, users.user3); | ||||
|     await createAccount(test, page, users.user1); | ||||
|  | ||||
|     await orgs.create(test, page, 'New organisation'); | ||||
|     await orgs.members(test, page, 'New organisation'); | ||||
|  | ||||
|     await test.step('missing user2', async () => { | ||||
|         await orgs.invite(test, page, 'New organisation', users.user2.email); | ||||
|         await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/); | ||||
|     }); | ||||
|  | ||||
|     await test.step('existing user3', async () => { | ||||
|         await orgs.invite(test, page, 'New organisation', users.user3.email); | ||||
|         await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/); | ||||
|         await orgs.confirm(test, page, 'New organisation', users.user3.email); | ||||
|     }); | ||||
|  | ||||
|     await test.step('confirm user2', async () => { | ||||
|         await createAccount(test, page, users.user2); | ||||
|         await logUser(test, page, users.user1); | ||||
|         await orgs.members(test, page, 'New organisation'); | ||||
|         await orgs.confirm(test, page, 'New organisation', users.user2.email); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Org visible user2  ', async () => { | ||||
|         await logUser(test, page, users.user2); | ||||
|         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||
|         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Org visible user3  ', async () => { | ||||
|         await logUser(test, page, users.user3); | ||||
|         await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click(); | ||||
|         await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
							
								
								
									
playwright/tests/setups/2fa.ts (new file, 92 lines)
							| @@ -0,0 +1,92 @@ | ||||
| import { expect, type Page, Test } from '@playwright/test'; | ||||
| import { type MailBuffer } from 'maildev'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> { | ||||
|     return await test.step('Activate TOTP 2FA', async () => { | ||||
|         await page.getByRole('button', { name: user.name }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         const secret = await page.getByLabel('Key').innerText(); | ||||
|         let totp = new OTPAuth.TOTP({ secret, period: 30 }); | ||||
|  | ||||
|         await page.getByLabel(/Verification code/).fill(totp.generate()); | ||||
|         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||
|         await page.getByRole('heading', { name: 'Turned on', exact: true }); | ||||
|         await page.getByLabel('Close').click(); | ||||
|  | ||||
|         return totp; | ||||
|     }) | ||||
| } | ||||
|  | ||||
| export async function disableTOTP(test: Test, page: Page, user: { password: string }) { | ||||
|     await test.step('Disable TOTP 2FA', async () => { | ||||
|         await page.getByRole('button', { name: 'Test' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||
|         await page.getByRole('button', { name: 'Yes' }).click(); | ||||
|         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) { | ||||
|     await test.step('Activate Email 2FA', async () => { | ||||
|         await page.getByRole('button', { name: user.name }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Send email' }).click(); | ||||
|     }); | ||||
|  | ||||
|     let code = await retrieveEmailCode(test, page, mailBuffer); | ||||
|  | ||||
|     await test.step('input code', async () => { | ||||
|         await page.getByLabel('2. Enter the resulting 6').fill(code); | ||||
|         await page.getByRole('button', { name: 'Turn on' }).click(); | ||||
|         await page.getByRole('heading', { name: 'Turned on', exact: true }); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> { | ||||
|     return await test.step('retrieve code', async () => { | ||||
|         const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code")); | ||||
|         const page2 = await page.context().newPage(); | ||||
|         await page2.setContent(codeMail.html); | ||||
|         const code = await page2.getByTestId("2fa").innerText(); | ||||
|         await page2.close(); | ||||
|         return code; | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function disableEmail(test: Test, page: Page, user: { password: string }) { | ||||
|     await test.step('Disable Email 2FA', async () => { | ||||
|         await page.getByRole('button', { name: 'Test' }).click(); | ||||
|         await page.getByRole('menuitem', { name: 'Account settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Security' }).click(); | ||||
|         await page.getByRole('link', { name: 'Two-step login' }).click(); | ||||
|         await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click(); | ||||
|         await page.getByLabel('Master password (required)').click(); | ||||
|         await page.getByLabel('Master password (required)').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|         await page.getByRole('button', { name: 'Turn off' }).click(); | ||||
|         await page.getByRole('button', { name: 'Yes' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Two-step login provider turned off'); | ||||
|     }); | ||||
| } | ||||
playwright/tests/setups/db-setup.ts (new file, 7 lines)
							| @@ -0,0 +1,7 @@ | ||||
| import { test } from './db-test'; | ||||
|  | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| test('DB start', async ({ serviceName }) => { | ||||
| 	utils.startComposeService(serviceName); | ||||
| }); | ||||
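Note: global-utils itself is not part of this diff. A rough sketch of what startComposeService might look like, assuming it simply wraps docker compose profiles the way the Keycloak setup further down does; the real helper may differ:

    // Hypothetical helper, shown for illustration only.
    import { execSync } from 'node:child_process';

    export function startComposeService(serviceName: string) {
        // Bring up the compose profile for the database under test, e.g. "mariadb" or "postgres".
        execSync(`docker compose --profile ${serviceName} --env-file test.env up -d ${serviceName}`, { stdio: 'inherit' });
    }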
playwright/tests/setups/db-teardown.ts (new file, 11 lines)
							| @@ -0,0 +1,11 @@ | ||||
| import { test } from './db-test'; | ||||
|  | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test('DB teardown ?', async ({ serviceName }) => { | ||||
|     if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) { | ||||
|         utils.stopComposeService(serviceName); | ||||
|     } | ||||
| }); | ||||
playwright/tests/setups/db-test.ts (new file, 9 lines)
							| @@ -0,0 +1,9 @@ | ||||
| import { test as base } from '@playwright/test'; | ||||
|  | ||||
| export type TestOptions = { | ||||
|   serviceName: string; | ||||
| }; | ||||
|  | ||||
| export const test = base.extend<TestOptions>({ | ||||
|   serviceName: ['', { option: true }], | ||||
| }); | ||||
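The serviceName fixture above is declared as a project-level option ({ option: true }), so it is meant to be set from the Playwright configuration rather than from the tests. A minimal sketch of how a playwright.config.ts could wire it per database project; the project name and paths are illustrative, since the actual config is not shown in this diff:

    import { defineConfig } from '@playwright/test';
    import type { TestOptions } from './tests/setups/db-test';

    export default defineConfig<TestOptions>({
        projects: [
            {
                name: 'mariadb-setup',
                testMatch: 'tests/setups/db-setup.ts',
                // Injected into the { serviceName } fixture used by db-setup.ts and db-teardown.ts.
                use: { serviceName: 'mariadb' },
            },
        ],
    });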
playwright/tests/setups/orgs.ts (new file, 77 lines)
							| @@ -0,0 +1,77 @@ | ||||
| import { expect, type Browser, Page } from '@playwright/test'; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function create(test, page: Page, name: string) { | ||||
|     await test.step('Create Org', async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Password Manager' }).first().click(); | ||||
|         await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         await page.getByRole('link', { name: 'New organisation' }).click(); | ||||
|         await page.getByLabel('Organisation name (required)').fill(name); | ||||
|         await page.getByRole('button', { name: 'Submit' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Organisation created'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function policies(test, page: Page, name: string) { | ||||
|     await test.step(`Navigate to ${name} policies`, async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||
|         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||
|         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||
|         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Toggle collapse Settings' }).click(); | ||||
|         await page.getByRole('link', { name: 'Policies' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function members(test, page: Page, name: string) { | ||||
|     await test.step(`Navigate to ${name} members`, async () => { | ||||
|         await page.locator('a').filter({ hasText: 'Admin Console' }).first().click(); | ||||
|         await page.locator('org-switcher').getByLabel(/Toggle collapse/).click(); | ||||
|         await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click(); | ||||
|         await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible(); | ||||
|         await page.locator('div').filter({ hasText: 'Members' }).nth(2).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await expect(page.getByRole('cell', { name: 'All' })).toBeVisible(); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function invite(test, page: Page, name: string, email: string) { | ||||
|     await test.step(`Invite ${email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Invite member' }).click(); | ||||
|         await page.getByLabel('Email (required)').fill(email); | ||||
|         await page.getByRole('tab', { name: 'Collections' }).click(); | ||||
|         await page.getByRole('combobox', { name: 'Permission' }).click(); | ||||
|         await page.getByText('Edit items', { exact: true }).click(); | ||||
|         await page.getByLabel('Select collections').click(); | ||||
|         await page.getByText('Default collection').click(); | ||||
|         await page.getByRole('cell', { name: 'Collection', exact: true }).click(); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'User(s) invited'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function confirm(test, page: Page, name: string, user_email: string) { | ||||
|     await test.step(`Confirm ${user_email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||
|         await page.getByRole('menuitem', { name: 'Confirm' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Confirm' }).click(); | ||||
|         await utils.checkNotification(page, 'confirmed'); | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function revoke(test, page: Page, name: string, user_email: string) { | ||||
|     await test.step(`Revoke ${user_email}`, async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible(); | ||||
|         await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click(); | ||||
|         await page.getByRole('menuitem', { name: 'Revoke access' }).click(); | ||||
|         await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible(); | ||||
|         await page.getByRole('button', { name: 'Revoke access' }).click(); | ||||
|         await utils.checkNotification(page, 'Revoked organisation access'); | ||||
|     }); | ||||
| } | ||||
playwright/tests/setups/sso-setup.ts (new file, 18 lines)
							| @@ -0,0 +1,18 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| const { exec } = require('node:child_process'); | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async () => { | ||||
|     console.log("Starting Keycloak"); | ||||
|     exec(`docker compose --profile keycloak --env-file test.env up`); | ||||
| }); | ||||
|  | ||||
| test('Keycloak is up', async ({ page }) => { | ||||
|     await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser()); | ||||
|     // Dummy authority is created at the end of the setup | ||||
|     await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser()); | ||||
|     console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`); | ||||
| }); | ||||
playwright/tests/setups/sso-teardown.ts (new file, 15 lines)
							| @@ -0,0 +1,15 @@ | ||||
| import { test, type FullConfig } from '@playwright/test'; | ||||
|  | ||||
| const { execSync } = require('node:child_process'); | ||||
| const utils = require('../../global-utils'); | ||||
|  | ||||
| utils.loadEnv(); | ||||
|  | ||||
| test('Keycloak teardown', async () => { | ||||
|     if( process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) { | ||||
|         console.log("Keep Keycloak running"); | ||||
|     } else { | ||||
|         console.log("Keycloak stopping"); | ||||
|         execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`); | ||||
|     } | ||||
| }); | ||||
playwright/tests/setups/sso.ts (new file, 129 lines)
							| @@ -0,0 +1,129 @@ | ||||
| import { expect, type Page, Test } from '@playwright/test'; | ||||
| import { type MailBuffer, MailServer } from 'maildev'; | ||||
| import * as OTPAuth from "otpauth"; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
| import { retrieveEmailCode } from './2fa'; | ||||
|  | ||||
| /** | ||||
|  * If a MailBuffer is passed it will be used and consume the expected emails | ||||
|  */ | ||||
| export async function logNewUser( | ||||
|     test: Test, | ||||
|     page: Page, | ||||
|     user: { email: string, name: string, password: string }, | ||||
|     options: { mailBuffer?: MailBuffer } = {} | ||||
| ) { | ||||
|     await test.step(`Create user ${user.name}`, async () => { | ||||
|         await page.context().clearCookies(); | ||||
|  | ||||
|         await test.step('Landing page', async () => { | ||||
|             await utils.cleanLanding(page); | ||||
|  | ||||
|             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||
|             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Keycloak login', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|             await page.getByLabel(/Username/).fill(user.name); | ||||
|             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Create Vault account', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||
|             await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||
|             await page.getByLabel('Confirm new master password (').fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Default vault page', async () => { | ||||
|             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         }); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Account successfully created!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|  | ||||
|         if( options.mailBuffer ){ | ||||
|             let mailBuffer = options.mailBuffer; | ||||
|             await test.step('Check emails', async () => { | ||||
|                 await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||
|                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|             }); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * If a MailBuffer is passed it will be used and consume the expected emails | ||||
|  */ | ||||
| export async function logUser( | ||||
|     test: Test, | ||||
|     page: Page, | ||||
|     user: { email: string, name: string, password: string }, | ||||
|     options: { | ||||
|         mailBuffer ?: MailBuffer, | ||||
|         totp?: OTPAuth.TOTP, | ||||
|         mail2fa?: boolean, | ||||
|     } = {} | ||||
| ) { | ||||
|     let mailBuffer = options.mailBuffer; | ||||
|  | ||||
|     await test.step(`Log user ${user.email}`, async () => { | ||||
|         await page.context().clearCookies(); | ||||
|  | ||||
|         await test.step('Landing page', async () => { | ||||
|             await utils.cleanLanding(page); | ||||
|  | ||||
|             await page.locator("input[type=email].vw-email-sso").fill(user.email); | ||||
|             await page.getByRole('button', { name: /Use single sign-on/ }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Keycloak login', async () => { | ||||
|             await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|             await page.getByLabel(/Username/).fill(user.name); | ||||
|             await page.getByLabel('Password', { exact: true }).fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|         }); | ||||
|  | ||||
|         if( options.totp || options.mail2fa ){ | ||||
|             let code; | ||||
|  | ||||
|             await test.step('2FA check', async () => { | ||||
|                 await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible(); | ||||
|  | ||||
|                 if( options.totp ) { | ||||
|                     const totp = options.totp; | ||||
|                     let timestamp = Date.now(); // Needed to use the next token | ||||
|                     timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000; | ||||
|                     code = totp.generate({timestamp}); | ||||
|                 } else if( options.mail2fa ){ | ||||
|                     code = await retrieveEmailCode(test, page, mailBuffer); | ||||
|                 } | ||||
|  | ||||
|                 await page.getByLabel(/Verification code/).fill(code); | ||||
|                 await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|             }); | ||||
|         } | ||||
|  | ||||
|         await test.step('Unlock vault', async () => { | ||||
|             await expect(page).toHaveTitle('Vaultwarden Web'); | ||||
|             await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible(); | ||||
|             await page.getByLabel('Master password').fill(user.password); | ||||
|             await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|         }); | ||||
|  | ||||
|         await test.step('Default vault page', async () => { | ||||
|             await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|             await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); | ||||
|         }); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await test.step('Check email', async () => { | ||||
|                 await mailBuffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|             }); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
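In the 2FA branch of logUser above, the TOTP code is generated for the upcoming time window rather than the current one, presumably so it does not reuse the code already consumed while activating the authenticator (the inline comment hints at this). A small self-contained sketch of the same arithmetic, assuming the 30-second period used in activateTOTP:

    import * as OTPAuth from 'otpauth';

    const totp = new OTPAuth.TOTP({ secret: new OTPAuth.Secret(), period: 30 });

    // Seconds left in the current period, plus one second into the next one, in milliseconds.
    const now = Date.now();
    const skipAhead = (totp.period - (Math.floor(now / 1000) % totp.period) + 1) * 1000;

    // Token that only becomes valid in the next window.
    const nextCode = totp.generate({ timestamp: now + skipAhead });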
playwright/tests/setups/user.ts (new file, 55 lines)
							| @@ -0,0 +1,55 @@ | ||||
| import { expect, type Browser, Page } from '@playwright/test'; | ||||
|  | ||||
| import { type MailBuffer } from 'maildev'; | ||||
|  | ||||
| import * as utils from '../../global-utils'; | ||||
|  | ||||
| export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) { | ||||
|     await test.step(`Create user ${user.name}`, async () => { | ||||
|         await utils.cleanLanding(page); | ||||
|  | ||||
|         await page.getByRole('link', { name: 'Create account' }).click(); | ||||
|  | ||||
|         // Back to Vault create account | ||||
|         await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/); | ||||
|         await page.getByLabel(/Email address/).fill(user.email); | ||||
|         await page.getByLabel('Name').fill(user.name); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         // Vault finish Creation | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Your new account has been created') | ||||
|  | ||||
|         // We are now in the default vault page | ||||
|         await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); | ||||
|         await utils.checkNotification(page, 'You have been logged in!'); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await mailBuffer.expect((m) => m.subject === "Welcome"); | ||||
|             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) { | ||||
|     await test.step(`Log user ${user.email}`, async () => { | ||||
|         await utils.cleanLanding(page); | ||||
|  | ||||
|         await page.getByLabel(/Email address/).fill(user.email); | ||||
|         await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|  | ||||
|         // Unlock page | ||||
|         await page.getByLabel('Master password').fill(user.password); | ||||
|         await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|         // We are now in the default vault page | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|  | ||||
|         if( mailBuffer ){ | ||||
|             await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox"); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
playwright/tests/sso_login.smtp.spec.ts (new file, 53 lines)
							| @@ -0,0 +1,53 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
| import { activateEmail, disableEmail } from './setups/2fa'; | ||||
| import * as utils from "../global-utils"; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailserver; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailserver = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailserver.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: false, | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     if( mailserver ){ | ||||
|         await mailserver.close(); | ||||
|     } | ||||
| }); | ||||
|  | ||||
| test('Create and activate 2FA', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logNewUser(test, page, users.user1, {mailBuffer: mailBuffer}); | ||||
|  | ||||
|     await activateEmail(test, page, users.user1, mailBuffer); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
|  | ||||
| test('Log and disable', async ({ page }) => { | ||||
|     const mailBuffer = mailserver.buffer(users.user1.email); | ||||
|  | ||||
|     await logUser(test, page, users.user1, {mailBuffer: mailBuffer, mail2fa: true}); | ||||
|  | ||||
|     await disableEmail(test, page, users.user1); | ||||
|  | ||||
|     mailBuffer.close(); | ||||
| }); | ||||
playwright/tests/sso_login.spec.ts (new file, 85 lines)
							| @@ -0,0 +1,85 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
|  | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
| import { activateTOTP, disableTOTP } from './setups/2fa'; | ||||
| import * as utils from "../global-utils"; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: false | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Account creation using SSO', async ({ page }) => { | ||||
|     // Landing page | ||||
|     await logNewUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('SSO login', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Non SSO login', async ({ page }) => { | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|     await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||
|     await page.getByRole('button', { name: 'Other' }).click(); | ||||
|  | ||||
|     // Unlock page | ||||
|     await page.getByLabel('Master password').fill(users.user1.password); | ||||
|     await page.getByRole('button', { name: 'Log in with master password' }).click(); | ||||
|  | ||||
|     // We are now in the default vault page | ||||
|     await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
| }); | ||||
|  | ||||
| test('SSO login with TOTP 2fa', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|  | ||||
|     let totp = await activateTOTP(test, page, users.user1); | ||||
|  | ||||
|     await logUser(test, page, users.user1, { totp }); | ||||
|  | ||||
|     await disableTOTP(test, page, users.user1); | ||||
| }); | ||||
|  | ||||
| test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => { | ||||
|     await utils.restartVault(page, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true | ||||
|     }, false); | ||||
|  | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|  | ||||
|     // Check that SSO login is available | ||||
|     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1); | ||||
|  | ||||
|     // No Continue/Other | ||||
|     await expect(page.getByRole('button', { name: 'Other' })).toHaveCount(0); | ||||
| }); | ||||
|  | ||||
|  | ||||
| test('No SSO login', async ({ page }, testInfo: TestInfo) => { | ||||
|     await utils.restartVault(page, testInfo, { | ||||
|         SSO_ENABLED: false | ||||
|     }, false); | ||||
|  | ||||
|     // Landing page | ||||
|     await page.goto('/'); | ||||
|  | ||||
|     // No SSO button (rely on a correct selector checked in previous test) | ||||
|     await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0); | ||||
|  | ||||
|     // Can continue to Master password | ||||
|     await page.getByLabel(/Email address/).fill(users.user1.email); | ||||
|     await page.getByRole('button', { name: 'Continue' }).click(); | ||||
|     await expect(page.getByRole('button', { name: 'Log in with master password' })).toHaveCount(1); | ||||
| }); | ||||
playwright/tests/sso_organization.smtp.spec.ts (new file, 121 lines)
							| @@ -0,0 +1,121 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| let mailServer, mail1Buffer, mail2Buffer, mail3Buffer; | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     mailServer = new MailDev({ | ||||
|         port: process.env.MAILDEV_SMTP_PORT, | ||||
|         web: { port: process.env.MAILDEV_HTTP_PORT }, | ||||
|     }) | ||||
|  | ||||
|     await mailServer.listen(); | ||||
|  | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SMTP_HOST: process.env.MAILDEV_HOST, | ||||
|         SMTP_FROM: process.env.PW_SMTP_FROM, | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true, | ||||
|     }); | ||||
|  | ||||
|     mail1Buffer = mailServer.buffer(users.user1.email); | ||||
|     mail2Buffer = mailServer.buffer(users.user2.email); | ||||
|     mail3Buffer = mailServer.buffer(users.user3.email); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
|     [mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close()); | ||||
| }); | ||||
|  | ||||
| test('Create user3', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer }); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer }); | ||||
|  | ||||
|     await orgs.create(test, page, '/Test'); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await orgs.invite(test, page, '/Test', users.user2.email); | ||||
|     await orgs.invite(test, page, '/Test', users.user3.email); | ||||
| }); | ||||
|  | ||||
| test('invited with new account', async ({ page }) => { | ||||
|     const link = await test.step('Extract email link', async () => { | ||||
|         const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test"); | ||||
|         await page.setContent(invited.html); | ||||
|         return await page.getByTestId("invite").getAttribute("href"); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Redirect to Keycloak', async () => { | ||||
|         await page.goto(link); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Keycloak login', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|         await page.getByLabel(/Username/).fill(users.user2.name); | ||||
|         await page.getByLabel('Password', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Create Vault account', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); | ||||
|         await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); | ||||
|         await page.getByLabel('Confirm new master password (').fill(users.user2.password); | ||||
|         await page.getByRole('button', { name: 'Create account' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Default vault page', async () => { | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|  | ||||
|         await utils.checkNotification(page, 'Account successfully created!'); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail2Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test('invited with existing account', async ({ page }) => { | ||||
|     const link = await test.step('Extract email link', async () => { | ||||
|         const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test"); | ||||
|         await page.setContent(invited.html); | ||||
|         return await page.getByTestId("invite").getAttribute("href"); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Redirect to Keycloak', async () => { | ||||
|         await page.goto(link); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Keycloak login', async () => { | ||||
|         await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); | ||||
|         await page.getByLabel(/Username/).fill(users.user3.name); | ||||
|         await page.getByLabel('Password', { exact: true }).fill(users.user3.password); | ||||
|         await page.getByRole('button', { name: 'Sign In' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Unlock vault', async () => { | ||||
|         await expect(page).toHaveTitle('Vaultwarden Web'); | ||||
|         await page.getByLabel('Master password').fill(users.user3.password); | ||||
|         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Default vault page', async () => { | ||||
|         await expect(page).toHaveTitle(/Vaultwarden Web/); | ||||
|         await utils.checkNotification(page, 'Invitation accepted'); | ||||
|     }); | ||||
|  | ||||
|     await test.step('Check mails', async () => { | ||||
|         await mail3Buffer.expect((m) => m.subject.includes("New Device Logged")); | ||||
|         await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted"); | ||||
|     }); | ||||
| }); | ||||
playwright/tests/sso_organization.spec.ts (new file, 76 lines)
							| @@ -0,0 +1,76 @@ | ||||
| import { test, expect, type TestInfo } from '@playwright/test'; | ||||
| import { MailDev } from 'maildev'; | ||||
|  | ||||
| import * as utils from "../global-utils"; | ||||
| import * as orgs from './setups/orgs'; | ||||
| import { logNewUser, logUser } from './setups/sso'; | ||||
|  | ||||
| let users = utils.loadEnv(); | ||||
|  | ||||
| test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { | ||||
|     await utils.startVault(browser, testInfo, { | ||||
|         SSO_ENABLED: true, | ||||
|         SSO_ONLY: true, | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| test.afterAll('Teardown', async ({}) => { | ||||
|     utils.stopVault(); | ||||
| }); | ||||
|  | ||||
| test('Create user3', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user3); | ||||
| }); | ||||
|  | ||||
| test('Invite users', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user1); | ||||
|  | ||||
|     await orgs.create(test, page, '/Test'); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await orgs.invite(test, page, '/Test', users.user2.email); | ||||
|     await orgs.invite(test, page, '/Test', users.user3.email); | ||||
|     await orgs.confirm(test, page, '/Test', users.user3.email); | ||||
| }); | ||||
|  | ||||
| test('Create invited account', async ({ page }) => { | ||||
|     await logNewUser(test, page, users.user2); | ||||
| }); | ||||
|  | ||||
| test('Confirm invited user', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|     await orgs.members(test, page, '/Test'); | ||||
|     await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/); | ||||
|     await orgs.confirm(test, page, '/Test', users.user2.email); | ||||
| }); | ||||
|  | ||||
| test('Organization is visible', async ({ page }) => { | ||||
|     await logUser(test, page, users.user2); | ||||
|     await page.getByLabel('vault: /Test').click(); | ||||
|     await expect(page.getByLabel('Filter: Default collection')).toBeVisible(); | ||||
| }); | ||||
|  | ||||
| test('Enforce password policy', async ({ page }) => { | ||||
|     await logUser(test, page, users.user1); | ||||
|     await orgs.policies(test, page, '/Test'); | ||||
|  | ||||
|     await test.step(`Set master password policy`, async () => { | ||||
|         await page.getByRole('button', { name: 'Master password requirements' }).click(); | ||||
|         await page.getByRole('checkbox', { name: 'Turn on' }).check(); | ||||
|         await page.getByRole('checkbox', { name: 'Require existing members to' }).check(); | ||||
|         await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42'); | ||||
|         await page.getByRole('button', { name: 'Save' }).click(); | ||||
|         await utils.checkNotification(page, 'Edited policy Master password requirements.'); | ||||
|     }); | ||||
|  | ||||
|     await utils.logout(test, page, users.user1); | ||||
|  | ||||
|     await test.step(`Unlock trigger policy`, async () => { | ||||
|         await page.locator("input[type=email].vw-email-sso").fill(users.user1.email); | ||||
|         await page.getByRole('button', { name: 'Use single sign-on' }).click(); | ||||
|  | ||||
|         await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password); | ||||
|         await page.getByRole('button', { name: 'Unlock' }).click(); | ||||
|  | ||||
|         await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible(); | ||||
|     }); | ||||
| }); | ||||
| @@ -1,4 +1,4 @@ | ||||
| [toolchain] | ||||
| channel = "1.88.0" | ||||
| channel = "1.89.0" | ||||
| components = [ "rustfmt", "clippy" ] | ||||
| profile = "minimal" | ||||
|   | ||||
| @@ -46,6 +46,7 @@ pub fn routes() -> Vec<Route> { | ||||
|         invite_user, | ||||
|         logout, | ||||
|         delete_user, | ||||
|         delete_sso_user, | ||||
|         deauth_user, | ||||
|         disable_user, | ||||
|         enable_user, | ||||
| @@ -239,6 +240,7 @@ struct AdminTemplateData { | ||||
|     page_data: Option<Value>, | ||||
|     logged_in: bool, | ||||
|     urlpath: String, | ||||
|     sso_enabled: bool, | ||||
| } | ||||
|  | ||||
| impl AdminTemplateData { | ||||
| @@ -248,6 +250,7 @@ impl AdminTemplateData { | ||||
|             page_data: Some(page_data), | ||||
|             logged_in: true, | ||||
|             urlpath: CONFIG.domain_path(), | ||||
|             sso_enabled: CONFIG.sso_enabled(), | ||||
|         } | ||||
|     } | ||||
|  | ||||
| @@ -296,7 +299,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon | ||||
|         err_code!("User already exists", Status::Conflict.code) | ||||
|     } | ||||
|  | ||||
|     let mut user = User::new(data.email); | ||||
|     let mut user = User::new(data.email, None); | ||||
|  | ||||
|     async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { | ||||
|         if CONFIG.mail_enabled() { | ||||
| @@ -336,7 +339,7 @@ fn logout(cookies: &CookieJar<'_>) -> Redirect { | ||||
| async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> { | ||||
|     let users = User::get_all(&mut conn).await; | ||||
|     let mut users_json = Vec::with_capacity(users.len()); | ||||
|     for u in users { | ||||
|     for (u, _) in users { | ||||
|         let mut usr = u.to_json(&mut conn).await; | ||||
|         usr["userEnabled"] = json!(u.enabled); | ||||
|         usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); | ||||
| @@ -354,7 +357,7 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> { | ||||
| async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> { | ||||
|     let users = User::get_all(&mut conn).await; | ||||
|     let mut users_json = Vec::with_capacity(users.len()); | ||||
|     for u in users { | ||||
|     for (u, sso_u) in users { | ||||
|         let mut usr = u.to_json(&mut conn).await; | ||||
|         usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await); | ||||
|         usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await); | ||||
| @@ -365,6 +368,9 @@ async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html< | ||||
|             Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), | ||||
|             None => json!("Never"), | ||||
|         }; | ||||
|  | ||||
|         usr["sso_identifier"] = json!(sso_u.map(|u| u.identifier.to_string()).unwrap_or(String::new())); | ||||
|  | ||||
|         users_json.push(usr); | ||||
|     } | ||||
|  | ||||
| @@ -417,6 +423,27 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em | ||||
|     res | ||||
| } | ||||
|  | ||||
| #[delete("/users/<user_id>/sso", format = "application/json")] | ||||
| async fn delete_sso_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult { | ||||
|     let memberships = Membership::find_any_state_by_user(&user_id, &mut conn).await; | ||||
|     let res = SsoUser::delete(&user_id, &mut conn).await; | ||||
|  | ||||
|     for membership in memberships { | ||||
|         log_event( | ||||
|             EventType::OrganizationUserUnlinkedSso as i32, | ||||
|             &membership.uuid, | ||||
|             &membership.org_uuid, | ||||
|             &ACTING_ADMIN_USER.into(), | ||||
|             14, // Use UnknownBrowser type | ||||
|             &token.ip.ip, | ||||
|             &mut conn, | ||||
|         ) | ||||
|         .await; | ||||
|     } | ||||
|  | ||||
|     res | ||||
| } | ||||
|  | ||||
| #[post("/users/<user_id>/deauth", format = "application/json")] | ||||
| async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     let mut user = get_user_or_404(&user_id, &mut conn).await?; | ||||
|   | ||||
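For reference, a hedged sketch of how the new delete_sso_user route could be exercised from a script. It assumes the admin API is mounted under /admin and that an authenticated admin session cookie is already available; neither is shown in this hunk:

    // Illustrative only: unlink the SSO identity of a user via the new admin endpoint.
    async function unlinkSsoUser(baseUrl: string, userId: string, adminCookie: string): Promise<void> {
        const res = await fetch(`${baseUrl}/admin/users/${userId}/sso`, {
            method: 'DELETE',
            // The route declares format = "application/json", so send a matching Content-Type.
            headers: { 'Content-Type': 'application/json', Cookie: adminCookie },
        });
        if (!res.ok) {
            throw new Error(`Failed to unlink SSO user: ${res.status}`);
        }
    }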
| @@ -7,9 +7,9 @@ use serde_json::Value; | ||||
|  | ||||
| use crate::{ | ||||
|     api::{ | ||||
|         core::{log_user_event, two_factor::email}, | ||||
|         master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, | ||||
|         Notify, PasswordOrOtpData, UpdateType, | ||||
|         core::{accept_org_invite, log_user_event, two_factor::email}, | ||||
|         master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult, | ||||
|         JsonResult, Notify, PasswordOrOtpData, UpdateType, | ||||
|     }, | ||||
|     auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, | ||||
|     crypto, | ||||
| @@ -34,6 +34,7 @@ pub fn routes() -> Vec<rocket::Route> { | ||||
|         get_public_keys, | ||||
|         post_keys, | ||||
|         post_password, | ||||
|         post_set_password, | ||||
|         post_kdf, | ||||
|         post_rotatekey, | ||||
|         post_sstamp, | ||||
| @@ -66,15 +67,22 @@ pub fn routes() -> Vec<rocket::Route> { | ||||
|     ] | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct KDFData { | ||||
|     kdf: i32, | ||||
|     kdf_iterations: i32, | ||||
|     kdf_memory: Option<i32>, | ||||
|     kdf_parallelism: Option<i32>, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct RegisterData { | ||||
|     email: String, | ||||
|  | ||||
|     kdf: Option<i32>, | ||||
|     kdf_iterations: Option<i32>, | ||||
|     kdf_memory: Option<i32>, | ||||
|     kdf_parallelism: Option<i32>, | ||||
|     #[serde(flatten)] | ||||
|     kdf: KDFData, | ||||
|  | ||||
|     #[serde(alias = "userSymmetricKey")] | ||||
|     key: String, | ||||
| @@ -97,6 +105,19 @@ pub struct RegisterData { | ||||
|     org_invite_token: Option<String>, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct SetPasswordData { | ||||
|     #[serde(flatten)] | ||||
|     kdf: KDFData, | ||||
|  | ||||
|     key: String, | ||||
|     keys: Option<KeysData>, | ||||
|     master_password_hash: String, | ||||
|     master_password_hint: Option<String>, | ||||
|     org_identifier: Option<String>, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| struct KeysData { | ||||
| @@ -237,10 +258,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c | ||||
|                     err!("Registration email does not match invite email") | ||||
|                 } | ||||
|             } else if Invitation::take(&email, &mut conn).await { | ||||
|                 for membership in Membership::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() { | ||||
|                     membership.status = MembershipStatus::Accepted as i32; | ||||
|                     membership.save(&mut conn).await?; | ||||
|                 } | ||||
|                 Membership::accept_user_invitations(&user.uuid, &mut conn).await?; | ||||
|                 user | ||||
|             } else if CONFIG.is_signup_allowed(&email) | ||||
|                 || (CONFIG.emergency_access_allowed() | ||||
| @@ -259,7 +277,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c | ||||
|                 || CONFIG.is_signup_allowed(&email) | ||||
|                 || pending_emergency_access.is_some() | ||||
|             { | ||||
|                 User::new(email.clone()) | ||||
|                 User::new(email.clone(), None) | ||||
|             } else { | ||||
|                 err!("Registration not allowed or user already exists") | ||||
|             } | ||||
| @@ -269,16 +287,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c | ||||
|     // Make sure we don't leave a lingering invitation. | ||||
|     Invitation::take(&email, &mut conn).await; | ||||
|  | ||||
|     if let Some(client_kdf_type) = data.kdf { | ||||
|         user.client_kdf_type = client_kdf_type; | ||||
|     } | ||||
|  | ||||
|     if let Some(client_kdf_iter) = data.kdf_iterations { | ||||
|         user.client_kdf_iter = client_kdf_iter; | ||||
|     } | ||||
|  | ||||
|     user.client_kdf_memory = data.kdf_memory; | ||||
|     user.client_kdf_parallelism = data.kdf_parallelism; | ||||
|     set_kdf_data(&mut user, data.kdf)?; | ||||
|  | ||||
|     user.set_password(&data.master_password_hash, Some(data.key), true, None); | ||||
|     user.password_hint = password_hint; | ||||
| @@ -327,6 +336,68 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| #[post("/accounts/set-password", data = "<data>")] | ||||
| async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
|     let data: SetPasswordData = data.into_inner(); | ||||
|     let mut user = headers.user; | ||||
|  | ||||
|     if user.private_key.is_some() { | ||||
|         err!("Account already initialized, cannot set password") | ||||
|     } | ||||
|  | ||||
|     // Check against the password hint setting here so if it fails, | ||||
|     // the user can retry without losing their invitation below. | ||||
|     let password_hint = clean_password_hint(&data.master_password_hint); | ||||
|     enforce_password_hint_setting(&password_hint)?; | ||||
|  | ||||
|     set_kdf_data(&mut user, data.kdf)?; | ||||
|  | ||||
|     user.set_password( | ||||
|         &data.master_password_hash, | ||||
|         Some(data.key), | ||||
|         false, | ||||
|         Some(vec![String::from("revision_date")]), // We need to allow revision-date to use the old security_timestamp | ||||
|     ); | ||||
|     user.password_hint = password_hint; | ||||
|  | ||||
|     if let Some(keys) = data.keys { | ||||
|         user.private_key = Some(keys.encrypted_private_key); | ||||
|         user.public_key = Some(keys.public_key); | ||||
|     } | ||||
|  | ||||
|     if let Some(identifier) = data.org_identifier { | ||||
|         if identifier != crate::sso::FAKE_IDENTIFIER { | ||||
|             let org = match Organization::find_by_name(&identifier, &mut conn).await { | ||||
|                 None => err!("Failed to retrieve the associated organization"), | ||||
|                 Some(org) => org, | ||||
|             }; | ||||
|  | ||||
|             let membership = match Membership::find_by_user_and_org(&user.uuid, &org.uuid, &mut conn).await { | ||||
|                 None => err!("Failed to retrieve the invitation"), | ||||
|                 Some(org) => org, | ||||
|             }; | ||||
|  | ||||
|             accept_org_invite(&user, membership, None, &mut conn).await?; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     if CONFIG.mail_enabled() { | ||||
|         mail::send_welcome(&user.email.to_lowercase()).await?; | ||||
|     } else { | ||||
|         Membership::accept_user_invitations(&user.uuid, &mut conn).await?; | ||||
|     } | ||||
|  | ||||
|     log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn) | ||||
|         .await; | ||||
|  | ||||
|     user.save(&mut conn).await?; | ||||
|  | ||||
|     Ok(Json(json!({ | ||||
|       "Object": "set-password", | ||||
|       "CaptchaBypassToken": "", | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| #[get("/accounts/profile")] | ||||
| async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> { | ||||
|     Json(headers.user.to_json(&mut conn).await) | ||||
| @@ -469,25 +540,15 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: D | ||||
| #[derive(Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| struct ChangeKdfData { | ||||
|     kdf: i32, | ||||
|     kdf_iterations: i32, | ||||
|     kdf_memory: Option<i32>, | ||||
|     kdf_parallelism: Option<i32>, | ||||
|     #[serde(flatten)] | ||||
|     kdf: KDFData, | ||||
|  | ||||
|     master_password_hash: String, | ||||
|     new_master_password_hash: String, | ||||
|     key: String, | ||||
| } | ||||
|  | ||||
| #[post("/accounts/kdf", data = "<data>")] | ||||
| async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     let data: ChangeKdfData = data.into_inner(); | ||||
|     let mut user = headers.user; | ||||
|  | ||||
|     if !user.check_valid_password(&data.master_password_hash) { | ||||
|         err!("Invalid password") | ||||
|     } | ||||
|  | ||||
| fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult { | ||||
|     if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { | ||||
|         err!("PBKDF2 KDF iterations must be at least 100000.") | ||||
|     } | ||||
| @@ -518,6 +579,21 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, | ||||
|     } | ||||
|     user.client_kdf_iter = data.kdf_iterations; | ||||
|     user.client_kdf_type = data.kdf; | ||||
|  | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| #[post("/accounts/kdf", data = "<data>")] | ||||
| async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     let data: ChangeKdfData = data.into_inner(); | ||||
|     let mut user = headers.user; | ||||
|  | ||||
|     if !user.check_valid_password(&data.master_password_hash) { | ||||
|         err!("Invalid password") | ||||
|     } | ||||
|  | ||||
|     set_kdf_data(&mut user, data.kdf)?; | ||||
|  | ||||
|     user.set_password(&data.new_master_password_hash, Some(data.key), true, None); | ||||
|     let save_result = user.save(&mut conn).await; | ||||
|  | ||||
| @@ -1126,15 +1202,30 @@ struct SecretVerificationRequest { | ||||
|     master_password_hash: String, | ||||
| } | ||||
|  | ||||
| // Change the KDF Iterations if necessary | ||||
| pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) -> ApiResult<()> { | ||||
|     if user.password_iterations < CONFIG.password_iterations() { | ||||
|         user.password_iterations = CONFIG.password_iterations(); | ||||
|         user.set_password(pwd_hash, None, false, None); | ||||
|  | ||||
|         if let Err(e) = user.save(conn).await { | ||||
|             error!("Error updating user: {e:#?}"); | ||||
|         } | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| #[post("/accounts/verify-password", data = "<data>")] | ||||
| async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, conn: DbConn) -> JsonResult { | ||||
| async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
|     let data: SecretVerificationRequest = data.into_inner(); | ||||
|     let user = headers.user; | ||||
|     let mut user = headers.user; | ||||
|  | ||||
|     if !user.check_valid_password(&data.master_password_hash) { | ||||
|         err!("Invalid password") | ||||
|     } | ||||
|  | ||||
|     kdf_upgrade(&mut user, &data.master_password_hash, &mut conn).await?; | ||||
|  | ||||
|     Ok(Json(master_password_policy(&user, &conn).await)) | ||||
| } | ||||
|  | ||||
|   | ||||
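The new /accounts/set-password handler above deserializes SetPasswordData with camelCase field names and a flattened KDFData, which implies a request body along the following lines. All values are placeholders, and the kdf value 0 is assumed to map to UserKdfType::Pbkdf2:

    // Shape implied by SetPasswordData / KDFData; values are placeholders.
    const setPasswordBody = {
        kdf: 0,                 // assumed PBKDF2; Argon2id would also carry kdfMemory / kdfParallelism
        kdfIterations: 600000,  // must be at least 100000 for PBKDF2 (see set_kdf_data)
        key: '<encrypted user key>',
        keys: {
            encryptedPrivateKey: '<encrypted private key>',
            publicKey: '<public key>',
        },
        masterPasswordHash: '<client-side master password hash>',
        masterPasswordHint: null,
        orgIdentifier: '<sso organization identifier>',
    };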
| @@ -78,6 +78,7 @@ pub fn routes() -> Vec<Route> { | ||||
|         restore_cipher_put, | ||||
|         restore_cipher_put_admin, | ||||
|         restore_cipher_selected, | ||||
|         restore_cipher_selected_admin, | ||||
|         delete_all, | ||||
|         move_cipher_selected, | ||||
|         move_cipher_selected_put, | ||||
| @@ -318,7 +319,7 @@ async fn post_ciphers_create( | ||||
|     // or otherwise), we can just ignore this field entirely. | ||||
|     data.cipher.last_known_revision_date = None; | ||||
|  | ||||
|     share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await | ||||
|     share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt, None).await | ||||
| } | ||||
|  | ||||
| /// Called when creating a new user-owned cipher. | ||||
| @@ -920,7 +921,7 @@ async fn post_cipher_share( | ||||
| ) -> JsonResult { | ||||
|     let data: ShareCipherData = data.into_inner(); | ||||
|  | ||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await | ||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/<cipher_id>/share", data = "<data>")] | ||||
| @@ -933,7 +934,7 @@ async fn put_cipher_share( | ||||
| ) -> JsonResult { | ||||
|     let data: ShareCipherData = data.into_inner(); | ||||
|  | ||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt).await | ||||
|     share_cipher_by_uuid(&cipher_id, data, &headers, &mut conn, &nt, None).await | ||||
| } | ||||
|  | ||||
| #[derive(Deserialize)] | ||||
| @@ -973,11 +974,16 @@ async fn put_cipher_share_selected( | ||||
|         }; | ||||
|  | ||||
|         match shared_cipher_data.cipher.id.take() { | ||||
|             Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?, | ||||
|             Some(id) => { | ||||
|                 share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt, Some(UpdateType::None)).await? | ||||
|             } | ||||
|             None => err!("Request missing ids field"), | ||||
|         }; | ||||
|     } | ||||
|  | ||||
|     // Multi share actions do not send out a push for each cipher, we need to send a general sync here | ||||
|     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||
|  | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| @@ -987,6 +993,7 @@ async fn share_cipher_by_uuid( | ||||
|     headers: &Headers, | ||||
|     conn: &mut DbConn, | ||||
|     nt: &Notify<'_>, | ||||
|     override_ut: Option<UpdateType>, | ||||
| ) -> JsonResult { | ||||
|     let mut cipher = match Cipher::find_by_uuid(cipher_id, conn).await { | ||||
|         Some(cipher) => { | ||||
| @@ -1018,7 +1025,10 @@ async fn share_cipher_by_uuid( | ||||
|     }; | ||||
|  | ||||
|     // When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate. | ||||
|     let ut = if data.cipher.last_known_revision_date.is_some() { | ||||
|     // If there is an override, like when handling multiple items, we want to prevent a push notification for every single item | ||||
|     let ut = if let Some(ut) = override_ut { | ||||
|         ut | ||||
|     } else if data.cipher.last_known_revision_date.is_some() { | ||||
|         UpdateType::SyncCipherUpdate | ||||
|     } else { | ||||
|         UpdateType::SyncCipherCreate | ||||
| @@ -1405,7 +1415,7 @@ async fn delete_attachment_admin( | ||||
|  | ||||
| #[post("/ciphers/<cipher_id>/delete")] | ||||
| async fn delete_cipher_post(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| @@ -1416,13 +1426,13 @@ async fn delete_cipher_post_admin( | ||||
|     mut conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/<cipher_id>/delete")] | ||||
| async fn delete_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await | ||||
|     // soft delete | ||||
| } | ||||
|  | ||||
| @@ -1433,18 +1443,19 @@ async fn delete_cipher_put_admin( | ||||
|     mut conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, true, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::SoftSingle, &nt).await | ||||
|     // soft delete | ||||
| } | ||||
|  | ||||
| #[delete("/ciphers/<cipher_id>")] | ||||
| async fn delete_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[delete("/ciphers/<cipher_id>/admin")] | ||||
| async fn delete_cipher_admin(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, false, &nt).await | ||||
|     _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &CipherDeleteOptions::HardSingle, &nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| @@ -1455,7 +1466,8 @@ async fn delete_cipher_selected( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[post("/ciphers/delete", data = "<data>")] | ||||
| @@ -1465,7 +1477,8 @@ async fn delete_cipher_selected_post( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/delete", data = "<data>")] | ||||
| @@ -1475,7 +1488,8 @@ async fn delete_cipher_selected_put( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await | ||||
|     // soft delete | ||||
| } | ||||
|  | ||||
| #[delete("/ciphers/admin", data = "<data>")] | ||||
| @@ -1485,7 +1499,8 @@ async fn delete_cipher_selected_admin( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[post("/ciphers/delete-admin", data = "<data>")] | ||||
| @@ -1495,7 +1510,8 @@ async fn delete_cipher_selected_post_admin( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, false, nt).await // permanent delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::HardMulti, nt).await | ||||
|     // permanent delete | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/delete-admin", data = "<data>")] | ||||
| @@ -1505,12 +1521,13 @@ async fn delete_cipher_selected_put_admin( | ||||
|     conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete | ||||
|     _delete_multiple_ciphers(data, headers, conn, CipherDeleteOptions::SoftMulti, nt).await | ||||
|     // soft delete | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/<cipher_id>/restore")] | ||||
| async fn restore_cipher_put(cipher_id: CipherId, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { | ||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await | ||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/<cipher_id>/restore-admin")] | ||||
| @@ -1520,7 +1537,17 @@ async fn restore_cipher_put_admin( | ||||
|     mut conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> JsonResult { | ||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, &mut conn, &nt).await | ||||
|     _restore_cipher_by_uuid(&cipher_id, &headers, false, &mut conn, &nt).await | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/restore-admin", data = "<data>")] | ||||
| async fn restore_cipher_selected_admin( | ||||
|     data: Json<CipherIdsData>, | ||||
|     headers: Headers, | ||||
|     mut conn: DbConn, | ||||
|     nt: Notify<'_>, | ||||
| ) -> JsonResult { | ||||
|     _restore_multiple_ciphers(data, &headers, &mut conn, &nt).await | ||||
| } | ||||
|  | ||||
| #[put("/ciphers/restore", data = "<data>")] | ||||
| @@ -1548,35 +1575,47 @@ async fn move_cipher_selected( | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     let data = data.into_inner(); | ||||
|     let user_id = headers.user.uuid; | ||||
|     let user_id = &headers.user.uuid; | ||||
|  | ||||
|     if let Some(ref folder_id) = data.folder_id { | ||||
|         if Folder::find_by_uuid_and_user(folder_id, &user_id, &mut conn).await.is_none() { | ||||
|         if Folder::find_by_uuid_and_user(folder_id, user_id, &mut conn).await.is_none() { | ||||
|             err!("Invalid folder", "Folder does not exist or belongs to another user"); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     for cipher_id in data.ids { | ||||
|         let Some(cipher) = Cipher::find_by_uuid(&cipher_id, &mut conn).await else { | ||||
|             err!("Cipher doesn't exist") | ||||
|         }; | ||||
|     let cipher_count = data.ids.len(); | ||||
|     let mut single_cipher: Option<Cipher> = None; | ||||
|  | ||||
|         if !cipher.is_accessible_to_user(&user_id, &mut conn).await { | ||||
|             err!("Cipher is not accessible by user") | ||||
|     // TODO: Convert this to use a single query (or at least fewer queries) to update all items | ||||
|     // Find all ciphers the user has access to; all others will be ignored | ||||
|     let accessible_ciphers = Cipher::find_by_user_and_ciphers(user_id, &data.ids, &mut conn).await; | ||||
|     let accessible_ciphers_count = accessible_ciphers.len(); | ||||
|     for cipher in accessible_ciphers { | ||||
|         cipher.move_to_folder(data.folder_id.clone(), user_id, &mut conn).await?; | ||||
|         if cipher_count == 1 { | ||||
|             single_cipher = Some(cipher); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|         // Move cipher | ||||
|         cipher.move_to_folder(data.folder_id.clone(), &user_id, &mut conn).await?; | ||||
|  | ||||
|     if let Some(cipher) = single_cipher { | ||||
|         nt.send_cipher_update( | ||||
|             UpdateType::SyncCipherUpdate, | ||||
|             &cipher, | ||||
|             std::slice::from_ref(&user_id), | ||||
|             std::slice::from_ref(user_id), | ||||
|             &headers.device, | ||||
|             None, | ||||
|             &mut conn, | ||||
|         ) | ||||
|         .await; | ||||
|     } else { | ||||
|         // Multi move actions do not send out a push for each cipher, so we send a general sync here | ||||
|         nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||
|     } | ||||
|  | ||||
|     if cipher_count != accessible_ciphers_count { | ||||
|         err!(format!( | ||||
|             "Not all ciphers were moved! Only {accessible_ciphers_count} of the selected {cipher_count} were moved." | ||||
|         )) | ||||
|     } | ||||
|  | ||||
|     Ok(()) | ||||
| @@ -1659,11 +1698,19 @@ async fn delete_all( | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[derive(PartialEq)] | ||||
| pub enum CipherDeleteOptions { | ||||
|     SoftSingle, | ||||
|     SoftMulti, | ||||
|     HardSingle, | ||||
|     HardMulti, | ||||
| } | ||||
|  | ||||
| async fn _delete_cipher_by_uuid( | ||||
|     cipher_id: &CipherId, | ||||
|     headers: &Headers, | ||||
|     conn: &mut DbConn, | ||||
|     soft_delete: bool, | ||||
|     delete_options: &CipherDeleteOptions, | ||||
|     nt: &Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     let Some(mut cipher) = Cipher::find_by_uuid(cipher_id, conn).await else { | ||||
| @@ -1674,35 +1721,42 @@ async fn _delete_cipher_by_uuid( | ||||
|         err!("Cipher can't be deleted by user") | ||||
|     } | ||||
|  | ||||
|     if soft_delete { | ||||
|     if *delete_options == CipherDeleteOptions::SoftSingle || *delete_options == CipherDeleteOptions::SoftMulti { | ||||
|         cipher.deleted_at = Some(Utc::now().naive_utc()); | ||||
|         cipher.save(conn).await?; | ||||
|         nt.send_cipher_update( | ||||
|             UpdateType::SyncCipherUpdate, | ||||
|             &cipher, | ||||
|             &cipher.update_users_revision(conn).await, | ||||
|             &headers.device, | ||||
|             None, | ||||
|             conn, | ||||
|         ) | ||||
|         .await; | ||||
|         if *delete_options == CipherDeleteOptions::SoftSingle { | ||||
|             nt.send_cipher_update( | ||||
|                 UpdateType::SyncCipherUpdate, | ||||
|                 &cipher, | ||||
|                 &cipher.update_users_revision(conn).await, | ||||
|                 &headers.device, | ||||
|                 None, | ||||
|                 conn, | ||||
|             ) | ||||
|             .await; | ||||
|         } | ||||
|     } else { | ||||
|         cipher.delete(conn).await?; | ||||
|         nt.send_cipher_update( | ||||
|             UpdateType::SyncCipherDelete, | ||||
|             &cipher, | ||||
|             &cipher.update_users_revision(conn).await, | ||||
|             &headers.device, | ||||
|             None, | ||||
|             conn, | ||||
|         ) | ||||
|         .await; | ||||
|         if *delete_options == CipherDeleteOptions::HardSingle { | ||||
|             nt.send_cipher_update( | ||||
|                 UpdateType::SyncLoginDelete, | ||||
|                 &cipher, | ||||
|                 &cipher.update_users_revision(conn).await, | ||||
|                 &headers.device, | ||||
|                 None, | ||||
|                 conn, | ||||
|             ) | ||||
|             .await; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     if let Some(org_id) = cipher.organization_uuid { | ||||
|         let event_type = match soft_delete { | ||||
|             true => EventType::CipherSoftDeleted as i32, | ||||
|             false => EventType::CipherDeleted as i32, | ||||
|         let event_type = if *delete_options == CipherDeleteOptions::SoftSingle | ||||
|             || *delete_options == CipherDeleteOptions::SoftMulti | ||||
|         { | ||||
|             EventType::CipherSoftDeleted as i32 | ||||
|         } else { | ||||
|             EventType::CipherDeleted as i32 | ||||
|         }; | ||||
|  | ||||
|         log_event(event_type, &cipher.uuid, &org_id, &headers.user.uuid, headers.device.atype, &headers.ip.ip, conn) | ||||
| @@ -1722,23 +1776,27 @@ async fn _delete_multiple_ciphers( | ||||
|     data: Json<CipherIdsData>, | ||||
|     headers: Headers, | ||||
|     mut conn: DbConn, | ||||
|     soft_delete: bool, | ||||
|     delete_options: CipherDeleteOptions, | ||||
|     nt: Notify<'_>, | ||||
| ) -> EmptyResult { | ||||
|     let data = data.into_inner(); | ||||
|  | ||||
|     for cipher_id in data.ids { | ||||
|         if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, soft_delete, &nt).await { | ||||
|         if let error @ Err(_) = _delete_cipher_by_uuid(&cipher_id, &headers, &mut conn, &delete_options, &nt).await { | ||||
|             return error; | ||||
|         }; | ||||
|     } | ||||
|  | ||||
|     // Multi delete actions do not send out a push for each cipher, so we send a general sync here | ||||
|     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, &mut conn).await; | ||||
|  | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| async fn _restore_cipher_by_uuid( | ||||
|     cipher_id: &CipherId, | ||||
|     headers: &Headers, | ||||
|     multi_restore: bool, | ||||
|     conn: &mut DbConn, | ||||
|     nt: &Notify<'_>, | ||||
| ) -> JsonResult { | ||||
| @@ -1753,15 +1811,17 @@ async fn _restore_cipher_by_uuid( | ||||
|     cipher.deleted_at = None; | ||||
|     cipher.save(conn).await?; | ||||
|  | ||||
|     nt.send_cipher_update( | ||||
|         UpdateType::SyncCipherUpdate, | ||||
|         &cipher, | ||||
|         &cipher.update_users_revision(conn).await, | ||||
|         &headers.device, | ||||
|         None, | ||||
|         conn, | ||||
|     ) | ||||
|     .await; | ||||
|     if !multi_restore { | ||||
|         nt.send_cipher_update( | ||||
|             UpdateType::SyncCipherUpdate, | ||||
|             &cipher, | ||||
|             &cipher.update_users_revision(conn).await, | ||||
|             &headers.device, | ||||
|             None, | ||||
|             conn, | ||||
|         ) | ||||
|         .await; | ||||
|     } | ||||
|  | ||||
|     if let Some(org_id) = &cipher.organization_uuid { | ||||
|         log_event( | ||||
| @@ -1789,12 +1849,15 @@ async fn _restore_multiple_ciphers( | ||||
|  | ||||
|     let mut ciphers: Vec<Value> = Vec::new(); | ||||
|     for cipher_id in data.ids { | ||||
|         match _restore_cipher_by_uuid(&cipher_id, headers, conn, nt).await { | ||||
|         match _restore_cipher_by_uuid(&cipher_id, headers, true, conn, nt).await { | ||||
|             Ok(json) => ciphers.push(json.into_inner()), | ||||
|             err => return err, | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Multi restore actions do not send out a push for each cipher, so we send a general sync here | ||||
|     nt.send_user_update(UpdateType::SyncCiphers, &headers.user, &headers.device.push_uuid, conn).await; | ||||
|  | ||||
|     Ok(Json(json!({ | ||||
|       "data": ciphers, | ||||
|       "object": "list", | ||||
|   | ||||
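A recurring pattern in the ciphers changes above: single-item operations keep their per-cipher push notification, while multi-item operations (the `*Multi` delete options, multi share/move/restore) suppress it and send one aggregate `SyncCiphers` update at the end. A minimal, self-contained sketch of that decision logic follows; the types are illustrative stand-ins, not vaultwarden's own `Cipher`/`Notify`/`UpdateType`.

```rust
// Standalone sketch of the single-vs-multi notification pattern above.
#[derive(PartialEq)]
enum DeleteOptions {
    SoftSingle,
    SoftMulti,
    HardSingle,
    HardMulti,
}

fn push_per_item(options: &DeleteOptions) -> bool {
    // Only the *Single variants trigger a per-cipher push notification.
    matches!(options, DeleteOptions::SoftSingle | DeleteOptions::HardSingle)
}

fn delete_ciphers(ids: &[u32], options: DeleteOptions) {
    for id in ids {
        // ... soft- or hard-delete the cipher here ...
        if push_per_item(&options) {
            println!("push SyncCipherUpdate/SyncLoginDelete for cipher {id}");
        }
    }
    if !push_per_item(&options) {
        // Multi actions send one aggregate sync instead of N individual pushes.
        println!("push SyncCiphers for the whole account");
    }
}

fn main() {
    delete_ciphers(&[17], DeleteOptions::HardSingle);
    delete_ciphers(&[1, 2, 3], DeleteOptions::SoftMulti);
    delete_ciphers(&[4, 5], DeleteOptions::HardMulti);
    delete_ciphers(&[9], DeleteOptions::SoftSingle);
}
```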
| @@ -239,7 +239,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu | ||||
|                 invitation.save(&mut conn).await?; | ||||
|             } | ||||
|  | ||||
|             let mut user = User::new(email.clone()); | ||||
|             let mut user = User::new(email.clone(), None); | ||||
|             user.save(&mut conn).await?; | ||||
|             (user, true) | ||||
|         } | ||||
|   | ||||
| @@ -50,11 +50,12 @@ pub fn events_routes() -> Vec<Route> { | ||||
| use rocket::{serde::json::Json, serde::json::Value, Catcher, Route}; | ||||
|  | ||||
| use crate::{ | ||||
|     api::{JsonResult, Notify, UpdateType}, | ||||
|     api::{EmptyResult, JsonResult, Notify, UpdateType}, | ||||
|     auth::Headers, | ||||
|     db::DbConn, | ||||
|     db::{models::*, DbConn}, | ||||
|     error::Error, | ||||
|     http_client::make_http_request, | ||||
|     mail, | ||||
|     util::parse_experimental_client_feature_flags, | ||||
| }; | ||||
|  | ||||
| @@ -225,7 +226,7 @@ fn config() -> Json<Value> { | ||||
|           "url": "https://github.com/dani-garcia/vaultwarden" | ||||
|         }, | ||||
|         "settings": { | ||||
|             "disableUserRegistration": !crate::CONFIG.signups_allowed() && crate::CONFIG.signups_domains_whitelist().is_empty(), | ||||
|             "disableUserRegistration": crate::CONFIG.is_signup_disabled() | ||||
|         }, | ||||
|         "environment": { | ||||
|           "vault": domain, | ||||
| @@ -259,3 +260,49 @@ fn api_not_found() -> Json<Value> { | ||||
|         } | ||||
|     })) | ||||
| } | ||||
|  | ||||
| async fn accept_org_invite( | ||||
|     user: &User, | ||||
|     mut member: Membership, | ||||
|     reset_password_key: Option<String>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> EmptyResult { | ||||
|     if member.status != MembershipStatus::Invited as i32 { | ||||
|         err!("User already accepted the invitation"); | ||||
|     } | ||||
|  | ||||
|     // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type | ||||
|     // It returns different error messages per function. | ||||
|     if member.atype < MembershipType::Admin { | ||||
|         match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await { | ||||
|             Ok(_) => {} | ||||
|             Err(OrgPolicyErr::TwoFactorMissing) => { | ||||
|                 if crate::CONFIG.email_2fa_auto_fallback() { | ||||
|                     two_factor::email::activate_email_2fa(user, conn).await?; | ||||
|                 } else { | ||||
|                     err!("You cannot join this organization until you enable two-step login on your user account"); | ||||
|                 } | ||||
|             } | ||||
|             Err(OrgPolicyErr::SingleOrgEnforced) => { | ||||
|                 err!("You cannot join this organization because you are a member of an organization which forbids it"); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     member.status = MembershipStatus::Accepted as i32; | ||||
|     member.reset_password_key = reset_password_key; | ||||
|  | ||||
|     member.save(conn).await?; | ||||
|  | ||||
|     if crate::CONFIG.mail_enabled() { | ||||
|         let org = match Organization::find_by_uuid(&member.org_uuid, conn).await { | ||||
|             Some(org) => org, | ||||
|             None => err!("Organization not found."), | ||||
|         }; | ||||
|         // User was invited to an organization, so they must be confirmed manually after acceptance | ||||
|         mail::send_invite_accepted(&user.email, &member.invited_by_email.unwrap_or(org.billing_email), &org.name) | ||||
|             .await?; | ||||
|     } | ||||
|  | ||||
|     Ok(()) | ||||
| } | ||||
|   | ||||
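The `disableUserRegistration` change in the `config()` hunk above replaces an inline expression with `CONFIG.is_signup_disabled()`. Below is a hedged sketch of what that helper presumably wraps, using a stand-in config struct; the real helper lives in vaultwarden's config module and is not shown in this diff.

```rust
// Stand-in config struct; assumes is_signup_disabled() simply encapsulates the
// expression it replaced: signups are closed and no domains are whitelisted.
struct Config {
    signups_allowed: bool,
    signups_domains_whitelist: Vec<String>,
}

impl Config {
    fn is_signup_disabled(&self) -> bool {
        !self.signups_allowed && self.signups_domains_whitelist.is_empty()
    }
}

fn main() {
    let closed = Config { signups_allowed: false, signups_domains_whitelist: vec![] };
    let whitelisted = Config { signups_allowed: false, signups_domains_whitelist: vec!["example.com".into()] };
    assert!(closed.is_signup_disabled());
    assert!(!whitelisted.is_signup_disabled());
    println!("registration disabled: {}", closed.is_signup_disabled());
}
```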
| @@ -7,13 +7,13 @@ use std::collections::{HashMap, HashSet}; | ||||
| use crate::api::admin::FAKE_ADMIN_UUID; | ||||
| use crate::{ | ||||
|     api::{ | ||||
|         core::{log_event, two_factor, CipherSyncData, CipherSyncType}, | ||||
|         core::{accept_org_invite, log_event, two_factor, CipherSyncData, CipherSyncType}, | ||||
|         EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, | ||||
|     }, | ||||
|     auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OrgMemberHeaders, OwnerHeaders}, | ||||
|     db::{models::*, DbConn}, | ||||
|     mail, | ||||
|     util::{convert_json_key_lcase_first, NumberOrString}, | ||||
|     util::{convert_json_key_lcase_first, get_uuid, NumberOrString}, | ||||
|     CONFIG, | ||||
| }; | ||||
|  | ||||
| @@ -43,6 +43,7 @@ pub fn routes() -> Vec<Route> { | ||||
|         bulk_delete_organization_collections, | ||||
|         post_bulk_collections, | ||||
|         get_org_details, | ||||
|         get_org_domain_sso_verified, | ||||
|         get_members, | ||||
|         send_invite, | ||||
|         reinvite_member, | ||||
| @@ -60,6 +61,7 @@ pub fn routes() -> Vec<Route> { | ||||
|         post_org_import, | ||||
|         list_policies, | ||||
|         list_policies_token, | ||||
|         get_master_password_policy, | ||||
|         get_policy, | ||||
|         put_policy, | ||||
|         get_organization_tax, | ||||
| @@ -103,6 +105,7 @@ pub fn routes() -> Vec<Route> { | ||||
|         api_key, | ||||
|         rotate_api_key, | ||||
|         get_billing_metadata, | ||||
|         get_auto_enroll_status, | ||||
|     ] | ||||
| } | ||||
|  | ||||
| @@ -192,7 +195,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, mut conn: Db | ||||
|     }; | ||||
|  | ||||
|     let org = Organization::new(data.name, data.billing_email, private_key, public_key); | ||||
|     let mut member = Membership::new(headers.user.uuid, org.uuid.clone()); | ||||
|     let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None); | ||||
|     let collection = Collection::new(org.uuid.clone(), data.collection_name, None); | ||||
|  | ||||
|     member.akey = data.key; | ||||
| @@ -335,6 +338,34 @@ async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json<Value> | ||||
|     })) | ||||
| } | ||||
|  | ||||
| // Called during the SSO enrollment | ||||
| // The `identifier` should be the value returned by `get_org_domain_sso_details` | ||||
| // The returned `Id` will then be passed to `get_master_password_policy` which will mainly ignore it | ||||
| #[get("/organizations/<identifier>/auto-enroll-status")] | ||||
| async fn get_auto_enroll_status(identifier: &str, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
|     let org = if identifier == crate::sso::FAKE_IDENTIFIER { | ||||
|         match Membership::find_main_user_org(&headers.user.uuid, &mut conn).await { | ||||
|             Some(member) => Organization::find_by_uuid(&member.org_uuid, &mut conn).await, | ||||
|             None => None, | ||||
|         } | ||||
|     } else { | ||||
|         Organization::find_by_name(identifier, &mut conn).await | ||||
|     }; | ||||
|  | ||||
|     let (id, identifier, rp_auto_enroll) = match org { | ||||
|         None => (get_uuid(), identifier.to_string(), false), | ||||
|         Some(org) => { | ||||
|             (org.uuid.to_string(), org.name, OrgPolicy::org_is_reset_password_auto_enroll(&org.uuid, &mut conn).await) | ||||
|         } | ||||
|     }; | ||||
|  | ||||
|     Ok(Json(json!({ | ||||
|         "Id": id, | ||||
|         "Identifier": identifier, | ||||
|         "ResetPasswordEnabled": rp_auto_enroll, | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| #[get("/organizations/<org_id>/collections")] | ||||
| async fn get_org_collections(org_id: OrganizationId, headers: ManagerHeadersLoose, mut conn: DbConn) -> JsonResult { | ||||
|     if org_id != headers.membership.org_uuid { | ||||
| @@ -930,6 +961,39 @@ async fn _get_org_details( | ||||
|     Ok(json!(ciphers_json)) | ||||
| } | ||||
|  | ||||
| #[derive(Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| struct OrgDomainDetails { | ||||
|     email: String, | ||||
| } | ||||
|  | ||||
| // Returning a Domain/Organization here allows the client to prefill it and prevents prompting the user. | ||||
| // So we either return an Org name associated with the user or a dummy value. | ||||
| // In use since `v2025.6.0`; the client appears to use only the first `organizationIdentifier`. | ||||
| #[post("/organizations/domain/sso/verified", data = "<data>")] | ||||
| async fn get_org_domain_sso_verified(data: Json<OrgDomainDetails>, mut conn: DbConn) -> JsonResult { | ||||
|     let data: OrgDomainDetails = data.into_inner(); | ||||
|  | ||||
|     let identifiers = match Organization::find_org_user_email(&data.email, &mut conn) | ||||
|         .await | ||||
|         .into_iter() | ||||
|         .map(|o| o.name) | ||||
|         .collect::<Vec<String>>() | ||||
|     { | ||||
|         v if !v.is_empty() => v, | ||||
|         _ => vec![crate::sso::FAKE_IDENTIFIER.to_string()], | ||||
|     }; | ||||
|  | ||||
|     Ok(Json(json!({ | ||||
|         "object": "list", | ||||
|         "data": identifiers.into_iter().map(|identifier| json!({ | ||||
|             "organizationName": identifier,     // appears unused | ||||
|             "organizationIdentifier": identifier, | ||||
|             "domainName": CONFIG.domain(),      // appears unused | ||||
|         })).collect::<Vec<Value>>() | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| #[derive(FromForm)] | ||||
| struct GetOrgUserData { | ||||
|     #[field(name = "includeCollections")] | ||||
| @@ -1063,7 +1127,7 @@ async fn send_invite( | ||||
|                     Invitation::new(email).save(&mut conn).await?; | ||||
|                 } | ||||
|  | ||||
|                 let mut new_user = User::new(email.clone()); | ||||
|                 let mut new_user = User::new(email.clone(), None); | ||||
|                 new_user.save(&mut conn).await?; | ||||
|                 user_created = true; | ||||
|                 new_user | ||||
| @@ -1081,7 +1145,7 @@ async fn send_invite( | ||||
|             } | ||||
|         }; | ||||
|  | ||||
|         let mut new_member = Membership::new(user.uuid.clone(), org_id.clone()); | ||||
|         let mut new_member = Membership::new(user.uuid.clone(), org_id.clone(), Some(headers.user.email.clone())); | ||||
|         new_member.access_all = access_all; | ||||
|         new_member.atype = new_type; | ||||
|         new_member.status = member_status; | ||||
| @@ -1267,71 +1331,39 @@ async fn accept_invite( | ||||
|         err!("Invitation was issued to a different account", "Claim does not match user_id") | ||||
|     } | ||||
|  | ||||
|     // If a claim org_id does not match the one from the URI, something is wrong. | ||||
|     if !claims.org_id.eq(&org_id) { | ||||
|         err!("Error accepting the invitation", "Claim does not match the org_id") | ||||
|     } | ||||
|  | ||||
|     // If a claim does not have a member_id or it does not match the one from the URI, something is wrong. | ||||
|     if !claims.member_id.eq(&member_id) { | ||||
|         err!("Error accepting the invitation", "Claim does not match the member_id") | ||||
|     } | ||||
|  | ||||
|     let member = &claims.member_id; | ||||
|     let org = &claims.org_id; | ||||
|  | ||||
|     let member_id = &claims.member_id; | ||||
|     Invitation::take(&claims.email, &mut conn).await; | ||||
|  | ||||
|     // skip invitation logic when we were invited via the /admin panel | ||||
|     if **member != FAKE_ADMIN_UUID { | ||||
|         let Some(mut member) = Membership::find_by_uuid_and_org(member, org, &mut conn).await else { | ||||
|     if **member_id != FAKE_ADMIN_UUID { | ||||
|         let Some(mut member) = Membership::find_by_uuid_and_org(member_id, &claims.org_id, &mut conn).await else { | ||||
|             err!("Error accepting the invitation") | ||||
|         }; | ||||
|  | ||||
|         if member.status != MembershipStatus::Invited as i32 { | ||||
|             err!("User already accepted the invitation") | ||||
|         } | ||||
|         let reset_password_key = match OrgPolicy::org_is_reset_password_auto_enroll(&member.org_uuid, &mut conn).await { | ||||
|             true if data.reset_password_key.is_none() => err!("Reset password key is required, but not provided."), | ||||
|             true => data.reset_password_key, | ||||
|             false => None, | ||||
|         }; | ||||
|  | ||||
|         let master_password_required = OrgPolicy::org_is_reset_password_auto_enroll(org, &mut conn).await; | ||||
|         if data.reset_password_key.is_none() && master_password_required { | ||||
|             err!("Reset password key is required, but not provided."); | ||||
|         } | ||||
|         // In case the user was invited before the mail was saved in db. | ||||
|         member.invited_by_email = member.invited_by_email.or(claims.invited_by_email); | ||||
|  | ||||
|         // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type | ||||
|         // It returns different error messages per function. | ||||
|         if member.atype < MembershipType::Admin { | ||||
|             match OrgPolicy::is_user_allowed(&member.user_uuid, &org_id, false, &mut conn).await { | ||||
|                 Ok(_) => {} | ||||
|                 Err(OrgPolicyErr::TwoFactorMissing) => { | ||||
|                     if CONFIG.email_2fa_auto_fallback() { | ||||
|                         two_factor::email::activate_email_2fa(&headers.user, &mut conn).await?; | ||||
|                     } else { | ||||
|                         err!("You cannot join this organization until you enable two-step login on your user account"); | ||||
|                     } | ||||
|                 } | ||||
|                 Err(OrgPolicyErr::SingleOrgEnforced) => { | ||||
|                     err!("You cannot join this organization because you are a member of an organization which forbids it"); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         member.status = MembershipStatus::Accepted as i32; | ||||
|  | ||||
|         if master_password_required { | ||||
|             member.reset_password_key = data.reset_password_key; | ||||
|         } | ||||
|  | ||||
|         member.save(&mut conn).await?; | ||||
|     } | ||||
|  | ||||
|     if CONFIG.mail_enabled() { | ||||
|         if let Some(invited_by_email) = &claims.invited_by_email { | ||||
|             let org_name = match Organization::find_by_uuid(&claims.org_id, &mut conn).await { | ||||
|                 Some(org) => org.name, | ||||
|                 None => err!("Organization not found."), | ||||
|             }; | ||||
|             // User was invited to an organization, so they must be confirmed manually after acceptance | ||||
|             mail::send_invite_accepted(&claims.email, invited_by_email, &org_name).await?; | ||||
|         } else { | ||||
|             // User was invited from /admin, so they are automatically confirmed | ||||
|             let org_name = CONFIG.invitation_org_name(); | ||||
|             mail::send_invite_confirmed(&claims.email, &org_name).await?; | ||||
|         } | ||||
|         accept_org_invite(&headers.user, member, reset_password_key, &mut conn).await?; | ||||
|     } else if CONFIG.mail_enabled() { | ||||
|         // User was invited from /admin, so they are automatically confirmed | ||||
|         let org_name = CONFIG.invitation_org_name(); | ||||
|         mail::send_invite_confirmed(&claims.email, &org_name).await?; | ||||
|     } | ||||
|  | ||||
|     Ok(()) | ||||
| @@ -2025,18 +2057,36 @@ async fn list_policies_token(org_id: OrganizationId, token: &str, mut conn: DbCo | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| #[get("/organizations/<org_id>/policies/<pol_type>")] | ||||
| // Called during the SSO enrollment. | ||||
| // Returns the org policy if it exists; otherwise the default one is used. | ||||
| #[get("/organizations/<org_id>/policies/master-password", rank = 1)] | ||||
| async fn get_master_password_policy(org_id: OrganizationId, _headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
|     let policy = | ||||
|         OrgPolicy::find_by_org_and_type(&org_id, OrgPolicyType::MasterPassword, &mut conn).await.unwrap_or_else(|| { | ||||
|             let data = match CONFIG.sso_master_password_policy() { | ||||
|                 Some(policy) => policy, | ||||
|                 None => "null".to_string(), | ||||
|             }; | ||||
|  | ||||
|             OrgPolicy::new(org_id, OrgPolicyType::MasterPassword, CONFIG.sso_master_password_policy().is_some(), data) | ||||
|         }); | ||||
|  | ||||
|     Ok(Json(policy.to_json())) | ||||
| } | ||||
|  | ||||
| #[get("/organizations/<org_id>/policies/<pol_type>", rank = 2)] | ||||
| async fn get_policy(org_id: OrganizationId, pol_type: i32, headers: AdminHeaders, mut conn: DbConn) -> JsonResult { | ||||
|     if org_id != headers.org_id { | ||||
|         err!("Organization not found", "Organization id's do not match"); | ||||
|     } | ||||
|  | ||||
|     let Some(pol_type_enum) = OrgPolicyType::from_i32(pol_type) else { | ||||
|         err!("Invalid or unsupported policy type") | ||||
|     }; | ||||
|  | ||||
|     let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type_enum, &mut conn).await { | ||||
|         Some(p) => p, | ||||
|         None => OrgPolicy::new(org_id.clone(), pol_type_enum, "null".to_string()), | ||||
|         None => OrgPolicy::new(org_id.clone(), pol_type_enum, false, "null".to_string()), | ||||
|     }; | ||||
|  | ||||
|     Ok(Json(policy.to_json())) | ||||
| @@ -2147,7 +2197,7 @@ async fn put_policy( | ||||
|  | ||||
|     let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type_enum, &mut conn).await { | ||||
|         Some(p) => p, | ||||
|         None => OrgPolicy::new(org_id.clone(), pol_type_enum, "{}".to_string()), | ||||
|         None => OrgPolicy::new(org_id.clone(), pol_type_enum, false, "{}".to_string()), | ||||
|     }; | ||||
|  | ||||
|     policy.enabled = data.enabled; | ||||
| @@ -2260,7 +2310,7 @@ struct OrgImportData { | ||||
|     users: Vec<OrgImportUserData>, | ||||
| } | ||||
|  | ||||
| /// This function seems to be deprected | ||||
| /// This function seems to be deprecated | ||||
| /// It is only used with older directory connectors | ||||
| /// TODO: Cleanup Tech debt | ||||
| #[post("/organizations/<org_id>/import", data = "<data>")] | ||||
| @@ -2306,7 +2356,8 @@ async fn import(org_id: OrganizationId, data: Json<OrgImportData>, headers: Head | ||||
|                     MembershipStatus::Accepted as i32 // Automatically mark user as accepted if no email invites | ||||
|                 }; | ||||
|  | ||||
|                 let mut new_member = Membership::new(user.uuid.clone(), org_id.clone()); | ||||
|                 let mut new_member = | ||||
|                     Membership::new(user.uuid.clone(), org_id.clone(), Some(headers.user.email.clone())); | ||||
|                 new_member.access_all = false; | ||||
|                 new_member.atype = MembershipType::User as i32; | ||||
|                 new_member.status = member_status; | ||||
|   | ||||
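The new `rank = 1` / `rank = 2` attributes on the two policy routes above exist because `/policies/master-password` and `/policies/<pol_type>` overlap; without explicit ranks Rocket would treat the pair as colliding routes. A standalone sketch of the same routing idea, with hypothetical handlers rather than vaultwarden's:

```rust
// Hypothetical Rocket app showing why explicit ranks are needed: the static
// "master-password" segment overlaps with the dynamic <pol_type> segment, so
// the more specific route gets the lower (higher-priority) rank.
#[macro_use]
extern crate rocket;

#[get("/organizations/<org_id>/policies/master-password", rank = 1)]
fn master_password_policy(org_id: &str) -> String {
    format!("master password policy for org {org_id}")
}

#[get("/organizations/<org_id>/policies/<pol_type>", rank = 2)]
fn generic_policy(org_id: &str, pol_type: i32) -> String {
    format!("policy type {pol_type} for org {org_id}")
}

#[launch]
fn rocket() -> _ {
    rocket::build().mount("/", routes![master_password_policy, generic_policy])
}
```

A request for `/organizations/x/policies/master-password` hits the rank 1 route first; numeric policy types fall through to the generic rank 2 route.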
| @@ -89,7 +89,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db | ||||
|                 Some(user) => user, // exists in vaultwarden | ||||
|                 None => { | ||||
|                     // User does not exist yet | ||||
|                     let mut new_user = User::new(user_data.email.clone()); | ||||
|                     let mut new_user = User::new(user_data.email.clone(), None); | ||||
|                     new_user.save(&mut conn).await?; | ||||
|  | ||||
|                     if !CONFIG.mail_enabled() { | ||||
| @@ -105,7 +105,12 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db | ||||
|                 MembershipStatus::Accepted as i32 // Automatically mark user as accepted if no email invites | ||||
|             }; | ||||
|  | ||||
|             let mut new_member = Membership::new(user.uuid.clone(), org_id.clone()); | ||||
|             let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await { | ||||
|                 Some(org) => (org.name, org.billing_email), | ||||
|                 None => err!("Error looking up organization"), | ||||
|             }; | ||||
|  | ||||
|             let mut new_member = Membership::new(user.uuid.clone(), org_id.clone(), Some(org_email.clone())); | ||||
|             new_member.set_external_id(Some(user_data.external_id.clone())); | ||||
|             new_member.access_all = false; | ||||
|             new_member.atype = MembershipType::User as i32; | ||||
| @@ -114,11 +119,6 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db | ||||
|             new_member.save(&mut conn).await?; | ||||
|  | ||||
|             if CONFIG.mail_enabled() { | ||||
|                 let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await { | ||||
|                     Some(org) => (org.name, org.billing_email), | ||||
|                     None => err!("Error looking up organization"), | ||||
|                 }; | ||||
|  | ||||
|                 if let Err(e) = | ||||
|                     mail::send_invite(&user, org_id.clone(), new_member.uuid.clone(), &org_name, Some(org_email)).await | ||||
|                 { | ||||
|   | ||||
| @@ -10,7 +10,7 @@ use crate::{ | ||||
|     auth::Headers, | ||||
|     crypto, | ||||
|     db::{ | ||||
|         models::{EventType, TwoFactor, TwoFactorType, User, UserId}, | ||||
|         models::{DeviceId, EventType, TwoFactor, TwoFactorType, User, UserId}, | ||||
|         DbConn, | ||||
|     }, | ||||
|     error::{Error, MapResult}, | ||||
| @@ -24,11 +24,16 @@ pub fn routes() -> Vec<Route> { | ||||
| #[derive(Deserialize)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| struct SendEmailLoginData { | ||||
|     // DeviceIdentifier: String, // Currently not used | ||||
|     #[serde(alias = "DeviceIdentifier")] | ||||
|     device_identifier: DeviceId, | ||||
|  | ||||
|     #[allow(unused)] | ||||
|     #[serde(alias = "Email")] | ||||
|     email: String, | ||||
|     email: Option<String>, | ||||
|  | ||||
|     #[allow(unused)] | ||||
|     #[serde(alias = "MasterPasswordHash")] | ||||
|     master_password_hash: String, | ||||
|     master_password_hash: Option<String>, | ||||
| } | ||||
|  | ||||
| /// User is trying to login and wants to use email 2FA. | ||||
| @@ -40,15 +45,10 @@ async fn send_email_login(data: Json<SendEmailLoginData>, mut conn: DbConn) -> E | ||||
|     use crate::db::models::User; | ||||
|  | ||||
|     // Get the user | ||||
|     let Some(user) = User::find_by_mail(&data.email, &mut conn).await else { | ||||
|         err!("Username or password is incorrect. Try again.") | ||||
|     let Some(user) = User::find_by_device_id(&data.device_identifier, &mut conn).await else { | ||||
|         err!("Cannot find user. Try again.") | ||||
|     }; | ||||
|  | ||||
|     // Check password | ||||
|     if !user.check_valid_password(&data.master_password_hash) { | ||||
|         err!("Username or password is incorrect. Try again.") | ||||
|     } | ||||
|  | ||||
|     if !CONFIG._enable_email_2fa() { | ||||
|         err!("Email 2FA is disabled") | ||||
|     } | ||||
|   | ||||
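The reworked `SendEmailLoginData` above now looks the user up by `device_identifier` and makes `email` / `master_password_hash` optional. A self-contained sketch of how those serde attributes behave (a plain `String` stands in for `DeviceId`; assumes the `serde` derive feature and `serde_json`):

```rust
// Standalone sketch of the deserialization behaviour of the struct above:
// `rename_all = "camelCase"` plus `alias` accepts both key styles, and the
// Option fields may simply be absent from the payload.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct SendEmailLoginData {
    #[serde(alias = "DeviceIdentifier")]
    device_identifier: String,
    #[serde(alias = "Email")]
    email: Option<String>,
    #[serde(alias = "MasterPasswordHash")]
    master_password_hash: Option<String>,
}

fn main() {
    // Older clients: PascalCase keys, all fields present.
    let old = r#"{"DeviceIdentifier":"dev-1","Email":"user@example.com","MasterPasswordHash":"hash"}"#;
    // Newer flow: camelCase, only the device identifier is sent.
    let new = r#"{"deviceIdentifier":"dev-1"}"#;

    let a: SendEmailLoginData = serde_json::from_str(old).unwrap();
    let b: SendEmailLoginData = serde_json::from_str(new).unwrap();
    println!("{a:?}");
    println!("{b:?}");
}
```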
| @@ -1,15 +1,10 @@ | ||||
| use rocket::serde::json::Json; | ||||
| use rocket::Route; | ||||
| use serde_json::Value; | ||||
| use url::Url; | ||||
| use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState, RegistrationState, Webauthn}; | ||||
|  | ||||
| use crate::{ | ||||
|     api::{ | ||||
|         core::{log_user_event, two_factor::_generate_recover_code}, | ||||
|         EmptyResult, JsonResult, PasswordOrOtpData, | ||||
|     }, | ||||
|     auth::Headers, | ||||
|     crypto::ct_eq, | ||||
|     db::{ | ||||
|         models::{EventType, TwoFactor, TwoFactorType, UserId}, | ||||
|         DbConn, | ||||
| @@ -18,6 +13,37 @@ use crate::{ | ||||
|     util::NumberOrString, | ||||
|     CONFIG, | ||||
| }; | ||||
| use rocket::serde::json::Json; | ||||
| use rocket::Route; | ||||
| use serde_json::Value; | ||||
| use std::str::FromStr; | ||||
| use std::sync::{Arc, LazyLock}; | ||||
| use std::time::Duration; | ||||
| use url::Url; | ||||
| use uuid::Uuid; | ||||
| use webauthn_rs::prelude::{Base64UrlSafeData, Credential, Passkey, PasskeyAuthentication, PasskeyRegistration}; | ||||
| use webauthn_rs::{Webauthn, WebauthnBuilder}; | ||||
| use webauthn_rs_proto::{ | ||||
|     AuthenticationExtensionsClientOutputs, AuthenticatorAssertionResponseRaw, AuthenticatorAttestationResponseRaw, | ||||
|     PublicKeyCredential, RegisterPublicKeyCredential, RegistrationExtensionsClientOutputs, | ||||
|     RequestAuthenticationExtensions, UserVerificationPolicy, | ||||
| }; | ||||
|  | ||||
| pub static WEBAUTHN_2FA_CONFIG: LazyLock<Arc<Webauthn>> = LazyLock::new(|| { | ||||
|     let domain = CONFIG.domain(); | ||||
|     let domain_origin = CONFIG.domain_origin(); | ||||
|     let rp_id = Url::parse(&domain).map(|u| u.domain().map(str::to_owned)).ok().flatten().unwrap_or_default(); | ||||
|     let rp_origin = Url::parse(&domain_origin).unwrap(); | ||||
|  | ||||
|     let webauthn = WebauthnBuilder::new(&rp_id, &rp_origin) | ||||
|         .expect("Creating WebauthnBuilder failed") | ||||
|         .rp_name(&domain) | ||||
|         .timeout(Duration::from_millis(60000)); | ||||
|  | ||||
|     Arc::new(webauthn.build().expect("Building Webauthn failed")) | ||||
| }); | ||||
|  | ||||
| pub type Webauthn2FaConfig<'a> = &'a rocket::State<Arc<Webauthn>>; | ||||
|  | ||||
| pub fn routes() -> Vec<Route> { | ||||
|     routes![get_webauthn, generate_webauthn_challenge, activate_webauthn, activate_webauthn_put, delete_webauthn,] | ||||
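The `WEBAUTHN_2FA_CONFIG` LazyLock and the `Webauthn2FaConfig<'a> = &'a rocket::State<Arc<Webauthn>>` alias above imply that the verifier is built once and handed to Rocket as managed state, so handlers can request it as a parameter. A hedged, standalone sketch of that pattern with a dummy type; how vaultwarden actually registers the state is not shown in this diff.

```rust
// Hypothetical sketch of the LazyLock + managed-state pattern; `Verifier` is a
// dummy stand-in for the shared webauthn_rs::Webauthn instance.
#[macro_use]
extern crate rocket;

use std::sync::{Arc, LazyLock};

use rocket::State;

struct Verifier {
    rp_id: String,
}

static VERIFIER: LazyLock<Arc<Verifier>> = LazyLock::new(|| {
    // Built once, on first use, from configuration.
    Arc::new(Verifier {
        rp_id: "vault.example.com".to_string(),
    })
});

type VerifierConfig<'a> = &'a State<Arc<Verifier>>;

#[get("/rp-id")]
fn rp_id(verifier: VerifierConfig<'_>) -> String {
    verifier.rp_id.clone()
}

#[launch]
fn rocket() -> _ {
    // Cloning the Arc hands a shared handle to Rocket's managed state.
    rocket::build().manage(Arc::clone(&*VERIFIER)).mount("/", routes![rp_id])
}
```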
| @@ -45,52 +71,13 @@ pub struct U2FRegistration { | ||||
|     pub migrated: Option<bool>, | ||||
| } | ||||
|  | ||||
| struct WebauthnConfig { | ||||
|     url: String, | ||||
|     origin: Url, | ||||
|     rpid: String, | ||||
| } | ||||
|  | ||||
| impl WebauthnConfig { | ||||
|     fn load() -> Webauthn<Self> { | ||||
|         let domain = CONFIG.domain(); | ||||
|         let domain_origin = CONFIG.domain_origin(); | ||||
|         Webauthn::new(Self { | ||||
|             rpid: Url::parse(&domain).map(|u| u.domain().map(str::to_owned)).ok().flatten().unwrap_or_default(), | ||||
|             url: domain, | ||||
|             origin: Url::parse(&domain_origin).unwrap(), | ||||
|         }) | ||||
|     } | ||||
| } | ||||
|  | ||||
| impl webauthn_rs::WebauthnConfig for WebauthnConfig { | ||||
|     fn get_relying_party_name(&self) -> &str { | ||||
|         &self.url | ||||
|     } | ||||
|  | ||||
|     fn get_origin(&self) -> &Url { | ||||
|         &self.origin | ||||
|     } | ||||
|  | ||||
|     fn get_relying_party_id(&self) -> &str { | ||||
|         &self.rpid | ||||
|     } | ||||
|  | ||||
|     /// We have WebAuthn configured to discourage user verification | ||||
|     /// if we leave this enabled, it will cause verification issues when a keys send UV=1. | ||||
|     /// Upstream (the library they use) ignores this when set to discouraged, so we should too. | ||||
|     fn get_require_uv_consistency(&self) -> bool { | ||||
|         false | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| pub struct WebauthnRegistration { | ||||
|     pub id: i32, | ||||
|     pub name: String, | ||||
|     pub migrated: bool, | ||||
|  | ||||
|     pub credential: Credential, | ||||
|     pub credential: Passkey, | ||||
| } | ||||
|  | ||||
| impl WebauthnRegistration { | ||||
| @@ -101,6 +88,24 @@ impl WebauthnRegistration { | ||||
|             "migrated": self.migrated, | ||||
|         }) | ||||
|     } | ||||
|  | ||||
|     fn set_backup_eligible(&mut self, backup_eligible: bool, backup_state: bool) -> bool { | ||||
|         let mut changed = false; | ||||
|         let mut cred: Credential = self.credential.clone().into(); | ||||
|  | ||||
|         if cred.backup_state != backup_state { | ||||
|             cred.backup_state = backup_state; | ||||
|             changed = true; | ||||
|         } | ||||
|  | ||||
|         if backup_eligible && !cred.backup_eligible { | ||||
|             cred.backup_eligible = true; | ||||
|             changed = true; | ||||
|         } | ||||
|  | ||||
|         self.credential = cred.into(); | ||||
|         changed | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[post("/two-factor/get-webauthn", data = "<data>")] | ||||
| @@ -125,7 +130,12 @@ async fn get_webauthn(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: | ||||
| } | ||||
|  | ||||
| #[post("/two-factor/get-webauthn-challenge", data = "<data>")] | ||||
| async fn generate_webauthn_challenge(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
| async fn generate_webauthn_challenge( | ||||
|     data: Json<PasswordOrOtpData>, | ||||
|     headers: Headers, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     mut conn: DbConn, | ||||
| ) -> JsonResult { | ||||
|     let data: PasswordOrOtpData = data.into_inner(); | ||||
|     let user = headers.user; | ||||
|  | ||||
| @@ -135,21 +145,30 @@ async fn generate_webauthn_challenge(data: Json<PasswordOrOtpData>, headers: Hea | ||||
|         .await? | ||||
|         .1 | ||||
|         .into_iter() | ||||
|         .map(|r| r.credential.cred_id) // We return the credentialIds to the clients to avoid double registering | ||||
|         .map(|r| r.credential.cred_id().to_owned()) // We return the credentialIds to the clients to avoid double registering | ||||
|         .collect(); | ||||
|  | ||||
|     let (challenge, state) = WebauthnConfig::load().generate_challenge_register_options( | ||||
|         user.uuid.as_bytes().to_vec(), | ||||
|         user.email, | ||||
|         user.name, | ||||
|     let (mut challenge, state) = webauthn.start_passkey_registration( | ||||
|         Uuid::from_str(&user.uuid).expect("Failed to parse UUID"), // Should never fail | ||||
|         &user.email, | ||||
|         &user.name, | ||||
|         Some(registrations), | ||||
|         None, | ||||
|         None, | ||||
|     )?; | ||||
|  | ||||
|     let mut state = serde_json::to_value(&state)?; | ||||
|     state["rs"]["policy"] = Value::String("discouraged".to_string()); | ||||
|     state["rs"]["extensions"].as_object_mut().unwrap().clear(); | ||||
|  | ||||
|     let type_ = TwoFactorType::WebauthnRegisterChallenge; | ||||
|     TwoFactor::new(user.uuid.clone(), type_, serde_json::to_string(&state)?).save(&mut conn).await?; | ||||
|  | ||||
|     // Because this flow abuses passkeys as 2FA and uses them more like a security key, | ||||
|     // we need to modify some of the default settings defined by `start_passkey_registration()`. | ||||
|     challenge.public_key.extensions = None; | ||||
|     if let Some(asc) = challenge.public_key.authenticator_selection.as_mut() { | ||||
|         asc.user_verification = UserVerificationPolicy::Discouraged_DO_NOT_USE; | ||||
|     } | ||||
|  | ||||
|     let mut challenge_value = serde_json::to_value(challenge.public_key)?; | ||||
|     challenge_value["status"] = "ok".into(); | ||||
|     challenge_value["errorMessage"] = "".into(); | ||||
| @@ -193,8 +212,10 @@ impl From<RegisterPublicKeyCredentialCopy> for RegisterPublicKeyCredential { | ||||
|             response: AuthenticatorAttestationResponseRaw { | ||||
|                 attestation_object: r.response.attestation_object, | ||||
|                 client_data_json: r.response.client_data_json, | ||||
|                 transports: None, | ||||
|             }, | ||||
|             type_: r.r#type, | ||||
|             extensions: RegistrationExtensionsClientOutputs::default(), | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -205,7 +226,7 @@ pub struct PublicKeyCredentialCopy { | ||||
|     pub id: String, | ||||
|     pub raw_id: Base64UrlSafeData, | ||||
|     pub response: AuthenticatorAssertionResponseRawCopy, | ||||
|     pub extensions: Option<AuthenticationExtensionsClientOutputs>, | ||||
|     pub extensions: AuthenticationExtensionsClientOutputs, | ||||
|     pub r#type: String, | ||||
| } | ||||
|  | ||||
| @@ -238,7 +259,12 @@ impl From<PublicKeyCredentialCopy> for PublicKeyCredential { | ||||
| } | ||||
|  | ||||
| #[post("/two-factor/webauthn", data = "<data>")] | ||||
| async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut conn: DbConn) -> JsonResult { | ||||
| async fn activate_webauthn( | ||||
|     data: Json<EnableWebauthnData>, | ||||
|     headers: Headers, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     mut conn: DbConn, | ||||
| ) -> JsonResult { | ||||
|     let data: EnableWebauthnData = data.into_inner(); | ||||
|     let mut user = headers.user; | ||||
|  | ||||
| @@ -253,7 +279,7 @@ async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut | ||||
|     let type_ = TwoFactorType::WebauthnRegisterChallenge as i32; | ||||
|     let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await { | ||||
|         Some(tf) => { | ||||
|             let state: RegistrationState = serde_json::from_str(&tf.data)?; | ||||
|             let state: PasskeyRegistration = serde_json::from_str(&tf.data)?; | ||||
|             tf.delete(&mut conn).await?; | ||||
|             state | ||||
|         } | ||||
| @@ -261,8 +287,7 @@ async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut | ||||
|     }; | ||||
|  | ||||
|     // Verify the credentials with the saved state | ||||
|     let (credential, _data) = | ||||
|         WebauthnConfig::load().register_credential(&data.device_response.into(), &state, |_| Ok(false))?; | ||||
|     let credential = webauthn.finish_passkey_registration(&data.device_response.into(), &state)?; | ||||
|  | ||||
|     let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1; | ||||
|     // TODO: Check for repeated ID's | ||||
| @@ -291,8 +316,13 @@ async fn activate_webauthn(data: Json<EnableWebauthnData>, headers: Headers, mut | ||||
| } | ||||
|  | ||||
| #[put("/two-factor/webauthn", data = "<data>")] | ||||
| async fn activate_webauthn_put(data: Json<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult { | ||||
|     activate_webauthn(data, headers, conn).await | ||||
| async fn activate_webauthn_put( | ||||
|     data: Json<EnableWebauthnData>, | ||||
|     headers: Headers, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     conn: DbConn, | ||||
| ) -> JsonResult { | ||||
|     activate_webauthn(data, headers, webauthn, conn).await | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Deserialize)] | ||||
| @@ -335,7 +365,7 @@ async fn delete_webauthn(data: Json<DeleteU2FData>, headers: Headers, mut conn: | ||||
|             Err(_) => err!("Error parsing U2F data"), | ||||
|         }; | ||||
|  | ||||
|         data.retain(|r| r.reg.key_handle != removed_item.credential.cred_id); | ||||
|         data.retain(|r| r.reg.key_handle != removed_item.credential.cred_id().as_slice()); | ||||
|         let new_data_str = serde_json::to_string(&data)?; | ||||
|  | ||||
|         u2f.data = new_data_str; | ||||
| @@ -362,9 +392,13 @@ pub async fn get_webauthn_registrations( | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub async fn generate_webauthn_login(user_id: &UserId, conn: &mut DbConn) -> JsonResult { | ||||
| pub async fn generate_webauthn_login( | ||||
|     user_id: &UserId, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> JsonResult { | ||||
|     // Load saved credentials | ||||
|     let creds: Vec<Credential> = | ||||
|     let creds: Vec<Passkey> = | ||||
|         get_webauthn_registrations(user_id, conn).await?.1.into_iter().map(|r| r.credential).collect(); | ||||
|  | ||||
|     if creds.is_empty() { | ||||
| @@ -372,8 +406,26 @@ pub async fn generate_webauthn_login(user_id: &UserId, conn: &mut DbConn) -> Jso | ||||
|     } | ||||
|  | ||||
|     // Generate a challenge based on the credentials | ||||
|     let ext = RequestAuthenticationExtensions::builder().appid(format!("{}/app-id.json", &CONFIG.domain())).build(); | ||||
|     let (response, state) = WebauthnConfig::load().generate_challenge_authenticate_options(creds, Some(ext))?; | ||||
|     let (mut response, state) = webauthn.start_passkey_authentication(&creds)?; | ||||
|  | ||||
|     // Modify to discourage user verification | ||||
|     let mut state = serde_json::to_value(&state)?; | ||||
|     state["ast"]["policy"] = Value::String("discouraged".to_string()); | ||||
|  | ||||
|     // Add appid, this is only needed for U2F compatibility, so maybe it can be removed as well | ||||
|     let app_id = format!("{}/app-id.json", &CONFIG.domain()); | ||||
|     state["ast"]["appid"] = Value::String(app_id.clone()); | ||||
|  | ||||
|     response.public_key.user_verification = UserVerificationPolicy::Discouraged_DO_NOT_USE; | ||||
|     response | ||||
|         .public_key | ||||
|         .extensions | ||||
|         .get_or_insert(RequestAuthenticationExtensions { | ||||
|             appid: None, | ||||
|             uvm: None, | ||||
|             hmac_get_secret: None, | ||||
|         }) | ||||
|         .appid = Some(app_id); | ||||
|  | ||||
|     // Save the challenge state for later validation | ||||
|     TwoFactor::new(user_id.clone(), TwoFactorType::WebauthnLoginChallenge, serde_json::to_string(&state)?) | ||||
| @@ -384,11 +436,16 @@ pub async fn generate_webauthn_login(user_id: &UserId, conn: &mut DbConn) -> Jso | ||||
|     Ok(Json(serde_json::to_value(response.public_key)?)) | ||||
| } | ||||
|  | ||||
| pub async fn validate_webauthn_login(user_id: &UserId, response: &str, conn: &mut DbConn) -> EmptyResult { | ||||
| pub async fn validate_webauthn_login( | ||||
|     user_id: &UserId, | ||||
|     response: &str, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> EmptyResult { | ||||
|     let type_ = TwoFactorType::WebauthnLoginChallenge as i32; | ||||
|     let state = match TwoFactor::find_by_user_and_type(user_id, type_, conn).await { | ||||
|     let mut state = match TwoFactor::find_by_user_and_type(user_id, type_, conn).await { | ||||
|         Some(tf) => { | ||||
|             let state: AuthenticationState = serde_json::from_str(&tf.data)?; | ||||
|             let state: PasskeyAuthentication = serde_json::from_str(&tf.data)?; | ||||
|             tf.delete(conn).await?; | ||||
|             state | ||||
|         } | ||||
| @@ -405,17 +462,22 @@ pub async fn validate_webauthn_login(user_id: &UserId, response: &str, conn: &mu | ||||
|  | ||||
|     let mut registrations = get_webauthn_registrations(user_id, conn).await?.1; | ||||
|  | ||||
|     // If the credential we received is migrated from U2F, enable the U2F compatibility | ||||
|     //let use_u2f = registrations.iter().any(|r| r.migrated && r.credential.cred_id == rsp.raw_id.0); | ||||
|     let (cred_id, auth_data) = WebauthnConfig::load().authenticate_credential(&rsp, &state)?; | ||||
|     // We need to check for and update the backup_eligible flag when needed. | ||||
|     // Vaultwarden did not have knowledge of this flag prior to migrating to webauthn-rs v0.5.x | ||||
|     // Because of this we check the flag at runtime and update the registrations and state when needed | ||||
|     check_and_update_backup_eligible(user_id, &rsp, &mut registrations, &mut state, conn).await?; | ||||
|  | ||||
|     let authentication_result = webauthn.finish_passkey_authentication(&rsp, &state)?; | ||||
|  | ||||
|     for reg in &mut registrations { | ||||
|         if ®.credential.cred_id == cred_id { | ||||
|             reg.credential.counter = auth_data.counter; | ||||
|  | ||||
|             TwoFactor::new(user_id.clone(), TwoFactorType::Webauthn, serde_json::to_string(®istrations)?) | ||||
|                 .save(conn) | ||||
|                 .await?; | ||||
|         if ct_eq(reg.credential.cred_id(), authentication_result.cred_id()) { | ||||
|             // If the cred id matches and the credential is updated, Some(true) is returned | ||||
|             // In those cases, update the record, else leave it alone | ||||
|             if reg.credential.update_credential(&authentication_result) == Some(true) { | ||||
|                 TwoFactor::new(user_id.clone(), TwoFactorType::Webauthn, serde_json::to_string(®istrations)?) | ||||
|                     .save(conn) | ||||
|                     .await?; | ||||
|             } | ||||
|             return Ok(()); | ||||
|         } | ||||
|     } | ||||
| @@ -427,3 +489,66 @@ pub async fn validate_webauthn_login(user_id: &UserId, response: &str, conn: &mu | ||||
|         } | ||||
|     ) | ||||
| } | ||||
|  | ||||
| async fn check_and_update_backup_eligible( | ||||
|     user_id: &UserId, | ||||
|     rsp: &PublicKeyCredential, | ||||
|     registrations: &mut Vec<WebauthnRegistration>, | ||||
|     state: &mut PasskeyAuthentication, | ||||
|     conn: &mut DbConn, | ||||
| ) -> EmptyResult { | ||||
|     // The feature flags from the response | ||||
|     // For details see: https://www.w3.org/TR/webauthn-3/#sctn-authenticator-data | ||||
|     const FLAG_BACKUP_ELIGIBLE: u8 = 0b0000_1000; | ||||
|     const FLAG_BACKUP_STATE: u8 = 0b0001_0000; | ||||
|  | ||||
|     if let Some(bits) = rsp.response.authenticator_data.get(32) { | ||||
|         let backup_eligible = 0 != (bits & FLAG_BACKUP_ELIGIBLE); | ||||
|         let backup_state = 0 != (bits & FLAG_BACKUP_STATE); | ||||
|  | ||||
|         // If the current key is backup eligible, then we probably need to update one of the keys already stored in the database | ||||
|         // This is needed because Vaultwarden didn't store this information when using the previous version of webauthn-rs since it was a new addition to the protocol | ||||
|         // Because we store multiple keys in one json string, we need to fetch the correct key first, and update its information before we let it verify | ||||
|         if backup_eligible { | ||||
|             let rsp_id = rsp.raw_id.as_slice(); | ||||
|             for reg in &mut *registrations { | ||||
|                 if ct_eq(reg.credential.cred_id().as_slice(), rsp_id) { | ||||
|                     // Try to update the key, and if needed also update the database, before the actual state check is done | ||||
|                     if reg.set_backup_eligible(backup_eligible, backup_state) { | ||||
|                         TwoFactor::new( | ||||
|                             user_id.clone(), | ||||
|                             TwoFactorType::Webauthn, | ||||
|                             serde_json::to_string(®istrations)?, | ||||
|                         ) | ||||
|                         .save(conn) | ||||
|                         .await?; | ||||
|  | ||||
|                         // We also need to adjust the current state which holds the challenge used to start the authentication verification | ||||
|                         // Because Vaultwarden supports multiple keys, we need to loop through the deserialized state and check which key to update | ||||
|                         let mut raw_state = serde_json::to_value(&state)?; | ||||
|                         if let Some(credentials) = raw_state | ||||
|                             .get_mut("ast") | ||||
|                             .and_then(|v| v.get_mut("credentials")) | ||||
|                             .and_then(|v| v.as_array_mut()) | ||||
|                         { | ||||
|                             for cred in credentials.iter_mut() { | ||||
|                                 if cred.get("cred_id").is_some_and(|v| { | ||||
|                                     // Deserialize to a [u8] so it can be compared using `ct_eq` with the `rsp_id` | ||||
|                                     let cred_id_slice: Base64UrlSafeData = serde_json::from_value(v.clone()).unwrap(); | ||||
|                                     ct_eq(cred_id_slice, rsp_id) | ||||
|                                 }) { | ||||
|                                     cred["backup_eligible"] = Value::Bool(backup_eligible); | ||||
|                                     cred["backup_state"] = Value::Bool(backup_state); | ||||
|                                 } | ||||
|                             } | ||||
|                         } | ||||
|  | ||||
|                         *state = serde_json::from_value(raw_state)?; | ||||
|                     } | ||||
|                     break; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
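As a side note, a minimal standalone sketch of the flag check performed above; the flags byte value is hypothetical, only the BE/BS bit positions come from the WebAuthn spec referenced in the comment.

// Sketch of the authenticator-data flag check (hypothetical flags value).
// The flags byte sits at offset 32 of the authenticator data, right after the 32-byte rpIdHash.
const FLAG_BACKUP_ELIGIBLE: u8 = 0b0000_1000; // BE bit
const FLAG_BACKUP_STATE: u8 = 0b0001_0000; // BS bit

fn main() {
    // Hypothetical flags byte with UP, UV, BE and BS set.
    let flags: u8 = 0b0001_1101;
    let backup_eligible = (flags & FLAG_BACKUP_ELIGIBLE) != 0;
    let backup_state = (flags & FLAG_BACKUP_STATE) != 0;
    assert!(backup_eligible && backup_state);
    println!("backup_eligible={backup_eligible}, backup_state={backup_state}");
}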
|   | ||||
| @@ -641,9 +641,9 @@ async fn stream_to_bytes_limit(res: Response, max_size: usize) -> Result<Bytes, | ||||
|     let mut buf = BytesMut::new(); | ||||
|     let mut size = 0; | ||||
|     while let Some(chunk) = stream.next().await { | ||||
|         // It is possible that there might occure UnexpectedEof errors or others | ||||
|         // It is possible that there might occur UnexpectedEof errors or others | ||||
|         // This is most of the time no issue, and if there is no chunked data anymore or at all parsing the HTML will not happen anyway. | ||||
|         // Therfore if chunk is an err, just break and continue with the data be have received. | ||||
|         // Therefore if chunk is an err, just break and continue with the data we have received. | ||||
|         if chunk.is_err() { | ||||
|             break; | ||||
|         } | ||||
|   | ||||
| @@ -1,16 +1,19 @@ | ||||
| use chrono::Utc; | ||||
| use chrono::{NaiveDateTime, Utc}; | ||||
| use num_traits::FromPrimitive; | ||||
| use rocket::serde::json::Json; | ||||
| use rocket::{ | ||||
|     form::{Form, FromForm}, | ||||
|     http::Status, | ||||
|     response::Redirect, | ||||
|     serde::json::Json, | ||||
|     Route, | ||||
| }; | ||||
| use serde_json::Value; | ||||
|  | ||||
| use crate::api::core::two_factor::webauthn::Webauthn2FaConfig; | ||||
| use crate::{ | ||||
|     api::{ | ||||
|         core::{ | ||||
|             accounts::{PreloginData, RegisterData, _prelogin, _register}, | ||||
|             accounts::{PreloginData, RegisterData, _prelogin, _register, kdf_upgrade}, | ||||
|             log_user_event, | ||||
|             two_factor::{authenticator, duo, duo_oidc, email, enforce_2fa_policy, webauthn, yubikey}, | ||||
|         }, | ||||
| @@ -18,14 +21,27 @@ use crate::{ | ||||
|         push::register_push_device, | ||||
|         ApiResult, EmptyResult, JsonResult, | ||||
|     }, | ||||
|     auth::{generate_organization_api_key_login_claims, ClientHeaders, ClientIp, ClientVersion}, | ||||
|     auth, | ||||
|     auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion}, | ||||
|     db::{models::*, DbConn}, | ||||
|     error::MapResult, | ||||
|     mail, util, CONFIG, | ||||
|     mail, sso, | ||||
|     sso::{OIDCCode, OIDCState}, | ||||
|     util, CONFIG, | ||||
| }; | ||||
|  | ||||
| pub fn routes() -> Vec<Route> { | ||||
|     routes![login, prelogin, identity_register, register_verification_email, register_finish] | ||||
|     routes![ | ||||
|         login, | ||||
|         prelogin, | ||||
|         identity_register, | ||||
|         register_verification_email, | ||||
|         register_finish, | ||||
|         prevalidate, | ||||
|         authorize, | ||||
|         oidcsignin, | ||||
|         oidcsignin_error | ||||
|     ] | ||||
| } | ||||
|  | ||||
| #[post("/connect/token", data = "<data>")] | ||||
| @@ -33,6 +49,7 @@ async fn login( | ||||
|     data: Form<ConnectData>, | ||||
|     client_header: ClientHeaders, | ||||
|     client_version: Option<ClientVersion>, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     mut conn: DbConn, | ||||
| ) -> JsonResult { | ||||
|     let data: ConnectData = data.into_inner(); | ||||
| @@ -42,8 +59,9 @@ async fn login( | ||||
|     let login_result = match data.grant_type.as_ref() { | ||||
|         "refresh_token" => { | ||||
|             _check_is_some(&data.refresh_token, "refresh_token cannot be blank")?; | ||||
|             _refresh_login(data, &mut conn).await | ||||
|             _refresh_login(data, &mut conn, &client_header.ip).await | ||||
|         } | ||||
|         "password" if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO sign-in is required"), | ||||
|         "password" => { | ||||
|             _check_is_some(&data.client_id, "client_id cannot be blank")?; | ||||
|             _check_is_some(&data.password, "password cannot be blank")?; | ||||
| @@ -54,7 +72,7 @@ async fn login( | ||||
|             _check_is_some(&data.device_name, "device_name cannot be blank")?; | ||||
|             _check_is_some(&data.device_type, "device_type cannot be blank")?; | ||||
|  | ||||
|             _password_login(data, &mut user_id, &mut conn, &client_header.ip, &client_version).await | ||||
|             _password_login(data, &mut user_id, &mut conn, &client_header.ip, &client_version, webauthn).await | ||||
|         } | ||||
|         "client_credentials" => { | ||||
|             _check_is_some(&data.client_id, "client_id cannot be blank")?; | ||||
| @@ -67,6 +85,17 @@ async fn login( | ||||
|  | ||||
|             _api_key_login(data, &mut user_id, &mut conn, &client_header.ip).await | ||||
|         } | ||||
|         "authorization_code" if CONFIG.sso_enabled() => { | ||||
|             _check_is_some(&data.client_id, "client_id cannot be blank")?; | ||||
|             _check_is_some(&data.code, "code cannot be blank")?; | ||||
|  | ||||
|             _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; | ||||
|             _check_is_some(&data.device_name, "device_name cannot be blank")?; | ||||
|             _check_is_some(&data.device_type, "device_type cannot be blank")?; | ||||
|  | ||||
|             _sso_login(data, &mut user_id, &mut conn, &client_header.ip, &client_version, webauthn).await | ||||
|         } | ||||
|         "authorization_code" => err!("SSO sign-in is not available"), | ||||
|         t => err!("Invalid type", t), | ||||
|     }; | ||||
|  | ||||
| @@ -100,37 +129,194 @@ async fn login( | ||||
|     login_result | ||||
| } | ||||
|  | ||||
| async fn _refresh_login(data: ConnectData, conn: &mut DbConn) -> JsonResult { | ||||
| // Return Status::Unauthorized to trigger logout | ||||
| async fn _refresh_login(data: ConnectData, conn: &mut DbConn, ip: &ClientIp) -> JsonResult { | ||||
|     // Extract token | ||||
|     let token = data.refresh_token.unwrap(); | ||||
|     let refresh_token = match data.refresh_token { | ||||
|         Some(token) => token, | ||||
|         None => err_code!("Missing refresh_token", Status::Unauthorized.code), | ||||
|     }; | ||||
|  | ||||
|     // Get device by refresh token | ||||
|     let mut device = Device::find_by_refresh_token(&token, conn).await.map_res("Invalid refresh token")?; | ||||
|  | ||||
|     let scope = "api offline_access"; | ||||
|     let scope_vec = vec!["api".into(), "offline_access".into()]; | ||||
|  | ||||
|     // Common | ||||
|     let user = User::find_by_uuid(&device.user_uuid, conn).await.unwrap(); | ||||
|     // --- | ||||
|     // Disabled this variable, it was used to generate the JWT | ||||
|     // Because this might get used in the future, and is added by the Bitwarden Server, let's keep it, but commented out | ||||
|     // See: https://github.com/dani-garcia/vaultwarden/issues/4156 | ||||
|     // --- | ||||
|     // let members = Membership::find_confirmed_by_user(&user.uuid, conn).await; | ||||
|     let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec, data.client_id); | ||||
|     device.save(conn).await?; | ||||
|     match auth::refresh_tokens(ip, &refresh_token, data.client_id, conn).await { | ||||
|         Err(err) => { | ||||
|             err_code!(format!("Unable to refresh login credentials: {}", err.message()), Status::Unauthorized.code) | ||||
|         } | ||||
|         Ok((mut device, auth_tokens)) => { | ||||
|             // Save to update `device.updated_at` to track usage and toggle new status | ||||
|             device.save(conn).await?; | ||||
|  | ||||
|     let result = json!({ | ||||
|         "access_token": access_token, | ||||
|         "expires_in": expires_in, | ||||
|         "token_type": "Bearer", | ||||
|         "refresh_token": device.refresh_token, | ||||
|             let result = json!({ | ||||
|                 "refresh_token": auth_tokens.refresh_token(), | ||||
|                 "access_token": auth_tokens.access_token(), | ||||
|                 "expires_in": auth_tokens.expires_in(), | ||||
|                 "token_type": "Bearer", | ||||
|                 "scope": auth_tokens.scope(), | ||||
|             }); | ||||
|  | ||||
|         "scope": scope, | ||||
|     }); | ||||
|             Ok(Json(result)) | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
|     Ok(Json(result)) | ||||
| // After exchanging the code we first need to check if 2FA is needed before continuing | ||||
| async fn _sso_login( | ||||
|     data: ConnectData, | ||||
|     user_id: &mut Option<UserId>, | ||||
|     conn: &mut DbConn, | ||||
|     ip: &ClientIp, | ||||
|     client_version: &Option<ClientVersion>, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
| ) -> JsonResult { | ||||
|     AuthMethod::Sso.check_scope(data.scope.as_ref())?; | ||||
|  | ||||
|     // Ratelimit the login | ||||
|     crate::ratelimit::check_limit_login(&ip.ip)?; | ||||
|  | ||||
|     let code = match data.code.as_ref() { | ||||
|         None => err!( | ||||
|             "Got no code in OIDC data", | ||||
|             ErrorEvent { | ||||
|                 event: EventType::UserFailedLogIn | ||||
|             } | ||||
|         ), | ||||
|         Some(code) => code, | ||||
|     }; | ||||
|  | ||||
|     let user_infos = sso::exchange_code(code, conn).await?; | ||||
|     let user_with_sso = match SsoUser::find_by_identifier(&user_infos.identifier, conn).await { | ||||
|         None => match SsoUser::find_by_mail(&user_infos.email, conn).await { | ||||
|             None => None, | ||||
|             Some((user, Some(_))) => { | ||||
|                 error!( | ||||
|                     "Login failure ({}), existing SSO user ({}) with same email ({})", | ||||
|                     user_infos.identifier, user.uuid, user.email | ||||
|                 ); | ||||
|                 err_silent!( | ||||
|                     "Existing SSO user with same email", | ||||
|                     ErrorEvent { | ||||
|                         event: EventType::UserFailedLogIn | ||||
|                     } | ||||
|                 ) | ||||
|             } | ||||
|             Some((user, None)) if user.private_key.is_some() && !CONFIG.sso_signups_match_email() => { | ||||
|                 error!( | ||||
|                     "Login failure ({}), existing non SSO user ({}) with same email ({}) and association is disabled", | ||||
|                     user_infos.identifier, user.uuid, user.email | ||||
|                 ); | ||||
|                 err_silent!( | ||||
|                     "Existing non SSO user with same email", | ||||
|                     ErrorEvent { | ||||
|                         event: EventType::UserFailedLogIn | ||||
|                     } | ||||
|                 ) | ||||
|             } | ||||
|             Some((user, None)) => Some((user, None)), | ||||
|         }, | ||||
|         Some((user, sso_user)) => Some((user, Some(sso_user))), | ||||
|     }; | ||||
|  | ||||
|     let now = Utc::now().naive_utc(); | ||||
|     // Will trigger 2FA flow if needed | ||||
|     let (user, mut device, twofactor_token, sso_user) = match user_with_sso { | ||||
|         None => { | ||||
|             if !CONFIG.is_email_domain_allowed(&user_infos.email) { | ||||
|                 err!( | ||||
|                     "Email domain not allowed", | ||||
|                     ErrorEvent { | ||||
|                         event: EventType::UserFailedLogIn | ||||
|                     } | ||||
|                 ); | ||||
|             } | ||||
|  | ||||
|             match user_infos.email_verified { | ||||
|                 None if !CONFIG.sso_allow_unknown_email_verification() => err!( | ||||
|                     "Your provider does not send email verification status.\n\ | ||||
|                     You will need to change the server configuration (check `SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION`) to log in.", | ||||
|                     ErrorEvent { | ||||
|                         event: EventType::UserFailedLogIn | ||||
|                     } | ||||
|                 ), | ||||
|                 Some(false) => err!( | ||||
|                     "You need to verify your email with your provider before you can log in", | ||||
|                     ErrorEvent { | ||||
|                         event: EventType::UserFailedLogIn | ||||
|                     } | ||||
|                 ), | ||||
|                 _ => (), | ||||
|             } | ||||
|  | ||||
|             let mut user = User::new(user_infos.email, user_infos.user_name); | ||||
|             user.verified_at = Some(now); | ||||
|             user.save(conn).await?; | ||||
|  | ||||
|             let device = get_device(&data, conn, &user).await?; | ||||
|  | ||||
|             (user, device, None, None) | ||||
|         } | ||||
|         Some((user, _)) if !user.enabled => { | ||||
|             err!( | ||||
|                 "This user has been disabled", | ||||
|                 format!("IP: {}. Username: {}.", ip.ip, user.name), | ||||
|                 ErrorEvent { | ||||
|                     event: EventType::UserFailedLogIn | ||||
|                 } | ||||
|             ) | ||||
|         } | ||||
|         Some((mut user, sso_user)) => { | ||||
|             let mut device = get_device(&data, conn, &user).await?; | ||||
|             let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, client_version, webauthn, conn).await?; | ||||
|  | ||||
|             if user.private_key.is_none() { | ||||
|                 // The user was invited; a stub was created | ||||
|                 user.verified_at = Some(now); | ||||
|                 if let Some(user_name) = user_infos.user_name { | ||||
|                     user.name = user_name; | ||||
|                 } | ||||
|  | ||||
|                 user.save(conn).await?; | ||||
|             } | ||||
|  | ||||
|             if user.email != user_infos.email { | ||||
|                 if CONFIG.mail_enabled() { | ||||
|                     mail::send_sso_change_email(&user_infos.email).await?; | ||||
|                 } | ||||
|                 info!("User {} email changed in SSO provider from {} to {}", user.uuid, user.email, user_infos.email); | ||||
|             } | ||||
|  | ||||
|             (user, device, twofactor_token, sso_user) | ||||
|         } | ||||
|     }; | ||||
|  | ||||
|     // 2FA passed, now get the full user information | ||||
|     let auth_user = sso::redeem(&user_infos.state, conn).await?; | ||||
|  | ||||
|     if sso_user.is_none() { | ||||
|         let user_sso = SsoUser { | ||||
|             user_uuid: user.uuid.clone(), | ||||
|             identifier: user_infos.identifier, | ||||
|         }; | ||||
|         user_sso.save(conn).await?; | ||||
|     } | ||||
|  | ||||
|     // Set the user_uuid here so it can be passed back and used for event logging. | ||||
|     *user_id = Some(user.uuid.clone()); | ||||
|  | ||||
|     let auth_tokens = sso::create_auth_tokens( | ||||
|         &device, | ||||
|         &user, | ||||
|         data.client_id, | ||||
|         auth_user.refresh_token, | ||||
|         auth_user.access_token, | ||||
|         auth_user.expires_in, | ||||
|     )?; | ||||
|  | ||||
|     authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await | ||||
| } | ||||
|  | ||||
| async fn _password_login( | ||||
| @@ -139,13 +325,10 @@ async fn _password_login( | ||||
|     conn: &mut DbConn, | ||||
|     ip: &ClientIp, | ||||
|     client_version: &Option<ClientVersion>, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
| ) -> JsonResult { | ||||
|     // Validate scope | ||||
|     let scope = data.scope.as_ref().unwrap(); | ||||
|     if scope != "api offline_access" { | ||||
|         err!("Scope not supported") | ||||
|     } | ||||
|     let scope_vec = vec!["api".into(), "offline_access".into()]; | ||||
|     AuthMethod::Password.check_scope(data.scope.as_ref())?; | ||||
|  | ||||
|     // Ratelimit the login | ||||
|     crate::ratelimit::check_limit_login(&ip.ip)?; | ||||
| @@ -212,13 +395,8 @@ async fn _password_login( | ||||
|     } | ||||
|  | ||||
|     // Change the KDF Iterations (only when not logging in with an auth request) | ||||
|     if data.auth_request.is_none() && user.password_iterations != CONFIG.password_iterations() { | ||||
|         user.password_iterations = CONFIG.password_iterations(); | ||||
|         user.set_password(password, None, false, None); | ||||
|  | ||||
|         if let Err(e) = user.save(conn).await { | ||||
|             error!("Error updating user: {e:#?}"); | ||||
|         } | ||||
|     if data.auth_request.is_none() { | ||||
|         kdf_upgrade(&mut user, password, conn).await?; | ||||
|     } | ||||
|  | ||||
|     let now = Utc::now().naive_utc(); | ||||
| @@ -255,12 +433,27 @@ async fn _password_login( | ||||
|         ) | ||||
|     } | ||||
|  | ||||
|     let (mut device, new_device) = get_device(&data, conn, &user).await; | ||||
|     let mut device = get_device(&data, conn, &user).await?; | ||||
|  | ||||
|     let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, client_version, conn).await?; | ||||
|     let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, client_version, webauthn, conn).await?; | ||||
|  | ||||
|     if CONFIG.mail_enabled() && new_device { | ||||
|         if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await { | ||||
|     let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password, data.client_id); | ||||
|  | ||||
|     authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await | ||||
| } | ||||
|  | ||||
| #[allow(clippy::too_many_arguments)] | ||||
| async fn authenticated_response( | ||||
|     user: &User, | ||||
|     device: &mut Device, | ||||
|     auth_tokens: auth::AuthTokens, | ||||
|     twofactor_token: Option<String>, | ||||
|     now: &NaiveDateTime, | ||||
|     conn: &mut DbConn, | ||||
|     ip: &ClientIp, | ||||
| ) -> JsonResult { | ||||
|     if CONFIG.mail_enabled() && device.is_new() { | ||||
|         if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, device).await { | ||||
|             error!("Error sending new device email: {e:#?}"); | ||||
|  | ||||
|             if CONFIG.require_device_email() { | ||||
| @@ -275,31 +468,21 @@ async fn _password_login( | ||||
|     } | ||||
|  | ||||
|     // register push device | ||||
|     if !new_device { | ||||
|         register_push_device(&mut device, conn).await?; | ||||
|     if !device.is_new() { | ||||
|         register_push_device(device, conn).await?; | ||||
|     } | ||||
|  | ||||
|     // Common | ||||
|     // --- | ||||
|     // Disabled this variable, it was used to generate the JWT | ||||
|     // Because this might get used in the future, and is added by the Bitwarden Server, let's keep it, but commented out | ||||
|     // See: https://github.com/dani-garcia/vaultwarden/issues/4156 | ||||
|     // --- | ||||
|     // let members = Membership::find_confirmed_by_user(&user.uuid, conn).await; | ||||
|     let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec, data.client_id); | ||||
|     // Save to update `device.updated_at` to track usage and toggle new status | ||||
|     device.save(conn).await?; | ||||
|  | ||||
|     let master_password_policy = master_password_policy(&user, conn).await; | ||||
|     let master_password_policy = master_password_policy(user, conn).await; | ||||
|  | ||||
|     let mut result = json!({ | ||||
|         "access_token": access_token, | ||||
|         "expires_in": expires_in, | ||||
|         "access_token": auth_tokens.access_token(), | ||||
|         "expires_in": auth_tokens.expires_in(), | ||||
|         "token_type": "Bearer", | ||||
|         "refresh_token": device.refresh_token, | ||||
|         "Key": user.akey, | ||||
|         "refresh_token": auth_tokens.refresh_token(), | ||||
|         "PrivateKey": user.private_key, | ||||
|         //"TwoFactorToken": "11122233333444555666777888999" | ||||
|  | ||||
|         "Kdf": user.client_kdf_type, | ||||
|         "KdfIterations": user.client_kdf_iter, | ||||
|         "KdfMemory": user.client_kdf_memory, | ||||
| @@ -307,19 +490,22 @@ async fn _password_login( | ||||
|         "ResetMasterPassword": false, // TODO: Same as above | ||||
|         "ForcePasswordReset": false, | ||||
|         "MasterPasswordPolicy": master_password_policy, | ||||
|  | ||||
|         "scope": scope, | ||||
|         "scope": auth_tokens.scope(), | ||||
|         "UserDecryptionOptions": { | ||||
|             "HasMasterPassword": !user.password_hash.is_empty(), | ||||
|             "Object": "userDecryptionOptions" | ||||
|         }, | ||||
|     }); | ||||
|  | ||||
|     if !user.akey.is_empty() { | ||||
|         result["Key"] = Value::String(user.akey.clone()); | ||||
|     } | ||||
|  | ||||
|     if let Some(token) = twofactor_token { | ||||
|         result["TwoFactorToken"] = Value::String(token); | ||||
|     } | ||||
|  | ||||
|     info!("User {username} logged in successfully. IP: {}", ip.ip); | ||||
|     info!("User {} logged in successfully. IP: {}", &user.name, ip.ip); | ||||
|     Ok(Json(result)) | ||||
| } | ||||
|  | ||||
| @@ -333,9 +519,9 @@ async fn _api_key_login( | ||||
|     crate::ratelimit::check_limit_login(&ip.ip)?; | ||||
|  | ||||
|     // Validate scope | ||||
|     match data.scope.as_ref().unwrap().as_ref() { | ||||
|         "api" => _user_api_key_login(data, user_id, conn, ip).await, | ||||
|         "api.organization" => _organization_api_key_login(data, conn, ip).await, | ||||
|     match data.scope.as_ref() { | ||||
|         Some(scope) if scope == &AuthMethod::UserApiKey.scope() => _user_api_key_login(data, user_id, conn, ip).await, | ||||
|         Some(scope) if scope == &AuthMethod::OrgApiKey.scope() => _organization_api_key_login(data, conn, ip).await, | ||||
|         _ => err!("Scope not supported"), | ||||
|     } | ||||
| } | ||||
| @@ -382,9 +568,9 @@ async fn _user_api_key_login( | ||||
|         ) | ||||
|     } | ||||
|  | ||||
|     let (mut device, new_device) = get_device(&data, conn, &user).await; | ||||
|     let mut device = get_device(&data, conn, &user).await?; | ||||
|  | ||||
|     if CONFIG.mail_enabled() && new_device { | ||||
|     if CONFIG.mail_enabled() && device.is_new() { | ||||
|         let now = Utc::now().naive_utc(); | ||||
|         if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await { | ||||
|             error!("Error sending new device email: {e:#?}"); | ||||
| @@ -400,15 +586,15 @@ async fn _user_api_key_login( | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Common | ||||
|     let scope_vec = vec!["api".into()]; | ||||
|     // --- | ||||
|     // Disabled this variable, it was used to generate the JWT | ||||
|     // Because this might get used in the future, and is added by the Bitwarden Server, let's keep it, but commented out | ||||
|     // See: https://github.com/dani-garcia/vaultwarden/issues/4156 | ||||
|     // --- | ||||
|     // let members = Membership::find_confirmed_by_user(&user.uuid, conn).await; | ||||
|     let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec, data.client_id); | ||||
|     // let orgs = Membership::find_confirmed_by_user(&user.uuid, conn).await; | ||||
|     let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey, data.client_id); | ||||
|  | ||||
|     // Save to update `device.updated_at` to track usage and toggle new status | ||||
|     device.save(conn).await?; | ||||
|  | ||||
|     info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); | ||||
| @@ -416,8 +602,8 @@ async fn _user_api_key_login( | ||||
|     // Note: No refresh_token is returned. The CLI just repeats the | ||||
|     // client_credentials login flow when the existing token expires. | ||||
|     let result = json!({ | ||||
|         "access_token": access_token, | ||||
|         "expires_in": expires_in, | ||||
|         "access_token": access_claims.token(), | ||||
|         "expires_in": access_claims.expires_in(), | ||||
|         "token_type": "Bearer", | ||||
|         "Key": user.akey, | ||||
|         "PrivateKey": user.private_key, | ||||
| @@ -427,7 +613,7 @@ async fn _user_api_key_login( | ||||
|         "KdfMemory": user.client_kdf_memory, | ||||
|         "KdfParallelism": user.client_kdf_parallelism, | ||||
|         "ResetMasterPassword": false, // TODO: according to the official server this seems to be something like user.password_hash.is_empty(), but it would need testing | ||||
|         "scope": "api", | ||||
|         "scope": AuthMethod::UserApiKey.scope(), | ||||
|     }); | ||||
|  | ||||
|     Ok(Json(result)) | ||||
| @@ -451,35 +637,29 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: & | ||||
|     } | ||||
|  | ||||
|     let claim = generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid); | ||||
|     let access_token = crate::auth::encode_jwt(&claim); | ||||
|     let access_token = auth::encode_jwt(&claim); | ||||
|  | ||||
|     Ok(Json(json!({ | ||||
|         "access_token": access_token, | ||||
|         "expires_in": 3600, | ||||
|         "token_type": "Bearer", | ||||
|         "scope": "api.organization", | ||||
|         "scope": AuthMethod::OrgApiKey.scope(), | ||||
|     }))) | ||||
| } | ||||
|  | ||||
| /// Retrieves an existing device or creates a new device from ConnectData and the User | ||||
| async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Device, bool) { | ||||
| async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> ApiResult<Device> { | ||||
|     // On iOS, device_type sends "iOS", on others it sends a number | ||||
|     // When unknown or unable to parse, return 14, which is 'Unknown Browser' | ||||
|     let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(14); | ||||
|     let device_id = data.device_identifier.clone().expect("No device id provided"); | ||||
|     let device_name = data.device_name.clone().expect("No device name provided"); | ||||
|  | ||||
|     let mut new_device = false; | ||||
|     // Find device or create new | ||||
|     let device = match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { | ||||
|         Some(device) => device, | ||||
|         None => { | ||||
|             new_device = true; | ||||
|             Device::new(device_id, user.uuid.clone(), device_name, device_type) | ||||
|         } | ||||
|     }; | ||||
|  | ||||
|     (device, new_device) | ||||
|     match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { | ||||
|         Some(device) => Ok(device), | ||||
|         None => Device::new(device_id, user.uuid.clone(), device_name, device_type, conn).await, | ||||
|     } | ||||
| } | ||||
|  | ||||
| async fn twofactor_auth( | ||||
| @@ -488,6 +668,7 @@ async fn twofactor_auth( | ||||
|     device: &mut Device, | ||||
|     ip: &ClientIp, | ||||
|     client_version: &Option<ClientVersion>, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> ApiResult<Option<String>> { | ||||
|     let twofactors = TwoFactor::find_by_user(&user.uuid, conn).await; | ||||
| @@ -507,7 +688,7 @@ async fn twofactor_auth( | ||||
|         Some(ref code) => code, | ||||
|         None => { | ||||
|             err_json!( | ||||
|                 _json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, conn).await?, | ||||
|                 _json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, webauthn, conn).await?, | ||||
|                 "2FA token not provided" | ||||
|             ) | ||||
|         } | ||||
| @@ -524,7 +705,9 @@ async fn twofactor_auth( | ||||
|         Some(TwoFactorType::Authenticator) => { | ||||
|             authenticator::validate_totp_code_str(&user.uuid, twofactor_code, &selected_data?, ip, conn).await? | ||||
|         } | ||||
|         Some(TwoFactorType::Webauthn) => webauthn::validate_webauthn_login(&user.uuid, twofactor_code, conn).await?, | ||||
|         Some(TwoFactorType::Webauthn) => { | ||||
|             webauthn::validate_webauthn_login(&user.uuid, twofactor_code, webauthn, conn).await? | ||||
|         } | ||||
|         Some(TwoFactorType::YubiKey) => yubikey::validate_yubikey_login(twofactor_code, &selected_data?).await?, | ||||
|         Some(TwoFactorType::Duo) => { | ||||
|             match CONFIG.duo_use_iframe() { | ||||
| @@ -556,7 +739,7 @@ async fn twofactor_auth( | ||||
|                 } | ||||
|                 _ => { | ||||
|                     err_json!( | ||||
|                         _json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, conn).await?, | ||||
|                         _json_err_twofactor(&twofactor_ids, &user.uuid, data, client_version, webauthn, conn).await?, | ||||
|                         "2FA Remember token not provided" | ||||
|                     ) | ||||
|                 } | ||||
| @@ -572,12 +755,13 @@ async fn twofactor_auth( | ||||
|  | ||||
|     TwoFactorIncomplete::mark_complete(&user.uuid, &device.uuid, conn).await?; | ||||
|  | ||||
|     if !CONFIG.disable_2fa_remember() && remember == 1 { | ||||
|         Ok(Some(device.refresh_twofactor_remember())) | ||||
|     let two_factor = if !CONFIG.disable_2fa_remember() && remember == 1 { | ||||
|         Some(device.refresh_twofactor_remember()) | ||||
|     } else { | ||||
|         device.delete_twofactor_remember(); | ||||
|         Ok(None) | ||||
|     } | ||||
|         None | ||||
|     }; | ||||
|     Ok(two_factor) | ||||
| } | ||||
|  | ||||
| fn _selected_data(tf: Option<TwoFactor>) -> ApiResult<String> { | ||||
| @@ -589,6 +773,7 @@ async fn _json_err_twofactor( | ||||
|     user_id: &UserId, | ||||
|     data: &ConnectData, | ||||
|     client_version: &Option<ClientVersion>, | ||||
|     webauthn: Webauthn2FaConfig<'_>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> ApiResult<Value> { | ||||
|     let mut result = json!({ | ||||
| @@ -608,7 +793,7 @@ async fn _json_err_twofactor( | ||||
|             Some(TwoFactorType::Authenticator) => { /* Nothing to do for TOTP */ } | ||||
|  | ||||
|             Some(TwoFactorType::Webauthn) if CONFIG.domain_set() => { | ||||
|                 let request = webauthn::generate_webauthn_login(user_id, conn).await?; | ||||
|                 let request = webauthn::generate_webauthn_login(user_id, webauthn, conn).await?; | ||||
|                 result["TwoFactorProviders2"][provider.to_string()] = request.0; | ||||
|             } | ||||
|  | ||||
| @@ -727,9 +912,8 @@ async fn register_verification_email( | ||||
|  | ||||
|     let should_send_mail = CONFIG.mail_enabled() && CONFIG.signups_verify(); | ||||
|  | ||||
|     let token_claims = | ||||
|         crate::auth::generate_register_verify_claims(data.email.clone(), data.name.clone(), should_send_mail); | ||||
|     let token = crate::auth::encode_jwt(&token_claims); | ||||
|     let token_claims = auth::generate_register_verify_claims(data.email.clone(), data.name.clone(), should_send_mail); | ||||
|     let token = auth::encode_jwt(&token_claims); | ||||
|  | ||||
|     if should_send_mail { | ||||
|         let user = User::find_by_mail(&data.email, &mut conn).await; | ||||
| @@ -812,11 +996,131 @@ struct ConnectData { | ||||
|     two_factor_remember: Option<i32>, | ||||
|     #[field(name = uncased("authrequest"))] | ||||
|     auth_request: Option<AuthRequestId>, | ||||
|     // Needed for authorization code | ||||
|     #[field(name = uncased("code"))] | ||||
|     code: Option<String>, | ||||
| } | ||||
|  | ||||
| fn _check_is_some<T>(value: &Option<T>, msg: &str) -> EmptyResult { | ||||
|     if value.is_none() { | ||||
|         err!(msg) | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| #[get("/sso/prevalidate")] | ||||
| fn prevalidate() -> JsonResult { | ||||
|     if CONFIG.sso_enabled() { | ||||
|         let sso_token = sso::encode_ssotoken_claims(); | ||||
|         Ok(Json(json!({ | ||||
|             "token": sso_token, | ||||
|         }))) | ||||
|     } else { | ||||
|         err!("SSO sign-in is not available") | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[get("/connect/oidc-signin?<code>&<state>", rank = 1)] | ||||
| async fn oidcsignin(code: OIDCCode, state: String, conn: DbConn) -> ApiResult<Redirect> { | ||||
|     oidcsignin_redirect( | ||||
|         state, | ||||
|         |decoded_state| sso::OIDCCodeWrapper::Ok { | ||||
|             state: decoded_state, | ||||
|             code, | ||||
|         }, | ||||
|         &conn, | ||||
|     ) | ||||
|     .await | ||||
| } | ||||
|  | ||||
| // Bitwarden clients appear to only care about code and state, so we pipe them through | ||||
| // cf: https://github.com/bitwarden/clients/blob/80b74b3300e15b4ae414dc06044cc9b02b6c10a6/libs/auth/src/angular/sso/sso.component.ts#L141 | ||||
| #[get("/connect/oidc-signin?<state>&<error>&<error_description>", rank = 2)] | ||||
| async fn oidcsignin_error( | ||||
|     state: String, | ||||
|     error: String, | ||||
|     error_description: Option<String>, | ||||
|     conn: DbConn, | ||||
| ) -> ApiResult<Redirect> { | ||||
|     oidcsignin_redirect( | ||||
|         state, | ||||
|         |decoded_state| sso::OIDCCodeWrapper::Error { | ||||
|             state: decoded_state, | ||||
|             error, | ||||
|             error_description, | ||||
|         }, | ||||
|         &conn, | ||||
|     ) | ||||
|     .await | ||||
| } | ||||
|  | ||||
| // The state was encoded using Base64 to avoid issues with providers. | ||||
| // The iss and scope parameters are needed for the redirection to work on iOS. | ||||
| async fn oidcsignin_redirect( | ||||
|     base64_state: String, | ||||
|     wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper, | ||||
|     conn: &DbConn, | ||||
| ) -> ApiResult<Redirect> { | ||||
|     let state = sso::decode_state(base64_state)?; | ||||
|     let code = sso::encode_code_claims(wrapper(state.clone())); | ||||
|  | ||||
|     let nonce = match SsoNonce::find(&state, conn).await { | ||||
|         Some(n) => n, | ||||
|         None => err!(format!("Failed to retrieve redirect_uri with {state}")), | ||||
|     }; | ||||
|  | ||||
|     let mut url = match url::Url::parse(&nonce.redirect_uri) { | ||||
|         Ok(url) => url, | ||||
|         Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)), | ||||
|     }; | ||||
|  | ||||
|     url.query_pairs_mut() | ||||
|         .append_pair("code", &code) | ||||
|         .append_pair("state", &state) | ||||
|         .append_pair("scope", &AuthMethod::Sso.scope()) | ||||
|         .append_pair("iss", &CONFIG.domain()); | ||||
|  | ||||
|     debug!("Redirection to {url}"); | ||||
|  | ||||
|     Ok(Redirect::temporary(String::from(url))) | ||||
| } | ||||
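For illustration, a standalone sketch of the query-string assembly done above with the `url` crate; the callback path and placeholder values below are hypothetical, only the parameter names match the handler.

// Sketch of how the client redirect URL is assembled before the temporary redirect.
fn main() {
    let mut url = url::Url::parse("https://vault.example.com/sso-connector.html").unwrap();
    url.query_pairs_mut()
        .append_pair("code", "<wrapped code JWT>")
        .append_pair("state", "<oidc state>")
        .append_pair("scope", "api offline_access")
        .append_pair("iss", "https://vault.example.com");
    // The resulting URL is what gets handed back to the Bitwarden client.
    println!("{url}");
}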
|  | ||||
| #[derive(Debug, Clone, Default, FromForm)] | ||||
| struct AuthorizeData { | ||||
|     #[field(name = uncased("client_id"))] | ||||
|     #[field(name = uncased("clientid"))] | ||||
|     client_id: String, | ||||
|     #[field(name = uncased("redirect_uri"))] | ||||
|     #[field(name = uncased("redirecturi"))] | ||||
|     redirect_uri: String, | ||||
|     #[allow(unused)] | ||||
|     response_type: Option<String>, | ||||
|     #[allow(unused)] | ||||
|     scope: Option<String>, | ||||
|     state: OIDCState, | ||||
|     #[allow(unused)] | ||||
|     code_challenge: Option<String>, | ||||
|     #[allow(unused)] | ||||
|     code_challenge_method: Option<String>, | ||||
|     #[allow(unused)] | ||||
|     response_mode: Option<String>, | ||||
|     #[allow(unused)] | ||||
|     domain_hint: Option<String>, | ||||
|     #[allow(unused)] | ||||
|     #[field(name = uncased("ssoToken"))] | ||||
|     sso_token: Option<String>, | ||||
| } | ||||
|  | ||||
| // The `redirect_uri` will change depending on the client (web, Android, iOS, ...) | ||||
| #[get("/connect/authorize?<data..>")] | ||||
| async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult<Redirect> { | ||||
|     let AuthorizeData { | ||||
|         client_id, | ||||
|         redirect_uri, | ||||
|         state, | ||||
|         .. | ||||
|     } = data; | ||||
|  | ||||
|     let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; | ||||
|  | ||||
|     Ok(Redirect::temporary(String::from(auth_url))) | ||||
| } | ||||
|   | ||||
| @@ -36,9 +36,10 @@ use crate::db::{ | ||||
|     models::{OrgPolicy, OrgPolicyType, User}, | ||||
|     DbConn, | ||||
| }; | ||||
| use crate::CONFIG; | ||||
|  | ||||
| // Type aliases for API methods results | ||||
| type ApiResult<T> = Result<T, crate::error::Error>; | ||||
| pub type ApiResult<T> = Result<T, crate::error::Error>; | ||||
| pub type JsonResult = ApiResult<Json<Value>>; | ||||
| pub type EmptyResult = ApiResult<()>; | ||||
|  | ||||
| @@ -109,6 +110,8 @@ async fn master_password_policy(user: &User, conn: &DbConn) -> Value { | ||||
|                 enforce_on_login: acc.enforce_on_login || policy.enforce_on_login, | ||||
|             } | ||||
|         })) | ||||
|     } else if let Some(policy_str) = CONFIG.sso_master_password_policy().filter(|_| CONFIG.sso_enabled()) { | ||||
|         serde_json::from_str(&policy_str).unwrap_or(json!({})) | ||||
|     } else { | ||||
|         json!({}) | ||||
|     }; | ||||
|   | ||||
| @@ -619,7 +619,7 @@ fn create_ping() -> Vec<u8> { | ||||
|     serialize(Value::Array(vec![6.into()])) | ||||
| } | ||||
|  | ||||
| #[allow(dead_code)] | ||||
| // https://github.com/bitwarden/server/blob/375af7c43b10d9da03525d41452f95de3f921541/src/Core/Enums/PushType.cs | ||||
| #[derive(Copy, Clone, Eq, PartialEq)] | ||||
| pub enum UpdateType { | ||||
|     SyncCipherUpdate = 0, | ||||
| @@ -632,7 +632,7 @@ pub enum UpdateType { | ||||
|     SyncOrgKeys = 6, | ||||
|     SyncFolderCreate = 7, | ||||
|     SyncFolderUpdate = 8, | ||||
|     SyncCipherDelete = 9, | ||||
|     // SyncCipherDelete = 9, // Redirects to `SyncLoginDelete` on upstream | ||||
|     SyncSettings = 10, | ||||
|  | ||||
|     LogOut = 11, | ||||
| @@ -644,6 +644,14 @@ pub enum UpdateType { | ||||
|     AuthRequest = 15, | ||||
|     AuthRequestResponse = 16, | ||||
|  | ||||
|     // SyncOrganizations = 17, // Not supported | ||||
|     // SyncOrganizationStatusChanged = 18, // Not supported | ||||
|     // SyncOrganizationCollectionSettingChanged = 19, // Not supported | ||||
|  | ||||
|     // Notification = 20, // Not supported | ||||
|     // NotificationStatus = 21, // Not supported | ||||
|  | ||||
|     // RefreshSecurityTasks = 22, // Not supported | ||||
|     None = 100, | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -55,13 +55,15 @@ fn not_found() -> ApiResult<Html<String>> { | ||||
| #[get("/css/vaultwarden.css")] | ||||
| fn vaultwarden_css() -> Cached<Css<String>> { | ||||
|     let css_options = json!({ | ||||
|         "signup_disabled": !CONFIG.signups_allowed() && CONFIG.signups_domains_whitelist().is_empty(), | ||||
|         "mail_enabled": CONFIG.mail_enabled(), | ||||
|         "mail_2fa_enabled": CONFIG._enable_email_2fa(), | ||||
|         "yubico_enabled": CONFIG._enable_yubico() && CONFIG.yubico_client_id().is_some() && CONFIG.yubico_secret_key().is_some(), | ||||
|         "emergency_access_allowed": CONFIG.emergency_access_allowed(), | ||||
|         "sends_allowed": CONFIG.sends_allowed(), | ||||
|         "load_user_scss": true, | ||||
|         "mail_2fa_enabled": CONFIG._enable_email_2fa(), | ||||
|         "mail_enabled": CONFIG.mail_enabled(), | ||||
|         "sends_allowed": CONFIG.sends_allowed(), | ||||
|         "signup_disabled": CONFIG.is_signup_disabled(), | ||||
|         "sso_enabled": CONFIG.sso_enabled(), | ||||
|         "sso_only": CONFIG.sso_enabled() && CONFIG.sso_only(), | ||||
|         "yubico_enabled": CONFIG._enable_yubico() && CONFIG.yubico_client_id().is_some() && CONFIG.yubico_secret_key().is_some(), | ||||
|     }); | ||||
|  | ||||
|     let scss = match CONFIG.render_template("scss/vaultwarden.scss", &css_options) { | ||||
|   | ||||
							
								
								
									
src/auth.rs (254 changed lines)
									
									
									
									
									
								
							| @@ -1,6 +1,5 @@ | ||||
| // JWT Handling | ||||
| // | ||||
| use chrono::{TimeDelta, Utc}; | ||||
| use chrono::{DateTime, TimeDelta, Utc}; | ||||
| use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header}; | ||||
| use num_traits::FromPrimitive; | ||||
| use once_cell::sync::{Lazy, OnceCell}; | ||||
| @@ -10,17 +9,24 @@ use serde::ser::Serialize; | ||||
| use std::{env, net::IpAddr}; | ||||
|  | ||||
| use crate::{ | ||||
|     api::ApiResult, | ||||
|     config::PathType, | ||||
|     db::models::{ | ||||
|         AttachmentId, CipherId, CollectionId, DeviceId, EmergencyAccessId, MembershipId, OrgApiKeyId, OrganizationId, | ||||
|         SendFileId, SendId, UserId, | ||||
|         AttachmentId, CipherId, CollectionId, DeviceId, DeviceType, EmergencyAccessId, MembershipId, OrgApiKeyId, | ||||
|         OrganizationId, SendFileId, SendId, UserId, | ||||
|     }, | ||||
|     error::Error, | ||||
|     sso, CONFIG, | ||||
| }; | ||||
| use crate::{error::Error, CONFIG}; | ||||
|  | ||||
| const JWT_ALGORITHM: Algorithm = Algorithm::RS256; | ||||
|  | ||||
| pub static DEFAULT_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); | ||||
| // Limit at which Bitwarden clients consider the token expired | ||||
| pub static BW_EXPIRATION: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); | ||||
|  | ||||
| pub static DEFAULT_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(30).unwrap()); | ||||
| pub static MOBILE_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(90).unwrap()); | ||||
| pub static DEFAULT_ACCESS_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); | ||||
| static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM)); | ||||
|  | ||||
| pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); | ||||
| @@ -85,7 +91,7 @@ pub fn encode_jwt<T: Serialize>(claims: &T) -> String { | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Error> { | ||||
| pub fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Error> { | ||||
|     let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); | ||||
|     validation.leeway = 30; // 30 seconds | ||||
|     validation.validate_exp = true; | ||||
| @@ -99,11 +105,15 @@ fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Err | ||||
|             ErrorKind::InvalidToken => err!("Token is invalid"), | ||||
|             ErrorKind::InvalidIssuer => err!("Issuer is invalid"), | ||||
|             ErrorKind::ExpiredSignature => err!("Token has expired"), | ||||
|             _ => err!("Error decoding JWT"), | ||||
|             _ => err!(format!("Error decoding JWT: {:?}", err)), | ||||
|         }, | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub fn decode_refresh(token: &str) -> Result<RefreshJwtClaims, Error> { | ||||
|     decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) | ||||
| } | ||||
|  | ||||
| pub fn decode_login(token: &str) -> Result<LoginJwtClaims, Error> { | ||||
|     decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) | ||||
| } | ||||
| @@ -186,6 +196,84 @@ pub struct LoginJwtClaims { | ||||
|     pub amr: Vec<String>, | ||||
| } | ||||
|  | ||||
| impl LoginJwtClaims { | ||||
|     pub fn new( | ||||
|         device: &Device, | ||||
|         user: &User, | ||||
|         nbf: i64, | ||||
|         exp: i64, | ||||
|         scope: Vec<String>, | ||||
|         client_id: Option<String>, | ||||
|         now: DateTime<Utc>, | ||||
|     ) -> Self { | ||||
|         // --- | ||||
|         // Disabled adding these keys to the JWT since they could cause it to get too large | ||||
|         // Also, these key/value pairs are not used anywhere by either Vaultwarden or Bitwarden clients | ||||
|         // Because they might get used in the future, and they are added by the Bitwarden Server, let's keep them, but commented out | ||||
|         // --- | ||||
|         // fn arg: orgs: Vec<super::UserOrganization>, | ||||
|         // --- | ||||
|         // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); | ||||
|         // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); | ||||
|         // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); | ||||
|         // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); | ||||
|  | ||||
|         if exp <= (now + *BW_EXPIRATION).timestamp() { | ||||
|             warn!("Raise access_token lifetime to more than 5min.") | ||||
|         } | ||||
|  | ||||
|         // Create the JWT claims struct, to send to the client | ||||
|         Self { | ||||
|             nbf, | ||||
|             exp, | ||||
|             iss: JWT_LOGIN_ISSUER.to_string(), | ||||
|             sub: user.uuid.clone(), | ||||
|             premium: true, | ||||
|             name: user.name.clone(), | ||||
|             email: user.email.clone(), | ||||
|             email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), | ||||
|  | ||||
|             // --- | ||||
|             // Disabled adding these keys to the JWT since they could cause it to get too large | ||||
|             // Also, these key/value pairs are not used anywhere by either Vaultwarden or Bitwarden clients | ||||
|             // Because they might get used in the future, and they are added by the Bitwarden Server, let's keep them, but commented out | ||||
|             // See: https://github.com/dani-garcia/vaultwarden/issues/4156 | ||||
|             // --- | ||||
|             // orgowner, | ||||
|             // orgadmin, | ||||
|             // orguser, | ||||
|             // orgmanager, | ||||
|             sstamp: user.security_stamp.clone(), | ||||
|             device: device.uuid.clone(), | ||||
|             devicetype: DeviceType::from_i32(device.atype).to_string(), | ||||
|             client_id: client_id.unwrap_or("undefined".to_string()), | ||||
|             scope, | ||||
|             amr: vec!["Application".into()], | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub fn default(device: &Device, user: &User, auth_method: &AuthMethod, client_id: Option<String>) -> Self { | ||||
|         let time_now = Utc::now(); | ||||
|         Self::new( | ||||
|             device, | ||||
|             user, | ||||
|             time_now.timestamp(), | ||||
|             (time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(), | ||||
|             auth_method.scope_vec(), | ||||
|             client_id, | ||||
|             time_now, | ||||
|         ) | ||||
|     } | ||||
|  | ||||
|     pub fn token(&self) -> String { | ||||
|         encode_jwt(&self) | ||||
|     } | ||||
|  | ||||
|     pub fn expires_in(&self) -> i64 { | ||||
|         self.exp - Utc::now().timestamp() | ||||
|     } | ||||
| } | ||||
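A quick worked sketch of the validity constants and the expires_in arithmetic above, using chrono only; with the two-hour access validity and the same `now` for both timestamps, the difference is exactly 7200 seconds.

use chrono::{TimeDelta, Utc};

fn main() {
    let now = Utc::now();
    let access_validity = TimeDelta::try_hours(2).unwrap();
    let exp = (now + access_validity).timestamp();
    // Mirrors the expires_in() computation: seconds remaining until `exp`.
    let expires_in = exp - now.timestamp();
    assert_eq!(expires_in, 7200);
    println!("expires_in = {expires_in}s");
}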
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| pub struct InviteJwtClaims { | ||||
|     // Not before | ||||
| @@ -1001,3 +1089,153 @@ impl<'r> FromRequest<'r> for ClientVersion { | ||||
|         Outcome::Success(ClientVersion(version)) | ||||
|     } | ||||
| } | ||||
|  | ||||
| #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] | ||||
| #[serde(rename_all = "lowercase")] | ||||
| pub enum AuthMethod { | ||||
|     OrgApiKey, | ||||
|     Password, | ||||
|     Sso, | ||||
|     UserApiKey, | ||||
| } | ||||
|  | ||||
| impl AuthMethod { | ||||
|     pub fn scope(&self) -> String { | ||||
|         match self { | ||||
|             AuthMethod::OrgApiKey => "api.organization".to_string(), | ||||
|             AuthMethod::Password => "api offline_access".to_string(), | ||||
|             AuthMethod::Sso => "api offline_access".to_string(), | ||||
|             AuthMethod::UserApiKey => "api".to_string(), | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub fn scope_vec(&self) -> Vec<String> { | ||||
|         self.scope().split_whitespace().map(str::to_string).collect() | ||||
|     } | ||||
|  | ||||
|     pub fn check_scope(&self, scope: Option<&String>) -> ApiResult<String> { | ||||
|         let method_scope = self.scope(); | ||||
|         match scope { | ||||
|             None => err!("Missing scope"), | ||||
|             Some(scope) if scope == &method_scope => Ok(method_scope), | ||||
|             Some(scope) => err!(format!("Scope ({scope}) not supported")), | ||||
|         } | ||||
|     } | ||||
| } | ||||
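A small test sketch for the scope handling above, assuming it sits in the same module as the enum (src/auth.rs); the test names are illustrative.

#[cfg(test)]
mod auth_method_scope_tests {
    use super::*;

    #[test]
    fn scope_matches_grant_type() {
        assert_eq!(AuthMethod::Password.scope(), "api offline_access");
        assert_eq!(AuthMethod::UserApiKey.scope(), "api");
        assert_eq!(AuthMethod::OrgApiKey.scope(), "api.organization");
        assert_eq!(AuthMethod::Sso.scope_vec(), vec!["api".to_string(), "offline_access".to_string()]);
    }

    #[test]
    fn check_scope_accepts_only_the_exact_scope_string() {
        let requested = Some("api offline_access".to_string());
        assert!(AuthMethod::Password.check_scope(requested.as_ref()).is_ok());
        assert!(AuthMethod::UserApiKey.check_scope(requested.as_ref()).is_err());
        assert!(AuthMethod::Password.check_scope(None).is_err());
    }
}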
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| pub enum TokenWrapper { | ||||
|     Access(String), | ||||
|     Refresh(String), | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| pub struct RefreshJwtClaims { | ||||
|     // Not before | ||||
|     pub nbf: i64, | ||||
|     // Expiration time | ||||
|     pub exp: i64, | ||||
|     // Issuer | ||||
|     pub iss: String, | ||||
|     // Subject | ||||
|     pub sub: AuthMethod, | ||||
|  | ||||
|     pub device_token: String, | ||||
|  | ||||
|     pub token: Option<TokenWrapper>, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| pub struct AuthTokens { | ||||
|     pub refresh_claims: RefreshJwtClaims, | ||||
|     pub access_claims: LoginJwtClaims, | ||||
| } | ||||
|  | ||||
| impl AuthTokens { | ||||
|     pub fn refresh_token(&self) -> String { | ||||
|         encode_jwt(&self.refresh_claims) | ||||
|     } | ||||
|  | ||||
|     pub fn access_token(&self) -> String { | ||||
|         self.access_claims.token() | ||||
|     } | ||||
|  | ||||
|     pub fn expires_in(&self) -> i64 { | ||||
|         self.access_claims.expires_in() | ||||
|     } | ||||
|  | ||||
|     pub fn scope(&self) -> String { | ||||
|         self.refresh_claims.sub.scope() | ||||
|     } | ||||
|  | ||||
|     // Create refresh_token and access_token with default validity | ||||
|     pub fn new(device: &Device, user: &User, sub: AuthMethod, client_id: Option<String>) -> Self { | ||||
|         let time_now = Utc::now(); | ||||
|  | ||||
|         let access_claims = LoginJwtClaims::default(device, user, &sub, client_id); | ||||
|  | ||||
|         let validity = if device.is_mobile() { | ||||
|             *MOBILE_REFRESH_VALIDITY | ||||
|         } else { | ||||
|             *DEFAULT_REFRESH_VALIDITY | ||||
|         }; | ||||
|  | ||||
|         let refresh_claims = RefreshJwtClaims { | ||||
|             nbf: time_now.timestamp(), | ||||
|             exp: (time_now + validity).timestamp(), | ||||
|             iss: JWT_LOGIN_ISSUER.to_string(), | ||||
|             sub, | ||||
|             device_token: device.refresh_token.clone(), | ||||
|             token: None, | ||||
|         }; | ||||
|  | ||||
|         Self { | ||||
|             refresh_claims, | ||||
|             access_claims, | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub async fn refresh_tokens( | ||||
|     ip: &ClientIp, | ||||
|     refresh_token: &str, | ||||
|     client_id: Option<String>, | ||||
|     conn: &mut DbConn, | ||||
| ) -> ApiResult<(Device, AuthTokens)> { | ||||
|     let refresh_claims = match decode_refresh(refresh_token) { | ||||
|         Err(err) => { | ||||
|             debug!("Failed to decode {} refresh_token: {refresh_token}", ip.ip); | ||||
|             err_silent!(format!("Impossible to read refresh_token: {}", err.message())) | ||||
|         } | ||||
|         Ok(claims) => claims, | ||||
|     }; | ||||
|  | ||||
|     // Get device by refresh token | ||||
|     let mut device = match Device::find_by_refresh_token(&refresh_claims.device_token, conn).await { | ||||
|         None => err!("Invalid refresh token"), | ||||
|         Some(device) => device, | ||||
|     }; | ||||
|  | ||||
|     // Save to update `updated_at`. | ||||
|     device.save(conn).await?; | ||||
|  | ||||
|     let user = match User::find_by_uuid(&device.user_uuid, conn).await { | ||||
|         None => err!("Impossible to find user"), | ||||
|         Some(user) => user, | ||||
|     }; | ||||
|  | ||||
|     let auth_tokens = match refresh_claims.sub { | ||||
|         AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => { | ||||
|             AuthTokens::new(&device, &user, refresh_claims.sub, client_id) | ||||
|         } | ||||
|         AuthMethod::Sso if CONFIG.sso_enabled() => { | ||||
|             sso::exchange_refresh_token(&device, &user, client_id, refresh_claims).await? | ||||
|         } | ||||
|         AuthMethod::Sso => err!("SSO is now disabled, Login again using email and master password"), | ||||
|         AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, Login again"), | ||||
|         AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub, client_id), | ||||
|         _ => err!("Invalid auth method, cannot refresh token"), | ||||
|     }; | ||||
|  | ||||
|     Ok((device, auth_tokens)) | ||||
| } | ||||
|   | ||||
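
For context, the refresh token handled above is an ordinary signed JWT carrying the claims defined in this file. A minimal, self-contained sketch of the same round trip, assuming the `jsonwebtoken` crate with an HS256 demo secret and placeholder issuer/subject/device values (the real encode_jwt/decode_refresh helpers manage Vaultwarden's own signing keys, which are not shown here):

// Standalone illustration: encode and decode claims shaped like RefreshJwtClaims.
// HS256 with an inline secret is used purely to keep the sketch self-contained.
use chrono::{Duration, Utc};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
enum TokenWrapper {
    Access(String),
    Refresh(String),
}

#[derive(Debug, Serialize, Deserialize)]
struct RefreshClaims {
    nbf: i64,
    exp: i64,
    iss: String,
    sub: String, // simplified: the real struct stores an AuthMethod here
    device_token: String,
    token: Option<TokenWrapper>,
}

fn main() -> Result<(), jsonwebtoken::errors::Error> {
    let now = Utc::now();
    let claims = RefreshClaims {
        nbf: now.timestamp(),
        exp: (now + Duration::days(30)).timestamp(), // refresh validity window
        iss: "example-issuer".to_string(),           // placeholder value
        sub: "password".to_string(),                 // placeholder value
        device_token: "example-device-token".to_string(),
        token: None,
    };

    let secret = b"demo-secret";
    let jwt = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret))?;

    // Default Validation verifies the signature and the `exp` claim.
    let decoded = decode::<RefreshClaims>(&jwt, &DecodingKey::from_secret(secret), &Validation::default())?;
    println!("decoded subject: {}", decoded.claims.sub);
    Ok(())
}
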
								123  src/config.rs
							| @@ -283,6 +283,9 @@ macro_rules! make_config { | ||||
|                     "smtp_host", | ||||
|                     "smtp_username", | ||||
|                     "_smtp_img_src", | ||||
|                     "sso_client_id", | ||||
|                     "sso_authority", | ||||
|                     "sso_callback_path", | ||||
|                 ]; | ||||
|  | ||||
|                 let cfg = { | ||||
| @@ -458,6 +461,9 @@ make_config! { | ||||
|         /// Duo Auth context cleanup schedule |> Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. | ||||
|         /// Defaults to once every minute. Set blank to disable this job. | ||||
|         duo_context_purge_schedule:   String, false,  def,    "30 * * * * *".to_string(); | ||||
|         /// Purge incomplete SSO nonce |> Cron schedule of the job that cleans leftover nonces left in the db by incomplete SSO logins. | ||||
|         /// Defaults to daily (20 minutes after midnight). Set blank to disable this job. | ||||
|         purge_incomplete_sso_nonce: String, false,  def,   "0 20 0 * * *".to_string(); | ||||
|     }, | ||||
|  | ||||
|     /// General settings | ||||
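
The purge schedules in this group are six-field cron expressions (seconds first). A quick standalone way to sanity-check a value such as the default `0 20 0 * * *` (daily at 00:20), assuming the `cron` and `chrono` crates rather than Vaultwarden's own job scheduler:

// Parse a six-field cron expression (sec min hour day month weekday) and
// print the next few times a job with that schedule would fire.
use std::str::FromStr;

use chrono::Utc;
use cron::Schedule;

fn main() {
    let expr = "0 20 0 * * *"; // default PURGE_INCOMPLETE_SSO_NONCE: 00:20:00 every day
    let schedule = Schedule::from_str(expr).expect("invalid cron expression");

    for next in schedule.upcoming(Utc).take(3) {
        println!("next run: {next}");
    }
}
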
| @@ -633,9 +639,15 @@ make_config! { | ||||
|         /// Timeout when acquiring database connection | ||||
|         database_timeout:       u64,    false,  def,    30; | ||||
|  | ||||
|         /// Database connection pool size | ||||
|         /// Timeout in seconds before idle connections to the database are closed | ||||
|         database_idle_timeout:  u64,    false, def,     600; | ||||
|  | ||||
|         /// Database connection max pool size | ||||
|         database_max_conns:     u32,    false,  def,    10; | ||||
|  | ||||
|         /// Database connection min pool size | ||||
|         database_min_conns:     u32,    false,  def,    2; | ||||
|  | ||||
|         /// Database connection init |> SQL statements to run when creating a new database connection, mainly useful for connection-scoped pragmas. If empty, a database-specific default is used. | ||||
|         database_conn_init:     String, false,  def,    String::new(); | ||||
|  | ||||
| @@ -676,6 +688,42 @@ make_config! { | ||||
|         enforce_single_org_with_reset_pw_policy: bool, false, def, false; | ||||
|     }, | ||||
|  | ||||
|     /// OpenID Connect SSO settings | ||||
|     sso { | ||||
|         /// Enabled | ||||
|         sso_enabled:                    bool,   true,   def,    false; | ||||
|         /// Only SSO login |> Disable Email+Master Password login | ||||
|         sso_only:                       bool,   true,   def,    false; | ||||
|         /// Allow email association |> Associate existing non-SSO user based on email | ||||
|         sso_signups_match_email:        bool,   true,   def,    true; | ||||
|         /// Allow unknown email verification status |> Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` opens the door to potential account takeover. | ||||
|         sso_allow_unknown_email_verification: bool, false, def, false; | ||||
|         /// Client ID | ||||
|         sso_client_id:                  String, true,   def,    String::new(); | ||||
|         /// Client Key | ||||
|         sso_client_secret:              Pass,   true,   def,    String::new(); | ||||
|         /// Authority Server |> Base URL of the OIDC provider discovery endpoint (without `/.well-known/openid-configuration`) | ||||
|         sso_authority:                  String, true,   def,    String::new(); | ||||
|         /// Authorization request scopes |> List of the needed scopes (`openid` is implicit) | ||||
|         sso_scopes:                     String, true,  def,   "email profile".to_string(); | ||||
|         /// Authorization request extra parameters | ||||
|         sso_authorize_extra_params:     String, true,  def,    String::new(); | ||||
|         /// Use PKCE during Authorization flow | ||||
|         sso_pkce:                       bool,   true,   def,    true; | ||||
|         /// Regex for additional trusted Id token audience |> By default only the client_id is trusted. | ||||
|         sso_audience_trusted:           String, true,  option; | ||||
|         /// Callback Path |> Generated from Domain. | ||||
|         sso_callback_path:              String, true,  generated, |c| generate_sso_callback_path(&c.domain); | ||||
|         /// Optional SSO master password policy |> Ex format: '{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' | ||||
|         sso_master_password_policy:     String, true,  option; | ||||
|         /// Use SSO only for auth, not the session lifecycle |> Use the default Vaultwarden session lifecycle (idle refresh token valid for 30 days) | ||||
|         sso_auth_only_not_session:      bool,   true,   def,    false; | ||||
|         /// Client cache for discovery endpoint. |> Duration in seconds (0 or less to disable). More details: https://github.com/dani-garcia/vaultwarden/wiki/Enabling-SSO-support-using-OpenId-Connect#client-cache | ||||
|         sso_client_cache_expiration:    u64,    true,   def,    0; | ||||
|         /// Log all tokens |> `LOG_LEVEL=debug` or `LOG_LEVEL=info,vaultwarden::sso=debug` is required | ||||
|         sso_debug_tokens:               bool,   true,   def,    false; | ||||
|     }, | ||||
|  | ||||
|     /// Yubikey settings | ||||
|     yubico: _enable_yubico { | ||||
|         /// Enabled | ||||
| @@ -734,7 +782,7 @@ make_config! { | ||||
|         smtp_auth_mechanism:           String, true,   option; | ||||
|         /// SMTP connection timeout |> Number of seconds when to stop trying to connect to the SMTP server | ||||
|         smtp_timeout:                  u64,    true,   def,     15; | ||||
|         /// Server name sent during HELO |> By default this value should be is on the machine's hostname, but might need to be changed in case it trips some anti-spam filters | ||||
|         /// Server name sent during HELO |> By default this value should be the machine's hostname, but might need to be changed in case it trips some anti-spam filters | ||||
|         helo_name:                     String, true,   option; | ||||
|         /// Embed images as email attachments. | ||||
|         smtp_embed_images:             bool, true, def, true; | ||||
| @@ -786,6 +834,14 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { | ||||
|         err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {limit}.",)); | ||||
|     } | ||||
|  | ||||
|     if cfg.database_min_conns < 1 || cfg.database_min_conns > limit { | ||||
|         err!(format!("`DATABASE_MIN_CONNS` contains an invalid value. Ensure it is between 1 and {limit}.",)); | ||||
|     } | ||||
|  | ||||
|     if cfg.database_min_conns > cfg.database_max_conns { | ||||
|         err!(format!("`DATABASE_MIN_CONNS` must be smaller than or equal to `DATABASE_MAX_CONNS`.",)); | ||||
|     } | ||||
|  | ||||
|     if let Some(log_file) = &cfg.log_file { | ||||
|         if std::fs::OpenOptions::new().append(true).create(true).open(log_file).is_err() { | ||||
|             err!("Unable to write to log file", log_file); | ||||
| @@ -911,6 +967,16 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { | ||||
|         err!("All Duo options need to be set for global Duo support") | ||||
|     } | ||||
|  | ||||
|     if cfg.sso_enabled { | ||||
|         if cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty() { | ||||
|             err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") | ||||
|         } | ||||
|  | ||||
|         validate_internal_sso_issuer_url(&cfg.sso_authority)?; | ||||
|         validate_internal_sso_redirect_url(&cfg.sso_callback_path)?; | ||||
|         check_master_password_policy(&cfg.sso_master_password_policy)?; | ||||
|     } | ||||
|  | ||||
|     if cfg._enable_yubico { | ||||
|         if cfg.yubico_client_id.is_some() != cfg.yubico_secret_key.is_some() { | ||||
|             err!("Both `YUBICO_CLIENT_ID` and `YUBICO_SECRET_KEY` must be set for Yubikey OTP support") | ||||
| @@ -1088,6 +1154,28 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| fn validate_internal_sso_issuer_url(sso_authority: &String) -> Result<openidconnect::IssuerUrl, Error> { | ||||
|     match openidconnect::IssuerUrl::new(sso_authority.clone()) { | ||||
|         Err(err) => err!(format!("Invalid sso_authority URL ({sso_authority}): {err}")), | ||||
|         Ok(issuer_url) => Ok(issuer_url), | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn validate_internal_sso_redirect_url(sso_callback_path: &String) -> Result<openidconnect::RedirectUrl, Error> { | ||||
|     match openidconnect::RedirectUrl::new(sso_callback_path.clone()) { | ||||
|         Err(err) => err!(format!("Invalid sso_callback_path ({sso_callback_path} built using `domain`) URL: {err}")), | ||||
|         Ok(redirect_url) => Ok(redirect_url), | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn check_master_password_policy(sso_master_password_policy: &Option<String>) -> Result<(), Error> { | ||||
|     let policy = sso_master_password_policy.as_ref().map(|mpp| serde_json::from_str::<serde_json::Value>(mpp)); | ||||
|     if let Some(Err(error)) = policy { | ||||
|         err!(format!("Invalid sso_master_password_policy ({error}), Ensure that it's correctly escaped with ''")) | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| /// Extracts an RFC 6454 web origin from a URL. | ||||
| fn extract_url_origin(url: &str) -> String { | ||||
|     match Url::parse(url) { | ||||
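
The three helpers above can be exercised in isolation. A standalone sketch with example values, assuming the `openidconnect` and `serde_json` crates, that performs the same checks as validate_internal_sso_issuer_url, validate_internal_sso_redirect_url and check_master_password_policy:

// Sanity-check example SSO settings the same way the config validators do.
use openidconnect::{IssuerUrl, RedirectUrl};
use serde_json::Value;

fn main() {
    // SSO_AUTHORITY: no trailing slash, no /.well-known/openid-configuration suffix.
    match IssuerUrl::new("https://auth.example.com".to_string()) {
        Ok(_) => println!("issuer URL ok"),
        Err(err) => eprintln!("invalid SSO_AUTHORITY: {err}"),
    }

    // The callback URL is generated from the domain, e.g. "{domain}/identity/connect/oidc-signin".
    match RedirectUrl::new("https://vault.example.com/identity/connect/oidc-signin".to_string()) {
        Ok(_) => println!("redirect URL ok"),
        Err(err) => eprintln!("invalid callback URL: {err}"),
    }

    // SSO_MASTER_PASSWORD_POLICY, when set, must be valid JSON.
    let policy = r#"{"enforceOnLogin":false,"minComplexity":3,"minLength":12}"#;
    match serde_json::from_str::<Value>(policy) {
        Ok(_) => println!("master password policy JSON ok"),
        Err(err) => eprintln!("invalid SSO_MASTER_PASSWORD_POLICY: {err}"),
    }
}
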
| @@ -1119,6 +1207,10 @@ fn generate_smtp_img_src(embed_images: bool, domain: &str) -> String { | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn generate_sso_callback_path(domain: &str) -> String { | ||||
|     format!("{domain}/identity/connect/oidc-signin") | ||||
| } | ||||
|  | ||||
| /// Generate the correct URL for the icon service. | ||||
| /// This will be used within icons.rs to call the external icon service. | ||||
| fn generate_icon_service_url(icon_service: &str) -> String { | ||||
| @@ -1354,6 +1446,16 @@ impl Config { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // The registration link should be hidden if | ||||
|     //  - Signup is not allowed and email whitelist is empty unless mail is disabled and invitations are allowed | ||||
|     //  - The SSO is activated and password login is disabled. | ||||
|     pub fn is_signup_disabled(&self) -> bool { | ||||
|         (!self.signups_allowed() | ||||
|             && self.signups_domains_whitelist().is_empty() | ||||
|             && (self.mail_enabled() || !self.invitations_allowed())) | ||||
|             || (self.sso_enabled() && self.sso_only()) | ||||
|     } | ||||
|  | ||||
|     /// Tests whether the specified user is allowed to create an organization. | ||||
|     pub fn is_org_creation_allowed(&self, email: &str) -> bool { | ||||
|         let users = self.org_creation_users(); | ||||
| @@ -1467,6 +1569,22 @@ impl Config { | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub fn sso_issuer_url(&self) -> Result<openidconnect::IssuerUrl, Error> { | ||||
|         validate_internal_sso_issuer_url(&self.sso_authority()) | ||||
|     } | ||||
|  | ||||
|     pub fn sso_redirect_url(&self) -> Result<openidconnect::RedirectUrl, Error> { | ||||
|         validate_internal_sso_redirect_url(&self.sso_callback_path()) | ||||
|     } | ||||
|  | ||||
|     pub fn sso_scopes_vec(&self) -> Vec<String> { | ||||
|         self.sso_scopes().split_whitespace().map(str::to_string).collect() | ||||
|     } | ||||
|  | ||||
|     pub fn sso_authorize_extra_params_vec(&self) -> Vec<(String, String)> { | ||||
|         url::form_urlencoded::parse(self.sso_authorize_extra_params().as_bytes()).into_owned().collect() | ||||
|     } | ||||
| } | ||||
|  | ||||
| use handlebars::{ | ||||
| @@ -1532,6 +1650,7 @@ where | ||||
|     reg!("email/send_org_invite", ".html"); | ||||
|     reg!("email/send_single_org_removed_from_org", ".html"); | ||||
|     reg!("email/smtp_test", ".html"); | ||||
|     reg!("email/sso_change_email", ".html"); | ||||
|     reg!("email/twofactor_email", ".html"); | ||||
|     reg!("email/verify_email", ".html"); | ||||
|     reg!("email/welcome_must_verify", ".html"); | ||||
|   | ||||
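
The two helper methods at the end of this file's changes turn the raw SSO_SCOPES and SSO_AUTHORIZE_EXTRA_PARAMS strings into structured values. A standalone sketch with example inputs, assuming the `url` crate, that mirrors sso_scopes_vec() and sso_authorize_extra_params_vec():

// Split the scope list on whitespace and parse the extra parameters as
// application/x-www-form-urlencoded key/value pairs.
fn main() {
    let sso_scopes = "email profile"; // "openid" is implicit
    let scopes: Vec<String> = sso_scopes.split_whitespace().map(str::to_string).collect();
    assert_eq!(scopes, vec!["email".to_string(), "profile".to_string()]);

    let extra_params = "audience=vaultwarden&prompt=consent"; // example value
    let params: Vec<(String, String)> =
        url::form_urlencoded::parse(extra_params.as_bytes()).into_owned().collect();
    assert_eq!(params[0], ("audience".to_string(), "vaultwarden".to_string()));
    assert_eq!(params[1], ("prompt".to_string(), "consent".to_string()));
}
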
| @@ -134,6 +134,8 @@ macro_rules! generate_connections { | ||||
|                             let manager = ConnectionManager::new(&url); | ||||
|                             let pool = Pool::builder() | ||||
|                                 .max_size(CONFIG.database_max_conns()) | ||||
|                                 .min_idle(Some(CONFIG.database_min_conns())) | ||||
|                                 .idle_timeout(Some(Duration::from_secs(CONFIG.database_idle_timeout()))) | ||||
|                                 .connection_timeout(Duration::from_secs(CONFIG.database_timeout())) | ||||
|                                 .connection_customizer(Box::new(DbConnOptions{ | ||||
|                                     init_stmts: conn_type.get_init_stmts() | ||||
|   | ||||
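
The pool builder change maps the DATABASE_* settings onto diesel's bundled r2d2 builder. A standalone sketch with the default values hard-coded, assuming diesel with the `sqlite` and `r2d2` features and an in-memory database; the real code pulls these numbers from CONFIG inside the generate_connections! macro:

// Configure a connection pool the way the diff above does, with the pool
// limits and timeouts spelled out as literals.
use std::time::Duration;

use diesel::r2d2::{ConnectionManager, Pool};
use diesel::sqlite::SqliteConnection;

fn main() {
    let manager = ConnectionManager::<SqliteConnection>::new(":memory:");

    let pool: Pool<ConnectionManager<SqliteConnection>> = Pool::builder()
        .max_size(10)                                 // DATABASE_MAX_CONNS
        .min_idle(Some(2))                            // DATABASE_MIN_CONNS
        .idle_timeout(Some(Duration::from_secs(600))) // DATABASE_IDLE_TIMEOUT
        .connection_timeout(Duration::from_secs(30))  // DATABASE_TIMEOUT
        .build_unchecked(manager);                    // don't open connections eagerly

    // Acquiring a connection respects the connection timeout configured above.
    let _conn = pool.get().expect("timed out acquiring a database connection");
    println!("pool ready");
}
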
| @@ -6,7 +6,7 @@ use macros::UuidFromParam; | ||||
| use serde_json::Value; | ||||
|  | ||||
| db_object! { | ||||
|     #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset, Deserialize, Serialize)] | ||||
|     #[derive(Identifiable, Queryable, Insertable, AsChangeset, Deserialize, Serialize)] | ||||
|     #[diesel(table_name = auth_requests)] | ||||
|     #[diesel(treat_none_as_null = true)] | ||||
|     #[diesel(primary_key(uuid))] | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff.