mirror of
				https://github.com/dani-garcia/vaultwarden.git
				synced 2025-10-28 00:40:01 +02:00 
			
		
		
		
	Merge branch 'async-updates' of https://github.com/BlackDex/vaultwarden into BlackDex-async-updates
This commit is contained in:
		| @@ -3,13 +3,18 @@ target | ||||
|  | ||||
| # Data folder | ||||
| data | ||||
|  | ||||
| # Misc | ||||
| .env | ||||
| .env.template | ||||
| .gitattributes | ||||
| .gitignore | ||||
| rustfmt.toml | ||||
|  | ||||
| # IDE files | ||||
| .vscode | ||||
| .idea | ||||
| .editorconfig | ||||
| *.iml | ||||
|  | ||||
| # Documentation | ||||
| @@ -19,9 +24,17 @@ data | ||||
| *.yml | ||||
| *.yaml | ||||
|  | ||||
| # Docker folders | ||||
| # Docker | ||||
| hooks | ||||
| tools | ||||
| Dockerfile | ||||
| .dockerignore | ||||
| docker/** | ||||
| !docker/healthcheck.sh | ||||
| !docker/start.sh | ||||
|  | ||||
| # Web vault | ||||
| web-vault | ||||
|  | ||||
| # Vaultwarden Resources | ||||
| resources | ||||
|   | ||||
| @@ -336,9 +336,8 @@ | ||||
| # SMTP_HOST=smtp.domain.tld | ||||
| # SMTP_FROM=vaultwarden@domain.tld | ||||
| # SMTP_FROM_NAME=Vaultwarden | ||||
| # SMTP_SECURITY=starttls # ("starttls", "force_tls", "off") Enable a secure connection. Default is "starttls" (Explicit - ports 587 or 25), "force_tls" (Implicit - port 465) or "off", no encryption (port 25) | ||||
| # SMTP_PORT=587          # Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 is outdated and used with Implicit TLS. | ||||
| # SMTP_SSL=true          # (Explicit) - This variable by default configures Explicit STARTTLS, it will upgrade an insecure connection to a secure one. Unless SMTP_EXPLICIT_TLS is set to true. Either port 587 or 25 are default. | ||||
| # SMTP_EXPLICIT_TLS=true # (Implicit) - N.B. This variable configures Implicit TLS. It's currently mislabelled (see bug #851) - SMTP_SSL Needs to be set to true for this option to work. Usually port 465 is used here. | ||||
| # SMTP_USERNAME=username | ||||
| # SMTP_PASSWORD=password | ||||
| # SMTP_TIMEOUT=15 | ||||
|   | ||||
							
								
								
									
										22
									
								
								.github/workflows/build.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.github/workflows/build.yml
									
									
									
									
										vendored
									
									
								
							| @@ -30,14 +30,14 @@ jobs: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         channel: | ||||
|           - nightly | ||||
|           - stable | ||||
|         target-triple: | ||||
|           - x86_64-unknown-linux-gnu | ||||
|         include: | ||||
|           - target-triple: x86_64-unknown-linux-gnu | ||||
|             host-triple: x86_64-unknown-linux-gnu | ||||
|             features: [sqlite,mysql,postgresql] # Remember to update the `cargo test` to match the amount of features | ||||
|             channel: nightly | ||||
|             channel: stable | ||||
|             os: ubuntu-20.04 | ||||
|             ext: "" | ||||
|  | ||||
| @@ -46,7 +46,7 @@ jobs: | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 | ||||
|         uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0 | ||||
|       # End Checkout the repo | ||||
|  | ||||
|  | ||||
| @@ -82,28 +82,28 @@ jobs: | ||||
|       # Run cargo tests (In release mode to speed up future builds) | ||||
|       # First test all features together, afterwards test them separately. | ||||
|       - name: "`cargo test --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: test | ||||
|           args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} | ||||
|       # Test single features | ||||
|       # 0: sqlite | ||||
|       - name: "`cargo test --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: test | ||||
|           args: --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }} | ||||
|         if: ${{ matrix.features[0] != '' }} | ||||
|       # 1: mysql | ||||
|       - name: "`cargo test --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: test | ||||
|           args: --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }} | ||||
|         if: ${{ matrix.features[1] != '' }} | ||||
|       # 2: postgresql | ||||
|       - name: "`cargo test --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: test | ||||
|           args: --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }} | ||||
| @@ -113,7 +113,7 @@ jobs: | ||||
|  | ||||
|       # Run cargo clippy, and fail on warnings (In release mode to speed up future builds) | ||||
|       - name: "`cargo clippy --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: clippy | ||||
|           args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} -- -D warnings | ||||
| @@ -122,7 +122,7 @@ jobs: | ||||
|  | ||||
|       # Run cargo fmt | ||||
|       - name: '`cargo fmt`' | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: fmt | ||||
|           args: --all -- --check | ||||
| @@ -131,7 +131,7 @@ jobs: | ||||
|  | ||||
|       # Build the binary | ||||
|       - name: "`cargo build --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" | ||||
|         uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 | ||||
|         uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 | ||||
|         with: | ||||
|           command: build | ||||
|           args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} | ||||
| @@ -140,7 +140,7 @@ jobs: | ||||
|  | ||||
|       # Upload artifact to Github Actions | ||||
|       - name: Upload artifact | ||||
|         uses: actions/upload-artifact@27121b0bdffd731efa15d66772be8dc71245d074 # v2.2.4 | ||||
|         uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2 # v2.3.1 | ||||
|         with: | ||||
|           name: vaultwarden-${{ matrix.target-triple }}${{ matrix.ext }} | ||||
|           path: target/${{ matrix.target-triple }}/release/vaultwarden${{ matrix.ext }} | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/workflows/hadolint.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/hadolint.yml
									
									
									
									
										vendored
									
									
								
							| @@ -16,7 +16,7 @@ jobs: | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 | ||||
|         uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0 | ||||
|       # End Checkout the repo | ||||
|  | ||||
|  | ||||
| @@ -27,7 +27,7 @@ jobs: | ||||
|           sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \ | ||||
|           sudo chmod +x /usr/local/bin/hadolint | ||||
|         env: | ||||
|           HADOLINT_VERSION: 2.7.0 | ||||
|           HADOLINT_VERSION: 2.8.0 | ||||
|       # End Download hadolint | ||||
|  | ||||
|       # Test Dockerfiles | ||||
|   | ||||
							
								
								
									
										4
									
								
								.github/workflows/release.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/release.yml
									
									
									
									
										vendored
									
									
								
							| @@ -60,13 +60,13 @@ jobs: | ||||
|     steps: | ||||
|       # Checkout the repo | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 | ||||
|         uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0 | ||||
|         with: | ||||
|           fetch-depth: 0 | ||||
|  | ||||
|       # Login to Docker Hub | ||||
|       - name: Login to Docker Hub | ||||
|         uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # v1.10.0 | ||||
|         uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # v1.12.0 | ||||
|         with: | ||||
|           username: ${{ secrets.DOCKERHUB_USERNAME }} | ||||
|           password: ${{ secrets.DOCKERHUB_TOKEN }} | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| --- | ||||
| repos: | ||||
| -   repo: https://github.com/pre-commit/pre-commit-hooks | ||||
|     rev: v4.0.1 | ||||
|     rev: v4.1.0 | ||||
|     hooks: | ||||
|     - id: check-yaml | ||||
|     - id: check-json | ||||
|   | ||||
							
								
								
									
										803
									
								
								Cargo.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										803
									
								
								Cargo.lock
									
									
									
										generated
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										165
									
								
								Cargo.toml
									
									
									
									
									
								
							
							
						
						
									
										165
									
								
								Cargo.toml
									
									
									
									
									
								
							| @@ -3,7 +3,7 @@ name = "vaultwarden" | ||||
| version = "1.0.0" | ||||
| authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] | ||||
| edition = "2021" | ||||
| rust-version = "1.56" | ||||
| rust-version = "1.59" | ||||
| resolver = "2" | ||||
|  | ||||
| repository = "https://github.com/dani-garcia/vaultwarden" | ||||
| @@ -27,79 +27,15 @@ vendored_openssl = ["openssl/vendored"] | ||||
| unstable = [] | ||||
|  | ||||
| [target."cfg(not(windows))".dependencies] | ||||
| syslog = "4.0.1" | ||||
| # Logging | ||||
| syslog = "4.0.1" # Needs to be v4 until fern is updated | ||||
|  | ||||
| [dependencies] | ||||
| # Web framework | ||||
| rocket = { version = "0.5.0-rc.1", features = ["tls", "json"], default-features = false } | ||||
|  | ||||
| # Async futures | ||||
| futures = "0.3.19" | ||||
| tokio = { version = "1.16.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot"] } | ||||
|   | ||||
|  # HTTP client | ||||
| reqwest = { version = "0.11.9", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } | ||||
| bytes = "1.1.0" | ||||
|  | ||||
| # Used for custom short lived cookie jar | ||||
| cookie = "0.15.1" | ||||
| cookie_store = "0.15.1" | ||||
| url = "2.2.2" | ||||
|  | ||||
| # WebSockets library | ||||
| ws = { version = "0.11.1", package = "parity-ws" } | ||||
|  | ||||
| # MessagePack library | ||||
| rmpv = "1.0.0" | ||||
|  | ||||
| # Concurrent hashmap implementation | ||||
| chashmap = "2.2.2" | ||||
|  | ||||
| # A generic serialization/deserialization framework | ||||
| serde = { version = "1.0.136", features = ["derive"] } | ||||
| serde_json = "1.0.78" | ||||
|  | ||||
| # Logging | ||||
| log = "0.4.14" | ||||
| fern = { version = "0.6.0", features = ["syslog-4"] } | ||||
|  | ||||
| # A safe, extensible ORM and Query builder | ||||
| diesel = { version = "1.4.8", features = [ "chrono", "r2d2"] } | ||||
| diesel_migrations = "1.4.0" | ||||
|  | ||||
| # Bundled SQLite | ||||
| libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true } | ||||
|  | ||||
| # Crypto-related libraries | ||||
| rand = "0.8.4" | ||||
| ring = "0.16.20" | ||||
|  | ||||
| # UUID generation | ||||
| uuid = { version = "0.8.2", features = ["v4"] } | ||||
|  | ||||
| # Date and time libraries | ||||
| chrono = { version = "0.4.19", features = ["serde"] } | ||||
| chrono-tz = "0.6.1" | ||||
| time = "0.2.27" | ||||
|  | ||||
| # Job scheduler | ||||
| job_scheduler = "1.2.1" | ||||
|  | ||||
| # TOTP library | ||||
| totp-lite = "1.0.3" | ||||
|  | ||||
| # Data encoding library | ||||
| data-encoding = "2.3.2" | ||||
|  | ||||
| # JWT library | ||||
| jsonwebtoken = "7.2.0" | ||||
|  | ||||
| # U2F library | ||||
| u2f = "0.2.0" | ||||
| webauthn-rs = "0.3.2" | ||||
|  | ||||
| # Yubico Library | ||||
| yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false } | ||||
| tracing = { version = "0.1.31", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work | ||||
| backtrace = "0.3.64" # Logging panics to logfile instead stderr only | ||||
|  | ||||
| # A `dotenv` implementation for Rust | ||||
| dotenv = { version = "0.15.0", default-features = false } | ||||
| @@ -111,41 +47,100 @@ once_cell = "1.9.0" | ||||
| num-traits = "0.2.14" | ||||
| num-derive = "0.3.3" | ||||
|  | ||||
| # Web framework | ||||
| rocket = { version = "0.5.0-rc.1", features = ["tls", "json"], default-features = false } | ||||
|  | ||||
| # WebSockets libraries | ||||
| ws = { version = "0.11.1", package = "parity-ws" } | ||||
| rmpv = "1.0.0" # MessagePack library | ||||
| chashmap = "2.2.2" # Concurrent hashmap implementation | ||||
|  | ||||
| # Async futures | ||||
| futures = "0.3.21" | ||||
| tokio = { version = "1.17.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot"] } | ||||
|  | ||||
| # A generic serialization/deserialization framework | ||||
| serde = { version = "1.0.136", features = ["derive"] } | ||||
| serde_json = "1.0.79" | ||||
|  | ||||
| # A safe, extensible ORM and Query builder | ||||
| diesel = { version = "1.4.8", features = [ "chrono", "r2d2"] } | ||||
| diesel_migrations = "1.4.0" | ||||
|  | ||||
| # Bundled SQLite | ||||
| libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true } | ||||
|  | ||||
| # Crypto-related libraries | ||||
| rand = "0.8.5" | ||||
| ring = "0.16.20" | ||||
|  | ||||
| # UUID generation | ||||
| uuid = { version = "0.8.2", features = ["v4"] } | ||||
|  | ||||
| # Date and time libraries | ||||
| chrono = { version = "0.4.19", features = ["clock", "serde"], default-features = false } | ||||
| chrono-tz = "0.6.1" | ||||
| time = "0.2.27" | ||||
|  | ||||
| # Job scheduler | ||||
| job_scheduler = "1.2.1" | ||||
|  | ||||
| # Data encoding library Hex/Base32/Base64 | ||||
| data-encoding = "2.3.2" | ||||
|  | ||||
| # JWT library | ||||
| jsonwebtoken = "7.2.0" | ||||
|  | ||||
| # TOTP library | ||||
| totp-lite = "1.0.3" | ||||
|  | ||||
| # Yubico Library | ||||
| yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false } | ||||
|  | ||||
| # U2F libraries | ||||
| u2f = "0.2.0" | ||||
| webauthn-rs = "0.3.2" | ||||
|  | ||||
| # Handling of URL's for WebAuthn | ||||
| url = "2.2.2" | ||||
|  | ||||
| # Email libraries | ||||
| tracing = { version = "0.1.29", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. | ||||
| lettre = { version = "0.10.0-rc.4", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } | ||||
| idna = "0.2.3" # Punycode conversion | ||||
| percent-encoding = "2.1.0" # URL encoding library used for URL's in the emails | ||||
|  | ||||
| # Template library | ||||
| handlebars = { version = "4.2.1", features = ["dir_source"] } | ||||
|  | ||||
| # HTTP client | ||||
| reqwest = { version = "0.11.9", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } | ||||
|  | ||||
| # For favicon extraction from main website | ||||
| html5ever = "0.25.1" | ||||
| markup5ever_rcdom = "0.1.0" | ||||
| html5gum = "0.4.0" | ||||
| regex = { version = "1.5.4", features = ["std", "perf", "unicode-perl"], default-features = false } | ||||
| data-url = "0.1.1" | ||||
| bytes = "1.1.0" | ||||
| cached = "0.30.0" | ||||
|  | ||||
| # Used for custom short lived cookie jar during favicon extraction | ||||
| cookie = "0.15.1" | ||||
| cookie_store = "0.15.1" | ||||
|  | ||||
| # Used by U2F, JWT and Postgres | ||||
| openssl = "0.10.38" | ||||
|  | ||||
| # URL encoding library | ||||
| percent-encoding = "2.1.0" | ||||
| # Punycode conversion | ||||
| idna = "0.2.3" | ||||
|  | ||||
| # CLI argument parsing | ||||
| pico-args = "0.4.2" | ||||
|  | ||||
| # Logging panics to logfile instead stderr only | ||||
| backtrace = "0.3.64" | ||||
|  | ||||
| # Macro ident concatenation | ||||
| paste = "1.0.6" | ||||
| governor = "0.4.1" | ||||
| governor = "0.4.2" | ||||
|  | ||||
| # Capture CTRL+C | ||||
| ctrlc = { version = "3.2.1", features = ["termination"] } | ||||
|  | ||||
| [patch.crates-io] | ||||
| rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '8cae077ba1d54b92cdef3e171a730b819d5eeb8e' } | ||||
| rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '91e3b4397a1637d0f55f23db712cf7bda0c7f891' } | ||||
|  | ||||
| # The maintainer of the `job_scheduler` crate doesn't seem to have responded | ||||
| # to any issues or PRs for almost a year (as of April 2021). This hopefully | ||||
| @@ -153,3 +148,9 @@ rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '8cae077ba1d54 | ||||
| # In particular, `cron` has since implemented parsing of some common syntax | ||||
| # that wasn't previously supported (https://github.com/zslayton/cron/pull/64). | ||||
| job_scheduler = { git = 'https://github.com/jjlin/job_scheduler', rev = 'ee023418dbba2bfe1e30a5fd7d937f9e33739806' } | ||||
|  | ||||
| # Strip debuginfo from the release builds | ||||
| # Also enable thin LTO for some optimizations | ||||
| [profile.release] | ||||
| strip = "debuginfo" | ||||
| lto = "thin" | ||||
|   | ||||
| @@ -1,3 +1,4 @@ | ||||
| # syntax=docker/dockerfile:1 | ||||
| # The cross-built images have the build arch (`amd64`) embedded in the image | ||||
| # manifest, rather than the target arch. For example: | ||||
| # | ||||
|   | ||||
| @@ -6,19 +6,19 @@ | ||||
| {% set build_stage_base_image = "rust:1.58-buster" %} | ||||
| {% if "alpine" in target_file %} | ||||
| {%   if "amd64" in target_file %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-nightly-2022-01-23" %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-stable" %} | ||||
| {%     set runtime_stage_base_image = "alpine:3.15" %} | ||||
| {%     set package_arch_target = "x86_64-unknown-linux-musl" %} | ||||
| {%   elif "armv7" in target_file %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23" %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-stable" %} | ||||
| {%     set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.15" %} | ||||
| {%     set package_arch_target = "armv7-unknown-linux-musleabihf" %} | ||||
| {%   elif "armv6" in target_file %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-nightly-2022-01-23" %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-stable" %} | ||||
| {%     set runtime_stage_base_image = "balenalib/rpi-alpine:3.15" %} | ||||
| {%     set package_arch_target = "arm-unknown-linux-musleabi" %} | ||||
| {%   elif "arm64" in target_file %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-nightly-2022-01-23" %} | ||||
| {%     set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-stable" %} | ||||
| {%     set runtime_stage_base_image = "balenalib/aarch64-alpine:3.15" %} | ||||
| {%     set package_arch_target = "aarch64-unknown-linux-musl" %} | ||||
| {%   endif %} | ||||
| @@ -182,21 +182,15 @@ RUN touch src/main.rs | ||||
| # your actual source files being built | ||||
| # hadolint ignore=DL3059 | ||||
| RUN {{ mount_rust_cache -}} cargo build --features ${DB} --release{{ package_arch_target_param }} | ||||
| {% if "alpine" in target_file %} | ||||
| {%   if "armv7" in target_file %} | ||||
| # hadolint ignore=DL3059 | ||||
| RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden | ||||
| {%   endif %} | ||||
| {% endif %} | ||||
|  | ||||
| ######################## RUNTIME IMAGE  ######################## | ||||
| # Create a new stage with a minimal image | ||||
| # because we already have a binary built | ||||
| FROM {{ runtime_stage_base_image }} | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
| {%- if "alpine" in runtime_stage_base_image %} \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
| {% endif %} | ||||
|   | ||||
| @@ -89,9 +89,9 @@ RUN cargo build --features ${DB} --release | ||||
| # because we already have a binary built | ||||
| FROM debian:buster-slim | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
|  | ||||
| # Create data folder and Install needed libraries | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:x86_64-musl-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl | ||||
| # because we already have a binary built | ||||
| FROM alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -89,9 +89,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM debian:buster-slim | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
|  | ||||
| # Create data folder and Install needed libraries | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:x86_64-musl-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu | ||||
| # because we already have a binary built | ||||
| FROM balenalib/aarch64-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:aarch64-musl-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl | ||||
| # because we already have a binary built | ||||
| FROM balenalib/aarch64-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM balenalib/aarch64-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:aarch64-musl-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM balenalib/aarch64-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi | ||||
| # because we already have a binary built | ||||
| FROM balenalib/rpi-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:arm-musleabi-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi | ||||
| # because we already have a binary built | ||||
| FROM balenalib/rpi-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM balenalib/rpi-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:arm-musleabi-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -83,9 +83,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM balenalib/rpi-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabih | ||||
| # because we already have a binary built | ||||
| FROM balenalib/armv7hf-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:armv7-musleabihf-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -78,17 +78,15 @@ RUN touch src/main.rs | ||||
| # your actual source files being built | ||||
| # hadolint ignore=DL3059 | ||||
| RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf | ||||
| # hadolint ignore=DL3059 | ||||
| RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden | ||||
|  | ||||
| ######################## RUNTIME IMAGE  ######################## | ||||
| # Create a new stage with a minimal image | ||||
| # because we already have a binary built | ||||
| FROM balenalib/armv7hf-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -109,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. | ||||
| # because we already have a binary built | ||||
| FROM balenalib/armv7hf-debian:buster | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 | ||||
|  | ||||
| # hadolint ignore=DL3059 | ||||
| RUN [ "cross-build-start" ] | ||||
|   | ||||
| @@ -27,7 +27,7 @@ | ||||
| FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault | ||||
|  | ||||
| ########################## BUILD IMAGE  ########################## | ||||
| FROM blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build | ||||
| FROM blackdex/rust-musl:armv7-musleabihf-stable as build | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -78,17 +78,15 @@ RUN touch src/main.rs | ||||
| # your actual source files being built | ||||
| # hadolint ignore=DL3059 | ||||
| RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf | ||||
| # hadolint ignore=DL3059 | ||||
| RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden | ||||
|  | ||||
| ######################## RUNTIME IMAGE  ######################## | ||||
| # Create a new stage with a minimal image | ||||
| # because we already have a binary built | ||||
| FROM balenalib/armv7hf-alpine:3.15 | ||||
|  | ||||
| ENV ROCKET_ENV="staging" \ | ||||
| ENV ROCKET_PROFILE="release" \ | ||||
|     ROCKET_ADDRESS=0.0.0.0 \ | ||||
|     ROCKET_PORT=80 \ | ||||
|     ROCKET_WORKERS=10 \ | ||||
|     SSL_CERT_DIR=/etc/ssl/certs | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -301,7 +301,7 @@ fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyResult { | ||||
|  | ||||
| #[get("/logout")] | ||||
| fn logout(cookies: &CookieJar<'_>, referer: Referer) -> Redirect { | ||||
|     cookies.remove(Cookie::named(COOKIE_NAME)); | ||||
|     cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish()); | ||||
|     Redirect::to(admin_url(referer)) | ||||
| } | ||||
|  | ||||
| @@ -638,7 +638,7 @@ impl<'r> FromRequest<'r> for AdminToken { | ||||
|  | ||||
|             if decode_admin(access_token).is_err() { | ||||
|                 // Remove admin cookie | ||||
|                 cookies.remove(Cookie::named(COOKIE_NAME)); | ||||
|                 cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish()); | ||||
|                 error!("Invalid or expired admin JWT. IP: {}.", ip); | ||||
|                 return Outcome::Forward(()); | ||||
|             } | ||||
|   | ||||
| @@ -1182,9 +1182,7 @@ async fn post_org_import( | ||||
|     let ciphers = stream::iter(data.Ciphers) | ||||
|         .then(|cipher_data| async { | ||||
|             let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); | ||||
|             update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate) | ||||
|                 .await | ||||
|                 .ok(); | ||||
|             update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await.ok(); | ||||
|             cipher | ||||
|         }) | ||||
|         .collect::<Vec<Cipher>>() | ||||
|   | ||||
							
								
								
									
										478
									
								
								src/api/icons.rs
									
									
									
									
									
								
							
							
						
						
									
										478
									
								
								src/api/icons.rs
									
									
									
									
									
								
							| @@ -1,21 +1,28 @@ | ||||
| use std::{ | ||||
|     collections::HashMap, | ||||
|     net::{IpAddr, ToSocketAddrs}, | ||||
|     sync::{Arc, RwLock}, | ||||
|     net::IpAddr, | ||||
|     sync::Arc, | ||||
|     time::{Duration, SystemTime}, | ||||
| }; | ||||
|  | ||||
| use bytes::{Buf, Bytes, BytesMut}; | ||||
| use bytes::{Bytes, BytesMut}; | ||||
| use futures::{stream::StreamExt, TryFutureExt}; | ||||
| use once_cell::sync::Lazy; | ||||
| use regex::Regex; | ||||
| use reqwest::{header, Client, Response}; | ||||
| use reqwest::{ | ||||
|     header::{self, HeaderMap, HeaderValue}, | ||||
|     Client, Response, | ||||
| }; | ||||
| use rocket::{http::ContentType, response::Redirect, Route}; | ||||
| use tokio::{ | ||||
|     fs::{create_dir_all, remove_file, symlink_metadata, File}, | ||||
|     io::{AsyncReadExt, AsyncWriteExt}, | ||||
|     net::lookup_host, | ||||
|     sync::RwLock, | ||||
| }; | ||||
|  | ||||
| use html5gum::{Emitter, EndTag, InfallibleTokenizer, Readable, StartTag, StringReader, Tokenizer}; | ||||
|  | ||||
| use crate::{ | ||||
|     error::Error, | ||||
|     util::{get_reqwest_client_builder, Cached}, | ||||
| @@ -34,39 +41,50 @@ pub fn routes() -> Vec<Route> { | ||||
|  | ||||
| static CLIENT: Lazy<Client> = Lazy::new(|| { | ||||
|     // Generate the default headers | ||||
|     let mut default_headers = header::HeaderMap::new(); | ||||
|     default_headers | ||||
|         .insert(header::USER_AGENT, header::HeaderValue::from_static("Links (2.22; Linux X86_64; GNU C; text)")); | ||||
|     default_headers | ||||
|         .insert(header::ACCEPT, header::HeaderValue::from_static("text/html, text/*;q=0.5, image/*, */*;q=0.1")); | ||||
|     default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en,*;q=0.1")); | ||||
|     default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); | ||||
|     default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); | ||||
|     let mut default_headers = HeaderMap::new(); | ||||
|     default_headers.insert(header::USER_AGENT, HeaderValue::from_static("Links (2.22; Linux X86_64; GNU C; text)")); | ||||
|     default_headers.insert(header::ACCEPT, HeaderValue::from_static("text/html, text/*;q=0.5, image/*, */*;q=0.1")); | ||||
|     default_headers.insert(header::ACCEPT_LANGUAGE, HeaderValue::from_static("en,*;q=0.1")); | ||||
|     default_headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("no-cache")); | ||||
|     default_headers.insert(header::PRAGMA, HeaderValue::from_static("no-cache")); | ||||
|  | ||||
|     // Generate the cookie store | ||||
|     let cookie_store = Arc::new(Jar::default()); | ||||
|  | ||||
|     // Reuse the client between requests | ||||
|     let client = get_reqwest_client_builder() | ||||
|         .cookie_provider(cookie_store.clone()) | ||||
|         .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) | ||||
|         .default_headers(default_headers.clone()); | ||||
|  | ||||
|     match client.build() { | ||||
|         Ok(client) => client, | ||||
|         Err(e) => { | ||||
|             error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'"); | ||||
|             get_reqwest_client_builder() | ||||
|         .cookie_provider(Arc::new(Jar::default())) | ||||
|                 .cookie_provider(cookie_store) | ||||
|                 .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) | ||||
|                 .default_headers(default_headers) | ||||
|                 .trust_dns(false) | ||||
|                 .build() | ||||
|         .expect("Failed to build icon client") | ||||
|                 .expect("Failed to build client") | ||||
|         } | ||||
|     } | ||||
| }); | ||||
|  | ||||
| // Build Regex only once since this takes a lot of time. | ||||
| static ICON_REL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)icon$|apple.*icon").unwrap()); | ||||
| static ICON_REL_BLACKLIST: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?i)mask-icon").unwrap()); | ||||
| static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); | ||||
|  | ||||
| // Special HashMap which holds the user defined Regex to speedup matching the regex. | ||||
| static ICON_BLACKLIST_REGEX: Lazy<RwLock<HashMap<String, Regex>>> = Lazy::new(|| RwLock::new(HashMap::new())); | ||||
|  | ||||
| fn icon_redirect(domain: &str, template: &str) -> Option<Redirect> { | ||||
|     if !is_valid_domain(domain) { | ||||
| async fn icon_redirect(domain: &str, template: &str) -> Option<Redirect> { | ||||
|     if !is_valid_domain(domain).await { | ||||
|         warn!("Invalid domain: {}", domain); | ||||
|         return None; | ||||
|     } | ||||
|  | ||||
|     if is_domain_blacklisted(domain) { | ||||
|     if is_domain_blacklisted(domain).await { | ||||
|         return None; | ||||
|     } | ||||
|  | ||||
| @@ -84,30 +102,30 @@ fn icon_redirect(domain: &str, template: &str) -> Option<Redirect> { | ||||
| } | ||||
|  | ||||
| #[get("/<domain>/icon.png")] | ||||
| fn icon_custom(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, &CONFIG.icon_service()) | ||||
| async fn icon_custom(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, &CONFIG.icon_service()).await | ||||
| } | ||||
|  | ||||
| #[get("/<domain>/icon.png")] | ||||
| fn icon_bitwarden(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://icons.bitwarden.net/{}/icon.png") | ||||
| async fn icon_bitwarden(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://icons.bitwarden.net/{}/icon.png").await | ||||
| } | ||||
|  | ||||
| #[get("/<domain>/icon.png")] | ||||
| fn icon_duckduckgo(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://icons.duckduckgo.com/ip3/{}.ico") | ||||
| async fn icon_duckduckgo(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://icons.duckduckgo.com/ip3/{}.ico").await | ||||
| } | ||||
|  | ||||
| #[get("/<domain>/icon.png")] | ||||
| fn icon_google(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://www.google.com/s2/favicons?domain={}&sz=32") | ||||
| async fn icon_google(domain: String) -> Option<Redirect> { | ||||
|     icon_redirect(&domain, "https://www.google.com/s2/favicons?domain={}&sz=32").await | ||||
| } | ||||
|  | ||||
| #[get("/<domain>/icon.png")] | ||||
| async fn icon_internal(domain: String) -> Cached<(ContentType, Vec<u8>)> { | ||||
|     const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); | ||||
|  | ||||
|     if !is_valid_domain(&domain) { | ||||
|     if !is_valid_domain(&domain).await { | ||||
|         warn!("Invalid domain: {}", domain); | ||||
|         return Cached::ttl( | ||||
|             (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), | ||||
| @@ -128,7 +146,7 @@ async fn icon_internal(domain: String) -> Cached<(ContentType, Vec<u8>)> { | ||||
| /// | ||||
| /// This does some manual checks and makes use of Url to do some basic checking. | ||||
| /// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255. | ||||
| fn is_valid_domain(domain: &str) -> bool { | ||||
| async fn is_valid_domain(domain: &str) -> bool { | ||||
|     const ALLOWED_CHARS: &str = "_-."; | ||||
|  | ||||
|     // If parsing the domain fails using Url, it will not work with reqwest. | ||||
| @@ -260,25 +278,22 @@ mod tests { | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|     let mut is_blacklisted = CONFIG.icon_blacklist_non_global_ips() | ||||
|         && (domain, 0) | ||||
|             .to_socket_addrs() | ||||
|             .map(|x| { | ||||
|                 for ip_port in x { | ||||
|                     if !is_global(ip_port.ip()) { | ||||
|                         warn!("IP {} for domain '{}' is not a global IP!", ip_port.ip(), domain); | ||||
| use cached::proc_macro::cached; | ||||
| #[cached(key = "String", convert = r#"{ domain.to_string() }"#, size = 16, time = 60)] | ||||
| async fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|     if CONFIG.icon_blacklist_non_global_ips() { | ||||
|         if let Ok(s) = lookup_host((domain, 0)).await { | ||||
|             for addr in s { | ||||
|                 if !is_global(addr.ip()) { | ||||
|                     debug!("IP {} for domain '{}' is not a global IP!", addr.ip(), domain); | ||||
|                     return true; | ||||
|                 } | ||||
|             } | ||||
|                 false | ||||
|             }) | ||||
|             .unwrap_or(false); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // Skip the regex check if the previous one is true already | ||||
|     if !is_blacklisted { | ||||
|     if let Some(blacklist) = CONFIG.icon_blacklist_regex() { | ||||
|             let mut regex_hashmap = ICON_BLACKLIST_REGEX.read().unwrap(); | ||||
|         let mut regex_hashmap = ICON_BLACKLIST_REGEX.read().await; | ||||
|  | ||||
|         // Use the pre-generate Regex stored in a Lazy HashMap if there's one, else generate it. | ||||
|         let regex = if let Some(regex) = regex_hashmap.get(&blacklist) { | ||||
| @@ -286,7 +301,7 @@ fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|         } else { | ||||
|             drop(regex_hashmap); | ||||
|  | ||||
|                 let mut regex_hashmap_write = ICON_BLACKLIST_REGEX.write().unwrap(); | ||||
|             let mut regex_hashmap_write = ICON_BLACKLIST_REGEX.write().await; | ||||
|             // Clear the current list if the previous key doesn't exists. | ||||
|             // To prevent growing of the HashMap after someone has changed it via the admin interface. | ||||
|             if regex_hashmap_write.len() >= 1 { | ||||
| @@ -294,23 +309,21 @@ fn is_domain_blacklisted(domain: &str) -> bool { | ||||
|             } | ||||
|  | ||||
|             // Generate the regex to store in too the Lazy Static HashMap. | ||||
|                 let blacklist_regex = Regex::new(&blacklist).unwrap(); | ||||
|                 regex_hashmap_write.insert(blacklist.to_string(), blacklist_regex); | ||||
|             let blacklist_regex = Regex::new(&blacklist); | ||||
|             regex_hashmap_write.insert(blacklist.to_string(), blacklist_regex.unwrap()); | ||||
|             drop(regex_hashmap_write); | ||||
|  | ||||
|                 regex_hashmap = ICON_BLACKLIST_REGEX.read().unwrap(); | ||||
|             regex_hashmap = ICON_BLACKLIST_REGEX.read().await; | ||||
|             regex_hashmap.get(&blacklist).unwrap() | ||||
|         }; | ||||
|  | ||||
|         // Use the pre-generate Regex stored in a Lazy HashMap. | ||||
|         if regex.is_match(domain) { | ||||
|             debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); | ||||
|                 is_blacklisted = true; | ||||
|             return true; | ||||
|         } | ||||
|     } | ||||
|     } | ||||
|  | ||||
|     is_blacklisted | ||||
|     false | ||||
| } | ||||
|  | ||||
| async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> { | ||||
| @@ -322,7 +335,7 @@ async fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> { | ||||
|     } | ||||
|  | ||||
|     if let Some(icon) = get_cached_icon(&path).await { | ||||
|         let icon_type = match get_icon_type(&icon) { | ||||
|         let icon_type = match get_icon_type(&icon).await { | ||||
|             Some(x) => x, | ||||
|             _ => "x-icon", | ||||
|         }; | ||||
| @@ -412,91 +425,62 @@ impl Icon { | ||||
|     } | ||||
| } | ||||
|  | ||||
| /// Iterates over the HTML document to find <base href="http://domain.tld"> | ||||
| /// When found it will stop the iteration and the found base href will be shared deref via `base_href`. | ||||
| /// | ||||
| /// # Arguments | ||||
| /// * `node` - A Parsed HTML document via html5ever::parse_document() | ||||
| /// * `base_href` - a mutable url::Url which will be overwritten when a base href tag has been found. | ||||
| /// | ||||
| fn get_base_href(node: &std::rc::Rc<markup5ever_rcdom::Node>, base_href: &mut url::Url) -> bool { | ||||
|     if let markup5ever_rcdom::NodeData::Element { | ||||
|         name, | ||||
|         attrs, | ||||
|         .. | ||||
|     } = &node.data | ||||
|     { | ||||
|         if name.local.as_ref() == "base" { | ||||
|             let attrs = attrs.borrow(); | ||||
|             for attr in attrs.iter() { | ||||
|                 let attr_name = attr.name.local.as_ref(); | ||||
|                 let attr_value = attr.value.as_ref(); | ||||
| async fn get_favicons_node( | ||||
|     dom: InfallibleTokenizer<StringReader<'_>, FaviconEmitter>, | ||||
|     icons: &mut Vec<Icon>, | ||||
|     url: &url::Url, | ||||
| ) { | ||||
|     const TAG_LINK: &[u8] = b"link"; | ||||
|     const TAG_BASE: &[u8] = b"base"; | ||||
|     const TAG_HEAD: &[u8] = b"head"; | ||||
|     const ATTR_REL: &[u8] = b"rel"; | ||||
|     const ATTR_HREF: &[u8] = b"href"; | ||||
|     const ATTR_SIZES: &[u8] = b"sizes"; | ||||
|  | ||||
|                 if attr_name == "href" { | ||||
|                     debug!("Found base href: {}", attr_value); | ||||
|                     *base_href = match base_href.join(attr_value) { | ||||
|                         Ok(href) => href, | ||||
|                         _ => base_href.clone(), | ||||
|     let mut base_url = url.clone(); | ||||
|     let mut icon_tags: Vec<StartTag> = Vec::new(); | ||||
|     for token in dom { | ||||
|         match token { | ||||
|             FaviconToken::StartTag(tag) => { | ||||
|                 if tag.name == TAG_LINK | ||||
|                     && tag.attributes.contains_key(ATTR_REL) | ||||
|                     && tag.attributes.contains_key(ATTR_HREF) | ||||
|                 { | ||||
|                     let rel_value = std::str::from_utf8(tag.attributes.get(ATTR_REL).unwrap()) | ||||
|                         .unwrap_or_default() | ||||
|                         .to_ascii_lowercase(); | ||||
|                     if rel_value.contains("icon") && !rel_value.contains("mask-icon") { | ||||
|                         icon_tags.push(tag); | ||||
|                     } | ||||
|                 } else if tag.name == TAG_BASE && tag.attributes.contains_key(ATTR_HREF) { | ||||
|                     let href = std::str::from_utf8(tag.attributes.get(ATTR_HREF).unwrap()).unwrap_or_default(); | ||||
|                     debug!("Found base href: {href}"); | ||||
|                     base_url = match base_url.join(href) { | ||||
|                         Ok(inner_url) => inner_url, | ||||
|                         _ => url.clone(), | ||||
|                     }; | ||||
|                     return true; | ||||
|                 } | ||||
|             } | ||||
|             return true; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // TODO: Might want to limit the recursion depth? | ||||
|     for child in node.children.borrow().iter() { | ||||
|         // Check if we got a true back and stop the iter. | ||||
|         // This means we found a <base> tag and can stop processing the html. | ||||
|         if get_base_href(child, base_href) { | ||||
|             return true; | ||||
|         } | ||||
|     } | ||||
|     false | ||||
| } | ||||
|  | ||||
| fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &url::Url) { | ||||
|     if let markup5ever_rcdom::NodeData::Element { | ||||
|         name, | ||||
|         attrs, | ||||
|         .. | ||||
|     } = &node.data | ||||
|     { | ||||
|         if name.local.as_ref() == "link" { | ||||
|             let mut has_rel = false; | ||||
|             let mut href = None; | ||||
|             let mut sizes = None; | ||||
|  | ||||
|             let attrs = attrs.borrow(); | ||||
|             for attr in attrs.iter() { | ||||
|                 let attr_name = attr.name.local.as_ref(); | ||||
|                 let attr_value = attr.value.as_ref(); | ||||
|  | ||||
|                 if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) | ||||
|                 { | ||||
|                     has_rel = true; | ||||
|                 } else if attr_name == "href" { | ||||
|                     href = Some(attr_value); | ||||
|                 } else if attr_name == "sizes" { | ||||
|                     sizes = Some(attr_value); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             if has_rel { | ||||
|                 if let Some(inner_href) = href { | ||||
|                     if let Ok(full_href) = url.join(inner_href).map(String::from) { | ||||
|                         let priority = get_icon_priority(&full_href, sizes); | ||||
|                         icons.push(Icon::new(priority, full_href)); | ||||
|                     } | ||||
|             FaviconToken::EndTag(tag) => { | ||||
|                 if tag.name == TAG_HEAD { | ||||
|                     break; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // TODO: Might want to limit the recursion depth? | ||||
|     for child in node.children.borrow().iter() { | ||||
|         get_favicons_node(child, icons, url); | ||||
|     for icon_tag in icon_tags { | ||||
|         if let Some(icon_href) = icon_tag.attributes.get(ATTR_HREF) { | ||||
|             if let Ok(full_href) = base_url.join(std::str::from_utf8(icon_href).unwrap_or_default()) { | ||||
|                 let sizes = if let Some(v) = icon_tag.attributes.get(ATTR_SIZES) { | ||||
|                     std::str::from_utf8(v).unwrap_or_default() | ||||
|                 } else { | ||||
|                     "" | ||||
|                 }; | ||||
|                 let priority = get_icon_priority(full_href.as_str(), sizes).await; | ||||
|                 icons.push(Icon::new(priority, full_href.to_string())); | ||||
|             } | ||||
|         }; | ||||
|     } | ||||
| } | ||||
|  | ||||
| @@ -514,13 +498,13 @@ struct IconUrlResult { | ||||
| /// | ||||
| /// # Example | ||||
| /// ``` | ||||
| /// let icon_result = get_icon_url("github.com")?; | ||||
| /// let icon_result = get_icon_url("vaultwarden.discourse.group")?; | ||||
| /// let icon_result = get_icon_url("github.com").await?; | ||||
| /// let icon_result = get_icon_url("vaultwarden.discourse.group").await?; | ||||
| /// ``` | ||||
| async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|     // Default URL with secure and insecure schemes | ||||
|     let ssldomain = format!("https://{}", domain); | ||||
|     let httpdomain = format!("http://{}", domain); | ||||
|     let ssldomain = format!("https://{domain}"); | ||||
|     let httpdomain = format!("http://{domain}"); | ||||
|  | ||||
|     // First check the domain as given during the request for both HTTPS and HTTP. | ||||
|     let resp = match get_page(&ssldomain).or_else(|_| get_page(&httpdomain)).await { | ||||
| @@ -537,26 +521,25 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|                     tld = domain_parts.next_back().unwrap(), | ||||
|                     base = domain_parts.next_back().unwrap() | ||||
|                 ); | ||||
|                 if is_valid_domain(&base_domain) { | ||||
|                     let sslbase = format!("https://{}", base_domain); | ||||
|                     let httpbase = format!("http://{}", base_domain); | ||||
|                     debug!("[get_icon_url]: Trying without subdomains '{}'", base_domain); | ||||
|                 if is_valid_domain(&base_domain).await { | ||||
|                     let sslbase = format!("https://{base_domain}"); | ||||
|                     let httpbase = format!("http://{base_domain}"); | ||||
|                     debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); | ||||
|  | ||||
|                     sub_resp = get_page(&sslbase).or_else(|_| get_page(&httpbase)).await; | ||||
|                 } | ||||
|  | ||||
|             // When the domain is not an IP, and has less then 2 dots, try to add www. infront of it. | ||||
|             } else if is_ip.is_err() && domain.matches('.').count() < 2 { | ||||
|                 let www_domain = format!("www.{}", domain); | ||||
|                 if is_valid_domain(&www_domain) { | ||||
|                     let sslwww = format!("https://{}", www_domain); | ||||
|                     let httpwww = format!("http://{}", www_domain); | ||||
|                     debug!("[get_icon_url]: Trying with www. prefix '{}'", www_domain); | ||||
|                 let www_domain = format!("www.{domain}"); | ||||
|                 if is_valid_domain(&www_domain).await { | ||||
|                     let sslwww = format!("https://{www_domain}"); | ||||
|                     let httpwww = format!("http://{www_domain}"); | ||||
|                     debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); | ||||
|  | ||||
|                     sub_resp = get_page(&sslwww).or_else(|_| get_page(&httpwww)).await; | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             sub_resp | ||||
|         } | ||||
|     }; | ||||
| @@ -571,26 +554,23 @@ async fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> { | ||||
|  | ||||
|         // Set the referer to be used on the final request, some sites check this. | ||||
|         // Mostly used to prevent direct linking and other security resons. | ||||
|         referer = url.as_str().to_string(); | ||||
|         referer = url.to_string(); | ||||
|  | ||||
|         // Add the default favicon.ico to the list with the domain the content responded from. | ||||
|         // Add the fallback favicon.ico and apple-touch-icon.png to the list with the domain the content responded from. | ||||
|         iconlist.push(Icon::new(35, String::from(url.join("/favicon.ico").unwrap()))); | ||||
|         iconlist.push(Icon::new(40, String::from(url.join("/apple-touch-icon.png").unwrap()))); | ||||
|  | ||||
|         // 384KB should be more than enough for the HTML, though as we only really need the HTML header. | ||||
|         let mut limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.reader(); | ||||
|         let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec(); | ||||
|  | ||||
|         use html5ever::tendril::TendrilSink; | ||||
|         let dom = html5ever::parse_document(markup5ever_rcdom::RcDom::default(), Default::default()) | ||||
|             .from_utf8() | ||||
|             .read_from(&mut limited_reader)?; | ||||
|  | ||||
|         let mut base_url: url::Url = url; | ||||
|         get_base_href(&dom.document, &mut base_url); | ||||
|         get_favicons_node(&dom.document, &mut iconlist, &base_url); | ||||
|         let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default()).infallible(); | ||||
|         get_favicons_node(dom, &mut iconlist, &url).await; | ||||
|     } else { | ||||
|         // Add the default favicon.ico to the list with just the given domain | ||||
|         iconlist.push(Icon::new(35, format!("{}/favicon.ico", ssldomain))); | ||||
|         iconlist.push(Icon::new(35, format!("{}/favicon.ico", httpdomain))); | ||||
|         iconlist.push(Icon::new(35, format!("{ssldomain}/favicon.ico"))); | ||||
|         iconlist.push(Icon::new(40, format!("{ssldomain}/apple-touch-icon.png"))); | ||||
|         iconlist.push(Icon::new(35, format!("{httpdomain}/favicon.ico"))); | ||||
|         iconlist.push(Icon::new(40, format!("{httpdomain}/apple-touch-icon.png"))); | ||||
|     } | ||||
|  | ||||
|     // Sort the iconlist by priority | ||||
| @@ -608,7 +588,7 @@ async fn get_page(url: &str) -> Result<Response, Error> { | ||||
| } | ||||
|  | ||||
| async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Error> { | ||||
|     if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()) { | ||||
|     if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()).await { | ||||
|         warn!("Favicon '{}' resolves to a blacklisted domain or IP!", url); | ||||
|     } | ||||
|  | ||||
| @@ -632,12 +612,12 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err | ||||
| /// | ||||
| /// # Example | ||||
| /// ``` | ||||
| /// priority1 = get_icon_priority("http://example.com/path/to/a/favicon.png", "32x32"); | ||||
| /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", ""); | ||||
| /// priority1 = get_icon_priority("http://example.com/path/to/a/favicon.png", "32x32").await; | ||||
| /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "").await; | ||||
| /// ``` | ||||
| fn get_icon_priority(href: &str, sizes: Option<&str>) -> u8 { | ||||
| async fn get_icon_priority(href: &str, sizes: &str) -> u8 { | ||||
|     // Check if there is a dimension set | ||||
|     let (width, height) = parse_sizes(sizes); | ||||
|     let (width, height) = parse_sizes(sizes).await; | ||||
|  | ||||
|     // Check if there is a size given | ||||
|     if width != 0 && height != 0 { | ||||
| @@ -679,15 +659,15 @@ fn get_icon_priority(href: &str, sizes: Option<&str>) -> u8 { | ||||
| /// | ||||
| /// # Example | ||||
| /// ``` | ||||
| /// let (width, height) = parse_sizes("64x64"); // (64, 64) | ||||
| /// let (width, height) = parse_sizes("x128x128"); // (128, 128) | ||||
| /// let (width, height) = parse_sizes("32"); // (0, 0) | ||||
| /// let (width, height) = parse_sizes("64x64").await; // (64, 64) | ||||
| /// let (width, height) = parse_sizes("x128x128").await; // (128, 128) | ||||
| /// let (width, height) = parse_sizes("32").await; // (0, 0) | ||||
| /// ``` | ||||
| fn parse_sizes(sizes: Option<&str>) -> (u16, u16) { | ||||
| async fn parse_sizes(sizes: &str) -> (u16, u16) { | ||||
|     let mut width: u16 = 0; | ||||
|     let mut height: u16 = 0; | ||||
|  | ||||
|     if let Some(sizes) = sizes { | ||||
|     if !sizes.is_empty() { | ||||
|         match ICON_SIZE_REGEX.captures(sizes.trim()) { | ||||
|             None => {} | ||||
|             Some(dimensions) => { | ||||
| @@ -703,7 +683,7 @@ fn parse_sizes(sizes: Option<&str>) -> (u16, u16) { | ||||
| } | ||||
|  | ||||
| async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { | ||||
|     if is_domain_blacklisted(domain) { | ||||
|     if is_domain_blacklisted(domain).await { | ||||
|         err_silent!("Domain is blacklisted", domain) | ||||
|     } | ||||
|  | ||||
| @@ -727,7 +707,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { | ||||
|                     // Also check if the size is atleast 67 bytes, which seems to be the smallest png i could create | ||||
|                     if body.len() >= 67 { | ||||
|                         // Check if the icon type is allowed, else try an icon from the list. | ||||
|                         icon_type = get_icon_type(&body); | ||||
|                         icon_type = get_icon_type(&body).await; | ||||
|                         if icon_type.is_none() { | ||||
|                             debug!("Icon from {} data:image uri, is not a valid image type", domain); | ||||
|                             continue; | ||||
| @@ -742,10 +722,10 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { | ||||
|         } else { | ||||
|             match get_page_with_referer(&icon.href, &icon_result.referer).await { | ||||
|                 Ok(res) => { | ||||
|                     buffer = stream_to_bytes_limit(res, 512 * 1024).await?; // 512 KB for each icon max | ||||
|                                                                             // Check if the icon type is allowed, else try an icon from the list. | ||||
|                     icon_type = get_icon_type(&buffer); | ||||
|                     buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) | ||||
|  | ||||
|                     // Check if the icon type is allowed, else try an icon from the list. | ||||
|                     icon_type = get_icon_type(&buffer).await; | ||||
|                     if icon_type.is_none() { | ||||
|                         buffer.clear(); | ||||
|                         debug!("Icon from {}, is not a valid image type", icon.href); | ||||
| @@ -780,7 +760,7 @@ async fn save_icon(path: &str, icon: &[u8]) { | ||||
|     } | ||||
| } | ||||
|  | ||||
| fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { | ||||
| async fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { | ||||
|     match bytes { | ||||
|         [137, 80, 78, 71, ..] => Some("png"), | ||||
|         [0, 0, 1, 0, ..] => Some("x-icon"), | ||||
| @@ -792,13 +772,30 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { | ||||
|     } | ||||
| } | ||||
|  | ||||
| /// Minimize the amount of bytes to be parsed from a reqwest result. | ||||
| /// This prevents very long parsing and memory usage. | ||||
| async fn stream_to_bytes_limit(res: Response, max_size: usize) -> Result<Bytes, reqwest::Error> { | ||||
|     let mut stream = res.bytes_stream().take(max_size); | ||||
|     let mut buf = BytesMut::new(); | ||||
|     let mut size = 0; | ||||
|     while let Some(chunk) = stream.next().await { | ||||
|         let chunk = &chunk?; | ||||
|         size += chunk.len(); | ||||
|         buf.extend(chunk); | ||||
|         if size >= max_size { | ||||
|             break; | ||||
|         } | ||||
|     } | ||||
|     Ok(buf.freeze()) | ||||
| } | ||||
|  | ||||
| /// This is an implementation of the default Cookie Jar from Reqwest and reqwest_cookie_store build by pfernie. | ||||
| /// The default cookie jar used by Reqwest keeps all the cookies based upon the Max-Age or Expires which could be a long time. | ||||
| /// That could be used for tracking, to prevent this we force the lifespan of the cookies to always be max two minutes. | ||||
| /// A Cookie Jar is needed because some sites force a redirect with cookies to verify if a request uses cookies or not. | ||||
| use cookie_store::CookieStore; | ||||
| #[derive(Default)] | ||||
| pub struct Jar(RwLock<CookieStore>); | ||||
| pub struct Jar(std::sync::RwLock<CookieStore>); | ||||
|  | ||||
| impl reqwest::cookie::CookieStore for Jar { | ||||
|     fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &header::HeaderValue>, url: &url::Url) { | ||||
| @@ -836,11 +833,136 @@ impl reqwest::cookie::CookieStore for Jar { | ||||
|     } | ||||
| } | ||||
|  | ||||
| async fn stream_to_bytes_limit(res: Response, max_size: usize) -> Result<Bytes, reqwest::Error> { | ||||
|     let mut stream = res.bytes_stream().take(max_size); | ||||
|     let mut buf = BytesMut::new(); | ||||
|     while let Some(chunk) = stream.next().await { | ||||
|         buf.extend(chunk?); | ||||
| /// Custom FaviconEmitter for the html5gum parser. | ||||
| /// The FaviconEmitter is using an almost 1:1 copy of the DefaultEmitter with some small changes. | ||||
| /// This prevents emitting tags like comments, doctype and also strings between the tags. | ||||
| /// Therefore, parsing the HTML content is faster. | ||||
| use std::collections::{BTreeSet, VecDeque}; | ||||
|  | ||||
| enum FaviconToken { | ||||
|     StartTag(StartTag), | ||||
|     EndTag(EndTag), | ||||
| } | ||||
|     Ok(buf.freeze()) | ||||
|  | ||||
#[derive(Default)]
struct FaviconEmitter {
    // Tag token currently being built by the tokenizer, if any.
    current_token: Option<FaviconToken>,
    // Name of the most recently emitted start tag (used to match end tags).
    last_start_tag: Vec<u8>,
    // Attribute (name, value) pair currently being built, if any.
    current_attribute: Option<(Vec<u8>, Vec<u8>)>,
    // Attribute names already seen on the current end tag.
    seen_attributes: BTreeSet<Vec<u8>>,
    // Finished tokens queued for `pop_token` (FIFO via push_front/pop_back).
    emitted_tokens: VecDeque<FaviconToken>,
}
|  | ||||
| impl FaviconEmitter { | ||||
|     fn emit_token(&mut self, token: FaviconToken) { | ||||
|         self.emitted_tokens.push_front(token); | ||||
|     } | ||||
|  | ||||
|     fn flush_current_attribute(&mut self) { | ||||
|         if let Some((k, v)) = self.current_attribute.take() { | ||||
|             match self.current_token { | ||||
|                 Some(FaviconToken::StartTag(ref mut tag)) => { | ||||
|                     tag.attributes.entry(k).and_modify(|_| {}).or_insert(v); | ||||
|                 } | ||||
|                 Some(FaviconToken::EndTag(_)) => { | ||||
|                     self.seen_attributes.insert(k); | ||||
|                 } | ||||
|                 _ => { | ||||
|                     debug_assert!(false); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
impl Emitter for FaviconEmitter {
    type Token = FaviconToken;

    /// Remember the name of the last emitted start tag; the tokenizer uses it
    /// to decide whether an end tag matches the currently open element.
    fn set_last_start_tag(&mut self, last_start_tag: Option<&[u8]>) {
        self.last_start_tag.clear();
        self.last_start_tag.extend(last_start_tag.unwrap_or_default());
    }

    /// Hand the oldest queued token back to the tokenizer.
    /// Tokens were pushed at the front, so popping from the back keeps FIFO order.
    fn pop_token(&mut self) -> Option<Self::Token> {
        self.emitted_tokens.pop_back()
    }

    /// Begin building a `<tag ...>` start tag token.
    fn init_start_tag(&mut self) {
        self.current_token = Some(FaviconToken::StartTag(StartTag::default()));
    }

    /// Begin building a `</tag>` end tag token; attribute names seen on end
    /// tags are tracked separately because end tags carry no attributes.
    fn init_end_tag(&mut self) {
        self.current_token = Some(FaviconToken::EndTag(EndTag::default()));
        self.seen_attributes.clear();
    }

    /// Finish the tag under construction and queue it for emission.
    fn emit_current_tag(&mut self) {
        self.flush_current_attribute();
        let mut token = self.current_token.take().unwrap();
        match token {
            FaviconToken::EndTag(_) => {
                // Any attributes collected on an end tag are discarded.
                self.seen_attributes.clear();
            }
            FaviconToken::StartTag(ref mut tag) => {
                // Record this name so a following end tag can be matched against it.
                self.set_last_start_tag(Some(&tag.name));
            }
        }
        self.emit_token(token);
    }

    /// Append bytes to the name of the tag currently being built.
    fn push_tag_name(&mut self, s: &[u8]) {
        match self.current_token {
            Some(
                FaviconToken::StartTag(StartTag {
                    ref mut name,
                    ..
                })
                | FaviconToken::EndTag(EndTag {
                    ref mut name,
                    ..
                }),
            ) => {
                name.extend(s);
            }
            // The tokenizer only pushes tag-name bytes while a tag is open.
            _ => debug_assert!(false),
        }
    }

    /// Start a new attribute, flushing the previous one into the token first.
    fn init_attribute(&mut self) {
        self.flush_current_attribute();
        self.current_attribute = Some((Vec::new(), Vec::new()));
    }

    /// Append bytes to the current attribute's name.
    fn push_attribute_name(&mut self, s: &[u8]) {
        self.current_attribute.as_mut().unwrap().0.extend(s);
    }

    /// Append bytes to the current attribute's value.
    fn push_attribute_value(&mut self, s: &[u8]) {
        self.current_attribute.as_mut().unwrap().1.extend(s);
    }

    /// True when the end tag being built matches the last emitted start tag.
    fn current_is_appropriate_end_tag_token(&mut self) -> bool {
        match self.current_token {
            Some(FaviconToken::EndTag(ref tag)) => !self.last_start_tag.is_empty() && self.last_start_tag == tag.name,
            _ => false,
        }
    }

    // We do not want and need these parts of the HTML document
    // These will be skipped and ignored during the tokenization and iteration.
    fn emit_current_comment(&mut self) {}
    fn emit_current_doctype(&mut self) {}
    fn emit_eof(&mut self) {}
    fn emit_error(&mut self, _: html5gum::Error) {}
    fn emit_string(&mut self, _: &[u8]) {}
    fn init_comment(&mut self) {}
    fn init_doctype(&mut self) {}
    fn push_comment(&mut self, _: &[u8]) {}
    fn push_doctype_name(&mut self, _: &[u8]) {}
    fn push_doctype_public_identifier(&mut self, _: &[u8]) {}
    fn push_doctype_system_identifier(&mut self, _: &[u8]) {}
    fn set_doctype_public_identifier(&mut self, _: &[u8]) {}
    fn set_doctype_system_identifier(&mut self, _: &[u8]) {}
    fn set_force_quirks(&mut self) {}
    fn set_self_closing(&mut self) {}
}
|   | ||||
| @@ -569,12 +569,14 @@ make_config! { | ||||
|         _enable_smtp:                  bool,   true,   def,     true; | ||||
|         /// Host | ||||
|         smtp_host:                     String, true,   option; | ||||
|         /// Enable Secure SMTP |> (Explicit) - Enabling this by default would use STARTTLS (Standard ports 587 or 25) | ||||
|         smtp_ssl:                      bool,   true,   def,     true; | ||||
|         /// Force TLS |> (Implicit) - Enabling this would force the use of an SSL/TLS connection, instead of upgrading an insecure one with STARTTLS (Standard port 465) | ||||
|         smtp_explicit_tls:             bool,   true,   def,     false; | ||||
|         /// DEPRECATED smtp_ssl |> DEPRECATED - Please use SMTP_SECURITY | ||||
|         smtp_ssl:                      bool,   false,  option; | ||||
|         /// DEPRECATED smtp_explicit_tls |> DEPRECATED - Please use SMTP_SECURITY | ||||
|         smtp_explicit_tls:             bool,   false,  option; | ||||
|         /// Secure SMTP |> ("starttls", "force_tls", "off") Enable a secure connection. Default is "starttls" (Explicit - ports 587 or 25), "force_tls" (Implicit - port 465) or "off", no encryption | ||||
|         smtp_security:                 String, true,   auto,    |c| smtp_convert_deprecated_ssl_options(c.smtp_ssl, c.smtp_explicit_tls); // TODO: After deprecation make it `def, "starttls".to_string()` | ||||
|         /// Port | ||||
|         smtp_port:                     u16,    true,   auto,    |c| if c.smtp_explicit_tls {465} else if c.smtp_ssl {587} else {25}; | ||||
|         smtp_port:                     u16,    true,   auto,    |c| if c.smtp_security == *"force_tls" {465} else if c.smtp_security == *"starttls" {587} else {25}; | ||||
|         /// From Address | ||||
|         smtp_from:                     String, true,   def,     String::new(); | ||||
|         /// From Name | ||||
| @@ -657,6 +659,13 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { | ||||
|     } | ||||
|  | ||||
|     if cfg._enable_smtp { | ||||
|         match cfg.smtp_security.as_str() { | ||||
|             "off" | "starttls" | "force_tls" => (), | ||||
|             _ => err!( | ||||
|                 "`SMTP_SECURITY` is invalid. It needs to be one of the following options: starttls, force_tls or off" | ||||
|             ), | ||||
|         } | ||||
|  | ||||
|         if cfg.smtp_host.is_some() == cfg.smtp_from.is_empty() { | ||||
|             err!("Both `SMTP_HOST` and `SMTP_FROM` need to be set for email support") | ||||
|         } | ||||
| @@ -735,6 +744,20 @@ fn extract_url_path(url: &str) -> String { | ||||
|     } | ||||
| } | ||||
|  | ||||
/// Convert the deprecated `SMTP_SSL` and `SMTP_EXPLICIT_TLS` options into the
/// equivalent `SMTP_SECURITY` value: "force_tls", "off" or "starttls".
///
/// Precedence mirrors the previous behaviour: an explicit-TLS request wins,
/// then an explicit SSL opt-out, otherwise the default "starttls".
fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option<bool>, smtp_explicit_tls: Option<bool>) -> String {
    if smtp_explicit_tls.is_some() || smtp_ssl.is_some() {
        println!("[DEPRECATED]: `SMTP_SSL` or `SMTP_EXPLICIT_TLS` is set. Please use `SMTP_SECURITY` instead.");
    }
    if smtp_explicit_tls == Some(true) {
        "force_tls".to_string()
    } else if smtp_ssl == Some(false) {
        "off".to_string()
    } else {
        // Return the default `starttls` in all other cases
        "starttls".to_string()
    }
}
|  | ||||
| impl Config { | ||||
|     pub fn load() -> Result<Self, Error> { | ||||
|         // Loading from env and file | ||||
|   | ||||
| @@ -30,7 +30,7 @@ fn mailer() -> SmtpTransport { | ||||
|         .timeout(Some(Duration::from_secs(CONFIG.smtp_timeout()))); | ||||
|  | ||||
|     // Determine security | ||||
|     let smtp_client = if CONFIG.smtp_ssl() || CONFIG.smtp_explicit_tls() { | ||||
|     let smtp_client = if CONFIG.smtp_security() != *"off" { | ||||
|         let mut tls_parameters = TlsParameters::builder(host); | ||||
|         if CONFIG.smtp_accept_invalid_hostnames() { | ||||
|             tls_parameters = tls_parameters.dangerous_accept_invalid_hostnames(true); | ||||
| @@ -40,7 +40,7 @@ fn mailer() -> SmtpTransport { | ||||
|         } | ||||
|         let tls_parameters = tls_parameters.build().unwrap(); | ||||
|  | ||||
|         if CONFIG.smtp_explicit_tls() { | ||||
|         if CONFIG.smtp_security() == *"force_tls" { | ||||
|             smtp_client.tls(Tls::Wrapper(tls_parameters)) | ||||
|         } else { | ||||
|             smtp_client.tls(Tls::Required(tls_parameters)) | ||||
|   | ||||
| @@ -329,7 +329,6 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error> | ||||
|     let basepath = &CONFIG.domain_path(); | ||||
|  | ||||
|     let mut config = rocket::Config::from(rocket::Config::figment()); | ||||
|     config.address = std::net::IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED); // TODO: Allow this to be changed, keep ROCKET_ADDRESS for compat | ||||
|     config.temp_dir = canonicalize(CONFIG.tmp_folder()).unwrap().into(); | ||||
|     config.limits = Limits::new() // | ||||
|         .limit("json", 10.megabytes()) | ||||
|   | ||||
							
								
								
									
										5369
									
								
								src/static/scripts/bootstrap-native.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5369
									
								
								src/static/scripts/bootstrap-native.js
									
									
									
									
										vendored
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										38
									
								
								src/static/scripts/datatables.css
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										38
									
								
								src/static/scripts/datatables.css
									
									
									
									
										vendored
									
									
								
							| @@ -4,22 +4,13 @@ | ||||
|  * | ||||
|  * To rebuild or modify this file with the latest versions of the included | ||||
|  * software please visit: | ||||
|  *   https://datatables.net/download/#bs5/dt-1.11.3 | ||||
|  *   https://datatables.net/download/#bs5/dt-1.11.4 | ||||
|  * | ||||
|  * Included libraries: | ||||
|  *   DataTables 1.11.3 | ||||
|  *   DataTables 1.11.4 | ||||
|  */ | ||||
|  | ||||
| @charset "UTF-8"; | ||||
| td.dt-control { | ||||
|   background: url("https://www.datatables.net/examples/resources/details_open.png") no-repeat center center; | ||||
|   cursor: pointer; | ||||
| } | ||||
|  | ||||
| tr.dt-hasChild td.dt-control { | ||||
|   background: url("https://www.datatables.net/examples/resources/details_close.png") no-repeat center center; | ||||
| } | ||||
|  | ||||
| table.dataTable th.dt-left, | ||||
| table.dataTable td.dt-left { | ||||
|   text-align: left; | ||||
| @@ -91,6 +82,31 @@ table.dataTable tbody th.dt-body-nowrap, | ||||
| table.dataTable tbody td.dt-body-nowrap { | ||||
|   white-space: nowrap; | ||||
| } | ||||
| table.dataTable td.dt-control { | ||||
|   text-align: center; | ||||
|   cursor: pointer; | ||||
| } | ||||
| table.dataTable td.dt-control:before { | ||||
|   height: 1em; | ||||
|   width: 1em; | ||||
|   margin-top: -9px; | ||||
|   display: inline-block; | ||||
|   color: white; | ||||
|   border: 0.15em solid white; | ||||
|   border-radius: 1em; | ||||
|   box-shadow: 0 0 0.2em #444; | ||||
|   box-sizing: content-box; | ||||
|   text-align: center; | ||||
|   text-indent: 0 !important; | ||||
|   font-family: "Courier New", Courier, monospace; | ||||
|   line-height: 1em; | ||||
|   content: "+"; | ||||
|   background-color: #31b131; | ||||
| } | ||||
| table.dataTable tr.dt-hasChild td.dt-control:before { | ||||
|   content: "-"; | ||||
|   background-color: #d33333; | ||||
| } | ||||
|  | ||||
| /*! Bootstrap 5 integration for DataTables | ||||
|  * | ||||
|   | ||||
							
								
								
									
										69
									
								
								src/static/scripts/datatables.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										69
									
								
								src/static/scripts/datatables.js
									
									
									
									
										vendored
									
									
								
							| @@ -4,20 +4,20 @@ | ||||
|  * | ||||
|  * To rebuild or modify this file with the latest versions of the included | ||||
|  * software please visit: | ||||
|  *   https://datatables.net/download/#bs5/dt-1.11.3 | ||||
|  *   https://datatables.net/download/#bs5/dt-1.11.4 | ||||
|  * | ||||
|  * Included libraries: | ||||
|  *   DataTables 1.11.3 | ||||
|  *   DataTables 1.11.4 | ||||
|  */ | ||||
|  | ||||
| /*! DataTables 1.11.3 | ||||
| /*! DataTables 1.11.4 | ||||
|  * ©2008-2021 SpryMedia Ltd - datatables.net/license | ||||
|  */ | ||||
|  | ||||
| /** | ||||
|  * @summary     DataTables | ||||
|  * @description Paginate, search and order HTML tables | ||||
|  * @version     1.11.3 | ||||
|  * @version     1.11.4 | ||||
|  * @file        jquery.dataTables.js | ||||
|  * @author      SpryMedia Ltd | ||||
|  * @contact     www.datatables.net | ||||
| @@ -3462,6 +3462,9 @@ | ||||
| 	 */ | ||||
| 	function _fnDraw( oSettings, ajaxComplete ) | ||||
| 	{ | ||||
| 		// Allow for state saving and a custom start position | ||||
| 		_fnStart( oSettings ); | ||||
| 	 | ||||
| 		/* Provide a pre-callback function which can be used to cancel the draw is false is returned */ | ||||
| 		var aPreDraw = _fnCallbackFire( oSettings, 'aoPreDrawCallback', 'preDraw', [oSettings] ); | ||||
| 		if ( $.inArray( false, aPreDraw ) !== -1 ) | ||||
| @@ -3470,34 +3473,18 @@ | ||||
| 			return; | ||||
| 		} | ||||
| 	 | ||||
| 		var i, iLen, n; | ||||
| 		var anRows = []; | ||||
| 		var iRowCount = 0; | ||||
| 		var asStripeClasses = oSettings.asStripeClasses; | ||||
| 		var iStripes = asStripeClasses.length; | ||||
| 		var iOpenRows = oSettings.aoOpenRows.length; | ||||
| 		var oLang = oSettings.oLanguage; | ||||
| 		var iInitDisplayStart = oSettings.iInitDisplayStart; | ||||
| 		var bServerSide = _fnDataSource( oSettings ) == 'ssp'; | ||||
| 		var aiDisplay = oSettings.aiDisplay; | ||||
| 	 | ||||
| 		oSettings.bDrawing = true; | ||||
| 	 | ||||
| 		/* Check and see if we have an initial draw position from state saving */ | ||||
| 		if ( iInitDisplayStart !== undefined && iInitDisplayStart !== -1 ) | ||||
| 		{ | ||||
| 			oSettings._iDisplayStart = bServerSide ? | ||||
| 				iInitDisplayStart : | ||||
| 				iInitDisplayStart >= oSettings.fnRecordsDisplay() ? | ||||
| 					0 : | ||||
| 					iInitDisplayStart; | ||||
| 	 | ||||
| 			oSettings.iInitDisplayStart = -1; | ||||
| 		} | ||||
| 	 | ||||
| 		var iDisplayStart = oSettings._iDisplayStart; | ||||
| 		var iDisplayEnd = oSettings.fnDisplayEnd(); | ||||
| 	 | ||||
| 		oSettings.bDrawing = true; | ||||
| 	 | ||||
| 		/* Server-side processing draw intercept */ | ||||
| 		if ( oSettings.bDeferLoading ) | ||||
| 		{ | ||||
| @@ -3899,6 +3886,28 @@ | ||||
| 		return aReturn; | ||||
| 	} | ||||
| 	 | ||||
	/**
	 * Set the start position for draw
	 *  @param {object} oSettings dataTables settings object
	 */
	function _fnStart( oSettings )
	{
		var bServerSide = _fnDataSource( oSettings ) == 'ssp';
		var iInitDisplayStart = oSettings.iInitDisplayStart;
	 
		// Check and see if we have an initial draw position from state saving
		if ( iInitDisplayStart !== undefined && iInitDisplayStart !== -1 )
		{
			// Server-side processing uses the saved start verbatim; client-side
			// clamps it to 0 when it lies past the end of the current data set.
			oSettings._iDisplayStart = bServerSide ?
				iInitDisplayStart :
				iInitDisplayStart >= oSettings.fnRecordsDisplay() ?
					0 :
					iInitDisplayStart;
	 
			// Consume the saved position so subsequent draws do not reuse it.
			oSettings.iInitDisplayStart = -1;
		}
	}
| 	 | ||||
| 	/** | ||||
| 	 * Create an Ajax call based on the table's settings, taking into account that | ||||
| 	 * parameters can have multiple forms, and backwards compatibility. | ||||
| @@ -3942,8 +3951,8 @@ | ||||
| 		var ajax = oSettings.ajax; | ||||
| 		var instance = oSettings.oInstance; | ||||
| 		var callback = function ( json ) { | ||||
| 			var status = oSettings.jqXhr | ||||
| 				? oSettings.jqXhr.status | ||||
| 			var status = oSettings.jqXHR | ||||
| 				? oSettings.jqXHR.status | ||||
| 				: null; | ||||
| 	 | ||||
| 			if ( json === null || (typeof status === 'number' && status == 204 ) ) { | ||||
| @@ -5487,7 +5496,7 @@ | ||||
| 	 | ||||
| 		// Sanity check that the table is of a sensible width. If not then we are going to get | ||||
| 		// misalignment - try to prevent this by not allowing the table to shrink below its min width | ||||
| 		if ( table.outerWidth() < sanityWidth ) | ||||
| 		if ( Math.round(table.outerWidth()) < Math.round(sanityWidth) ) | ||||
| 		{ | ||||
| 			// The min width depends upon if we have a vertical scrollbar visible or not */ | ||||
| 			correction = ((divBodyEl.scrollHeight > divBodyEl.offsetHeight || | ||||
| @@ -6496,10 +6505,14 @@ | ||||
| 		// Restore key features - todo - for 1.11 this needs to be done by | ||||
| 		// subscribed events | ||||
| 		if ( s.start !== undefined ) { | ||||
| 			settings._iDisplayStart    = s.start; | ||||
| 			if(api === null) { | ||||
| 				settings._iDisplayStart    = s.start; | ||||
| 				settings.iInitDisplayStart = s.start; | ||||
| 			} | ||||
| 			else { | ||||
| 				_fnPageChange(settings, s.start/s.length); | ||||
| 	 | ||||
| 			} | ||||
| 		} | ||||
| 		if ( s.length !== undefined ) { | ||||
| 			settings._iDisplayLength   = s.length; | ||||
| @@ -9644,7 +9657,7 @@ | ||||
| 	 *  @type string | ||||
| 	 *  @default Version number | ||||
| 	 */ | ||||
| 	DataTable.version = "1.11.3"; | ||||
| 	DataTable.version = "1.11.4"; | ||||
|  | ||||
| 	/** | ||||
| 	 * Private data store, containing all of the settings objects that are | ||||
| @@ -14069,7 +14082,7 @@ | ||||
| 		 * | ||||
| 		 *  @type string | ||||
| 		 */ | ||||
| 		build:"bs5/dt-1.11.3", | ||||
| 		build:"bs5/dt-1.11.4", | ||||
| 	 | ||||
| 	 | ||||
| 		/** | ||||
|   | ||||
| @@ -616,7 +616,13 @@ where | ||||
| use reqwest::{header, Client, ClientBuilder}; | ||||
|  | ||||
| pub fn get_reqwest_client() -> Client { | ||||
|     get_reqwest_client_builder().build().expect("Failed to build client") | ||||
|     match get_reqwest_client_builder().build() { | ||||
|         Ok(client) => client, | ||||
|         Err(e) => { | ||||
|             error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'"); | ||||
|             get_reqwest_client_builder().trust_dns(false).build().expect("Failed to build client") | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| pub fn get_reqwest_client_builder() -> ClientBuilder { | ||||
|   | ||||
		Reference in New Issue
	
	Block a user