Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-09-10 18:55:57 +03:00)
Compare commits
12 Commits
33bae5fbe9
f60502a17e
13f4b66e62
c967d0ddc1
ae6ed0ece8
b7c254eb30
a47b484172
65629a99f0
49c5dec9b6
cd195ff243
e3541763fd
f0efec7c96
@@ -347,6 +347,7 @@
 ## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials.
 ## - "autofill-v2": Use the new autofill implementation.
 ## - "browser-fileless-import": Directly import credentials from other providers without a file.
+## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension)
 ## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor.
 # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
.github/workflows/build.yml (vendored): 2 changes

@@ -47,7 +47,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: "Checkout"
-uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
 # End Checkout the repo
.github/workflows/hadolint.yml (vendored): 4 changes

@@ -13,12 +13,12 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
 # End Checkout the repo

 # Start Docker Buildx
 - name: Setup Docker Buildx
-uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
+uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
 # https://github.com/moby/buildkit/issues/3969
 # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
 with:
.github/workflows/release.yml (vendored): 14 changes

@@ -58,7 +58,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
 with:
 fetch-depth: 0

@@ -69,7 +69,7 @@ jobs:
 # Start Docker Buildx
 - name: Setup Docker Buildx
-uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
+uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
 # https://github.com/moby/buildkit/issues/3969
 # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
 with:

@@ -165,7 +165,7 @@ jobs:
 echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"

 - name: Bake ${{ matrix.base_image }} containers
-uses: docker/bake-action@76cc8060bdff6d632a465001e4cf300684c5472c # v5.7.0
+uses: docker/bake-action@2e3d19baedb14545e5d41222653874f25d5b4dfb # v5.10.0
 env:
 BASE_TAGS: "${{ env.BASE_TAGS }}"
 SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"

@@ -223,28 +223,28 @@ jobs:
 # Upload artifacts to Github Actions
 - name: "Upload amd64 artifact"
-uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
+uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
 if: ${{ matrix.base_image == 'alpine' }}
 with:
 name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64
 path: vaultwarden-amd64

 - name: "Upload arm64 artifact"
-uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
+uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
 if: ${{ matrix.base_image == 'alpine' }}
 with:
 name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64
 path: vaultwarden-arm64

 - name: "Upload armv7 artifact"
-uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
+uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
 if: ${{ matrix.base_image == 'alpine' }}
 with:
 name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7
 path: vaultwarden-armv7

 - name: "Upload armv6 artifact"
-uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
+uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
 if: ${{ matrix.base_image == 'alpine' }}
 with:
 name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6
.github/workflows/trivy.yml (vendored): 11 changes

@@ -9,13 +9,16 @@ on:
 pull_request:
 branches: [ "main" ]
 schedule:
-- cron: '00 12 * * *'
+- cron: '08 11 * * *'

 permissions:
 contents: read

 jobs:
 trivy-scan:
+# Only run this in the master repo and not on forks
+# When all forks run this at the same time, it is causing `Too Many Requests` issues
+if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
 name: Check
 runs-on: ubuntu-24.04
 timeout-minutes: 30

@@ -25,10 +28,10 @@ jobs:
 actions: read
 steps:
 - name: Checkout code
-uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1

 - name: Run Trivy vulnerability scanner
-uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
+uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0
 with:
 scan-type: repo
 ignore-unfixed: true

@@ -37,6 +40,6 @@ jobs:
 severity: CRITICAL,HIGH

 - name: Upload Trivy scan results to GitHub Security tab
-uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.25.10
+uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6
 with:
 sarif_file: 'trivy-results.sarif'
Cargo.lock (generated): 417 changes
File diff suppressed because it is too large.
Cargo.toml: 30 changes

@@ -3,7 +3,7 @@ name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.79.0"
+rust-version = "1.80.0"
 resolver = "2"

 repository = "https://github.com/dani-garcia/vaultwarden"

@@ -34,21 +34,21 @@ query_logger = ["dep:diesel_logger"]
 # Currently only used to enable rusts official ip support
 unstable = []

-[target."cfg(not(windows))".dependencies]
+[target."cfg(unix)".dependencies]
 # Logging
 syslog = "6.1.1"

 [dependencies]
 # Logging
 log = "0.4.22"
-fern = { version = "0.6.2", features = ["syslog-6", "reopen-1"] }
+fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] }
 tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work

 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }

 # Lazy initialization
-once_cell = "1.19.0"
+once_cell = "1.20.2"

 # Numerical libraries
 num-traits = "0.2.19"

@@ -66,12 +66,12 @@ rmpv = "1.3.0" # MessagePack library
 dashmap = "6.1.0"

 # Async futures
-futures = "0.3.30"
+futures = "0.3.31"
-tokio = { version = "1.40.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
+tokio = { version = "1.41.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }

 # A generic serialization/deserialization framework
-serde = { version = "1.0.210", features = ["derive"] }
+serde = { version = "1.0.213", features = ["derive"] }
-serde_json = "1.0.128"
+serde_json = "1.0.132"

 # A safe, extensible ORM and Query builder
 diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] }

@@ -86,7 +86,7 @@ rand = { version = "0.8.5", features = ["small_rng"] }
 ring = "0.17.8"

 # UUID generation
-uuid = { version = "1.10.0", features = ["v4"] }
+uuid = { version = "1.11.0", features = ["v4"] }

 # Date and time libraries
 chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false }

@@ -115,7 +115,7 @@ webauthn-rs = "0.3.2"
 url = "2.5.2"

 # Email libraries
-lettre = { version = "0.11.9", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"

@@ -123,14 +123,14 @@ email_address = "0.2.9"
 handlebars = { version = "6.1.0", features = ["dir_source"] }

 # HTTP client (Used for favicons, version check, DUO and HIBP API)
-reqwest = { version = "0.12.7", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
+reqwest = { version = "0.12.8", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
 hickory-resolver = "0.24.1"

 # Favicon extraction libraries
 html5gum = "0.5.7"
-regex = { version = "1.10.6", features = ["std", "perf", "unicode-perl"], default-features = false }
+regex = { version = "1.11.0", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.7.2"
+bytes = "1.8.0"

 # Cache function results (Used for version check and favicon fetching)
 cached = { version = "0.53.1", features = ["async"] }

@@ -140,14 +140,14 @@ cookie = "0.18.1"
 cookie_store = "0.21.0"

 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.66"
+openssl = "0.10.68"

 # CLI argument parsing
 pico-args = "0.5.0"

 # Macro ident concatenation
 paste = "1.0.15"
-governor = "0.6.3"
+governor = "0.7.0"

 # Check client versions for specific features.
 semver = "1.0.23"
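The dependency-table change above narrows the syslog dependency from "everything except Windows" to Unix targets only. As a rough illustration of what that gating means on the code side (a minimal sketch under that assumption, with made-up function names that do not exist in vaultwarden):

// Minimal sketch of cfg(unix) gating; illustrative only.
#[cfg(unix)]
fn syslog_supported() -> bool {
    // Compiled only on Unix targets, matching [target."cfg(unix)".dependencies]
    true
}

#[cfg(not(unix))]
fn syslog_supported() -> bool {
    // Compiled on every non-Unix target, which "cfg(not(windows))"
    // previously lumped together with Unix.
    false
}

fn main() {
    println!("syslog available: {}", syslog_supported());
}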
@@ -5,7 +5,7 @@ vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf716
 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
 # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
 xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa"
-rust_version: 1.81.0 # Rust version to be used
+rust_version: 1.82.0 # Rust version to be used
 debian_version: bookworm # Debian release name to be used
 alpine_version: "3.20" # Alpine version to be used
 # For which platforms/architectures will we try to build images
@@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931
 ########################## ALPINE BUILD IMAGES ##########################
 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
 ## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.81.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.81.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.81.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.81.0 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6

 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
@@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0d
 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.81.0-slim-bookworm AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build
 COPY --from=xx / /
 ARG TARGETARCH
 ARG TARGETVARIANT
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.81.0"
+channel = "1.82.0"
 components = [ "rustfmt", "clippy" ]
 profile = "minimal"
@@ -150,7 +150,6 @@ async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value>
 "ciphers": ciphers_json,
 "domains": domains_json,
 "sends": sends_json,
-"unofficialServer": true,
 "object": "sync"
 }))
 }
@@ -358,11 +358,12 @@ async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose,
 Vec::with_capacity(0)
 };

-let mut json_object = col.to_json();
+let mut json_object = col.to_json_details(&headers.user.uuid, None, &mut conn).await;
 json_object["assigned"] = json!(assigned);
 json_object["users"] = json!(users);
 json_object["groups"] = json!(groups);
 json_object["object"] = json!("collectionAccessDetails");
+json_object["unmanaged"] = json!(false);
 data.push(json_object)
 }

@@ -680,7 +681,7 @@ async fn get_org_collection_detail(
 let assigned = Collection::can_access_collection(&user_org, &collection.uuid, &mut conn).await;

-let mut json_object = collection.to_json();
+let mut json_object = collection.to_json_details(&headers.user.uuid, None, &mut conn).await;
 json_object["assigned"] = json!(assigned);
 json_object["users"] = json!(users);
 json_object["groups"] = json!(groups);

@@ -872,20 +873,19 @@ async fn send_invite(org_id: &str, data: Json<InviteData>, headers: AdminHeaders
 }

 for email in data.emails.iter() {
-let email = email.to_lowercase();
 let mut user_org_status = UserOrgStatus::Invited as i32;
-let user = match User::find_by_mail(&email, &mut conn).await {
+let user = match User::find_by_mail(email, &mut conn).await {
 None => {
 if !CONFIG.invitations_allowed() {
 err!(format!("User does not exist: {email}"))
 }

-if !CONFIG.is_email_domain_allowed(&email) {
+if !CONFIG.is_email_domain_allowed(email) {
 err!("Email domain not eligible for invitations")
 }

 if !CONFIG.mail_enabled() {
-let invitation = Invitation::new(&email);
+let invitation = Invitation::new(email);
 invitation.save(&mut conn).await?;
 }
@@ -120,16 +120,8 @@ async fn _refresh_login(data: ConnectData, conn: &mut DbConn) -> JsonResult {
 "expires_in": expires_in,
 "token_type": "Bearer",
 "refresh_token": device.refresh_token,
-"Key": user.akey,
-"PrivateKey": user.private_key,

-"Kdf": user.client_kdf_type,
-"KdfIterations": user.client_kdf_iter,
-"KdfMemory": user.client_kdf_memory,
-"KdfParallelism": user.client_kdf_parallelism,
-"ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
 "scope": scope,
-"unofficialServer": true,
 });

 Ok(Json(result))

@@ -342,7 +334,6 @@ async fn _password_login(
 "MasterPasswordPolicy": master_password_policy,

 "scope": scope,
-"unofficialServer": true,
 "UserDecryptionOptions": {
 "HasMasterPassword": !user.password_hash.is_empty(),
 "Object": "userDecryptionOptions"

@@ -461,9 +452,8 @@ async fn _user_api_key_login(
 "KdfIterations": user.client_kdf_iter,
 "KdfMemory": user.client_kdf_memory,
 "KdfParallelism": user.client_kdf_parallelism,
-"ResetMasterPassword": false, // TODO: Same as above
+"ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
 "scope": "api",
-"unofficialServer": true,
 });

 Ok(Json(result))

@@ -495,7 +485,6 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: &
 "expires_in": 3600,
 "token_type": "Bearer",
 "scope": "api.organization",
-"unofficialServer": true,
 })))
 }
@@ -1,6 +1,9 @@
 use std::env::consts::EXE_SUFFIX;
 use std::process::exit;
-use std::sync::RwLock;
+use std::sync::{
+    atomic::{AtomicBool, Ordering},
+    RwLock,
+};

 use job_scheduler_ng::Schedule;
 use once_cell::sync::Lazy;

@@ -17,6 +20,8 @@ static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
 get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
 });

+pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
+
 pub static CONFIG: Lazy<Config> = Lazy::new(|| {
 Config::load().unwrap_or_else(|e| {
 println!("Error loading config:\n {e:?}\n");
|
|||||||
|
|
||||||
// TODO: deal with deprecated flags so they can be removed from this list, cf. #4263
|
// TODO: deal with deprecated flags so they can be removed from this list, cf. #4263
|
||||||
const KNOWN_FLAGS: &[&str] =
|
const KNOWN_FLAGS: &[&str] =
|
||||||
&["autofill-overlay", "autofill-v2", "browser-fileless-import", "fido2-vault-credentials"];
|
&["autofill-overlay", "autofill-v2", "browser-fileless-import", "extension-refresh", "fido2-vault-credentials"];
|
||||||
let configured_flags = parse_experimental_client_feature_flags(&cfg.experimental_client_feature_flags);
|
let configured_flags = parse_experimental_client_feature_flags(&cfg.experimental_client_feature_flags);
|
||||||
let invalid_flags: Vec<_> = configured_flags.keys().filter(|flag| !KNOWN_FLAGS.contains(&flag.as_str())).collect();
|
let invalid_flags: Vec<_> = configured_flags.keys().filter(|flag| !KNOWN_FLAGS.contains(&flag.as_str())).collect();
|
||||||
if !invalid_flags.is_empty() {
|
if !invalid_flags.is_empty() {
|
||||||
@@ -1105,7 +1110,9 @@ impl Config {
|
|||||||
|
|
||||||
// Fill any missing with defaults
|
// Fill any missing with defaults
|
||||||
let config = builder.build();
|
let config = builder.build();
|
||||||
|
if !SKIP_CONFIG_VALIDATION.load(Ordering::Relaxed) {
|
||||||
validate_config(&config)?;
|
validate_config(&config)?;
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Config {
|
Ok(Config {
|
||||||
inner: RwLock::new(Inner {
|
inner: RwLock::new(Inner {
|
||||||
|
@@ -176,7 +176,27 @@ impl Cipher {
 .inspect_err(|e| warn!("Error parsing fields {e:?} for {}", self.uuid))
 .ok()
 })
-.map(|d| d.into_iter().map(|d| d.data).collect())
+.map(|d| {
+    d.into_iter()
+        .map(|mut f| {
+            // Check if the `type` key is a number, strings break some clients
+            // The fallback type is the hidden type `1`. this should prevent accidental data disclosure
+            // If not try to convert the string value to a number and fallback to `1`
+            // If it is both not a number and not a string, fallback to `1`
+            match f.data.get("type") {
+                Some(t) if t.is_number() => {}
+                Some(t) if t.is_string() => {
+                    let type_num = &t.as_str().unwrap_or("1").parse::<u8>().unwrap_or(1);
+                    f.data["type"] = json!(type_num);
+                }
+                _ => {
+                    f.data["type"] = json!(1);
+                }
+            }
+            f.data
+        })
+        .collect()
+})
 .unwrap_or_default();

 let password_history_json: Vec<_> = self
|
|||||||
|
|
||||||
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
|
// NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
|
||||||
// data_json should always contain the following keys with every atype
|
// data_json should always contain the following keys with every atype
|
||||||
data_json["fields"] = Value::Array(fields_json.clone());
|
data_json["fields"] = json!(fields_json);
|
||||||
data_json["name"] = json!(self.name);
|
data_json["name"] = json!(self.name);
|
||||||
data_json["notes"] = json!(self.notes);
|
data_json["notes"] = json!(self.notes);
|
||||||
data_json["passwordHistory"] = Value::Array(password_history_json.clone());
|
data_json["passwordHistory"] = Value::Array(password_history_json.clone());
|
||||||
|
@@ -81,8 +81,8 @@ impl Collection {
 let (read_only, hide_passwords, can_manage) = if let Some(cipher_sync_data) = cipher_sync_data {
 match cipher_sync_data.user_organizations.get(&self.org_uuid) {
 // Only for Manager types Bitwarden returns true for the can_manage option
-// Owners and Admins always have false, but they can manage all collections anyway
+// Owners and Admins always have true
-Some(uo) if uo.has_full_access() => (false, false, uo.atype == UserOrgType::Manager),
+Some(uo) if uo.has_full_access() => (false, false, uo.atype >= UserOrgType::Manager),
 Some(uo) => {
 // Only let a manager manage collections when the have full read/write access
 let is_manager = uo.atype == UserOrgType::Manager;

@@ -98,7 +98,7 @@ impl Collection {
 }
 } else {
 match UserOrganization::find_confirmed_by_user_and_org(user_uuid, &self.org_uuid, conn).await {
-Some(ou) if ou.has_full_access() => (false, false, ou.atype == UserOrgType::Manager),
+Some(ou) if ou.has_full_access() => (false, false, ou.atype >= UserOrgType::Manager),
 Some(ou) => {
 let is_manager = ou.atype == UserOrgType::Manager;
 let read_only = !self.is_writable_by_user(user_uuid, conn).await;
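The switch from `==` to `>=` only changes the result if the role type is ordered so that Owner and Admin rank above Manager. Assuming such an ordering (a sketch with a stand-in enum, not vaultwarden's actual UserOrgType definition), the manage flag now covers all three privileged roles:

// Stand-in enum ordered User < Manager < Admin < Owner; vaultwarden's real
// UserOrgType uses different discriminants with a custom comparison, this
// only illustrates the effect of `>=`.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]
enum Role {
    User,
    Manager,
    Admin,
    Owner,
}

fn can_manage(full_access: bool, role: Role) -> bool {
    // Mirrors `uo.atype >= UserOrgType::Manager` for full-access members.
    full_access && role >= Role::Manager
}

fn main() {
    assert!(can_manage(true, Role::Owner));   // previously false with `==`
    assert!(can_manage(true, Role::Admin));   // previously false with `==`
    assert!(can_manage(true, Role::Manager));
    assert!(!can_manage(true, Role::User));
}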
@@ -26,7 +26,7 @@ db_object! {
 }
 }

-/// Local methods
+// Local methods

 impl EmergencyAccess {
 pub fn new(grantor_uuid: String, email: String, status: i32, atype: i32, wait_time_days: i32) -> Self {
@@ -82,7 +82,7 @@ impl Group {
 "id": entry.collections_uuid,
 "readOnly": entry.read_only,
 "hidePasswords": entry.hide_passwords,
-"manage": *user_org_type == UserOrgType::Manager && !entry.read_only && !entry.hide_passwords
+"manage": *user_org_type >= UserOrgType::Admin || (*user_org_type == UserOrgType::Manager && !entry.read_only && !entry.hide_passwords)
 })
 })
 .collect();
@@ -161,7 +161,6 @@ impl Organization {
 "identifier": null, // not supported by us
 "name": self.name,
 "seats": null,
-"maxAutoscaleSeats": null,
 "maxCollections": null,
 "maxStorageGb": i16::MAX, // The value doesn't matter, we don't check server-side
 "use2fa": true,

@@ -233,6 +232,14 @@ impl UserOrganization {
 false
 }

+/// Return the status of the user in an unrevoked state
+pub fn get_unrevoked_status(&self) -> i32 {
+    if self.status <= UserOrgStatus::Revoked as i32 {
+        return self.status + ACTIVATE_REVOKE_DIFF;
+    }
+    self.status
+}
+
 pub fn set_external_id(&mut self, external_id: Option<String>) -> bool {
 //Check if external id is empty. We don't want to have
 //empty strings in the database

@@ -374,7 +381,6 @@ impl UserOrganization {
 "identifier": null, // Not supported
 "name": org.name,
 "seats": null,
-"maxAutoscaleSeats": null,
 "maxCollections": null,
 "usersGetPremium": true,
 "use2fa": true,

@@ -411,7 +417,7 @@ impl UserOrganization {
 "familySponsorshipValidUntil": null,
 "familySponsorshipToDelete": null,
 "accessSecretsManager": false,
-"limitCollectionCreationDeletion": true,
+"limitCollectionCreationDeletion": false, // This should be set to true only when we can handle roles like createNewCollections
 "allowAdminAccessToAllCollectionItems": true,
 "flexibleCollections": false,

@@ -477,7 +483,7 @@ impl UserOrganization {
 .into_iter()
 .filter_map(|c| {
 let (read_only, hide_passwords, can_manage) = if self.has_full_access() {
-(false, false, self.atype == UserOrgType::Manager)
+(false, false, self.atype >= UserOrgType::Manager)
 } else if let Some(cu) = cu.get(&c.uuid) {
 (
 cu.read_only,

@@ -504,10 +510,29 @@ impl UserOrganization {
 Vec::with_capacity(0)
 };

+let permissions = json!({
+    // TODO: Add support for Custom User Roles
+    // See: https://bitwarden.com/help/article/user-types-access-control/#custom-role
+    "accessEventLogs": false,
+    "accessImportExport": false,
+    "accessReports": false,
+    "createNewCollections": false,
+    "editAnyCollection": false,
+    "deleteAnyCollection": false,
+    "editAssignedCollections": false,
+    "deleteAssignedCollections": false,
+    "manageGroups": false,
+    "managePolicies": false,
+    "manageSso": false, // Not supported
+    "manageUsers": false,
+    "manageResetPassword": false,
+    "manageScim": false // Not supported (Not AGPLv3 Licensed)
+});
+
 json!({
 "id": self.uuid,
 "userId": self.user_uuid,
-"name": user.name,
+"name": if self.get_unrevoked_status() >= UserOrgStatus::Accepted as i32 { Some(user.name) } else { None },
 "email": user.email,
 "externalId": self.external_id,
 "avatarColor": user.avatar_color,

@@ -519,6 +544,13 @@ impl UserOrganization {
 "accessAll": self.access_all,
 "twoFactorEnabled": twofactor_enabled,
 "resetPasswordEnrolled": self.reset_password_key.is_some(),
+"hasMasterPassword": !user.password_hash.is_empty(),
+
+"permissions": permissions,
+
+"ssoBound": false, // Not supported
+"usesKeyConnector": false, // Not supported
+"accessSecretsManager": false, // Not supported (Not AGPLv3 Licensed)

 "object": "organizationUserUserDetails",
 })
@@ -1,3 +1,4 @@
+use crate::util::{format_date, get_uuid, retry};
 use chrono::{NaiveDateTime, TimeDelta, Utc};
 use serde_json::Value;

@@ -90,7 +91,7 @@ impl User {
 let email = email.to_lowercase();

 Self {
-uuid: crate::util::get_uuid(),
+uuid: get_uuid(),
 enabled: true,
 created_at: now,
 updated_at: now,

@@ -107,7 +108,7 @@ impl User {
 salt: crypto::get_random_bytes::<64>().to_vec(),
 password_iterations: CONFIG.password_iterations(),

-security_stamp: crate::util::get_uuid(),
+security_stamp: get_uuid(),
 stamp_exception: None,

 password_hint: None,

@@ -188,7 +189,7 @@ impl User {
 }

 pub fn reset_security_stamp(&mut self) {
-self.security_stamp = crate::util::get_uuid();
+self.security_stamp = get_uuid();
 }

 /// Set the stamp_exception to only allow a subsequent request matching a specific route using the current security-stamp.

@@ -259,6 +260,7 @@ impl User {
 "forcePasswordReset": false,
 "avatarColor": self.avatar_color,
 "usesKeyConnector": false,
+"creationDate": format_date(&self.created_at),
 "object": "profile",
 })
 }

@@ -340,7 +342,7 @@ impl User {
 let updated_at = Utc::now().naive_utc();

 db_run! {conn: {
-crate::util::retry(|| {
+retry(|| {
 diesel::update(users::table)
 .set(users::updated_at.eq(updated_at))
 .execute(conn)

@@ -357,7 +359,7 @@ impl User {
 async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &mut DbConn) -> EmptyResult {
 db_run! {conn: {
-crate::util::retry(|| {
+retry(|| {
 diesel::update(users::table.filter(users::uuid.eq(uuid)))
 .set(users::updated_at.eq(date))
 .execute(conn)
src/main.rs: 15 changes

@@ -38,9 +38,11 @@ use std::{
 use tokio::{
 fs::File,
 io::{AsyncBufReadExt, BufReader},
-signal::unix::SignalKind,
 };

+#[cfg(unix)]
+use tokio::signal::unix::SignalKind;
+
 #[macro_use]
 mod error;
 mod api;

@@ -60,7 +62,7 @@ use crate::api::{WS_ANONYMOUS_SUBSCRIPTIONS, WS_USERS};
 pub use config::CONFIG;
 pub use error::{Error, MapResult};
 use rocket::data::{Limits, ToByteUnit};
-use std::sync::Arc;
+use std::sync::{atomic::Ordering, Arc};
 pub use util::is_running_in_container;

 #[rocket::main]

@@ -122,6 +124,7 @@ fn parse_args() {
 print!("{HELP}");
 exit(0);
 } else if pargs.contains(["-v", "--version"]) {
+config::SKIP_CONFIG_VALIDATION.store(true, Ordering::Relaxed);
 let web_vault_version = util::get_web_vault_version();
 println!("Vaultwarden {version}");
 println!("Web-Vault {web_vault_version}");

@@ -383,7 +386,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
 {
 logger = logger.chain(fern::log_file(log_file)?);
 }
-#[cfg(not(windows))]
+#[cfg(unix)]
 {
 const SIGHUP: i32 = SignalKind::hangup().as_raw_value();
 let path = Path::new(&log_file);

@@ -391,7 +394,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
 }
 }

-#[cfg(not(windows))]
+#[cfg(unix)]
 {
 if cfg!(feature = "enable_syslog") || CONFIG.use_syslog() {
 logger = chain_syslog(logger);

@@ -441,7 +444,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
 Ok(level)
 }

-#[cfg(not(windows))]
+#[cfg(unix)]
 fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
 let syslog_fmt = syslog::Formatter3164 {
 facility: syslog::Facility::LOG_USER,
@@ -513,10 +516,10 @@ async fn container_data_folder_is_persistent(data_folder: &str) -> bool {
 format!(" /{data_folder} ")
 };
 let mut lines = BufReader::new(mountinfo).lines();
+let re = regex::Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap();
 while let Some(line) = lines.next_line().await.unwrap_or_default() {
 // Only execute a regex check if we find the base match
 if line.contains(&data_folder_match) {
-let re = regex::Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap();
 if re.is_match(&line) {
 return false;
 }
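The mount-check hunk hoists the Regex::new call out of the per-line loop so the pattern is compiled once per call instead of once per matching line. A minimal sketch of the same idea, here taken one step further with a lazily initialized static (using the once_cell and regex crates already listed in Cargo.toml; the function itself is illustrative, not the repository's code):

use once_cell::sync::Lazy;
use regex::Regex;

// Compile the pattern a single time; repeated calls reuse it.
static VOLUME_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap());

fn looks_like_named_volume(line: &str) -> bool {
    VOLUME_RE.is_match(line)
}

fn main() {
    // hypothetical mountinfo-style line, just to exercise the pattern
    let sample = format!("/volumes/{}/_data /data", "a".repeat(64));
    println!("{}", looks_like_named_volume(&sample));
}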
@@ -3,7 +3,7 @@ Join {{{org_name}}}
 You have been invited to join the *{{org_name}}* organization.

-Click here to join: {{url}}
+Click here to join: {{{url}}}

 If you do not wish to join this organization, you can safely ignore this email.

@@ -9,7 +9,7 @@ Join {{{org_name}}}
 </tr>
 <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
 <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-<a href="{{url}}"
+<a href="{{{url}}}"
 clicktracking=off target="_blank" style="color: #ffffff; text-decoration: none; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; background-color: #3c8dbc; border-color: #3c8dbc; border-style: solid; border-width: 10px 20px; margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
 Join Organization Now
 </a>
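Both template hunks switch from the double-stache {{url}} to the triple-stache {{{url}}}. In Handlebars the double form HTML-escapes its value, which can mangle invite URLs that carry query parameters, while the triple form emits the value verbatim. A small sketch with the handlebars crate from Cargo.toml (the URL and data are illustrative, not the real invite payload):

use handlebars::Handlebars;
use serde_json::json;

fn main() {
    let hb = Handlebars::new();
    let data = json!({ "url": "https://vault.example.com/#/accept?email=a@b.c&token=abc" });

    // {{url}} runs the default HTML escape, so `&` becomes `&amp;` and the link can break.
    let escaped = hb.render_template("Click here to join: {{url}}", &data).unwrap();
    // {{{url}}} leaves the URL untouched.
    let raw = hb.render_template("Click here to join: {{{url}}}", &data).unwrap();

    println!("{escaped}");
    println!("{raw}");
}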