Compare commits

...

13 Commits

Author SHA1 Message Date
Mathijs van Veluw
1f868b8d22 Show assigned collections on member edit (#5556)
Because we were using the `has_full_access()` function, we did not return assigned collections for an owner/admin even if they did not have the `access_all` flag set.
This commit will change that to use the `access_all` flag instead, and return assigned collections too.

While saving a member with assigned collections would still persist those rights, and they were visible in the collection management view, they were not shown on the member itself.
So it did work, but it was not visible.

Fixes #5554
Fixes #5555

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-07 22:33:11 +01:00
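A minimal, self-contained sketch of the behavioural change described in this commit; the names below (`Membership`, `atype`, `assigned`) are hypothetical stand-ins, not the real Vaultwarden types or queries:

struct Membership {
    atype: i32,            // hypothetical: 0 = Owner, 1 = Admin, 2 = User, ...
    access_all: bool,      // the `access_all` flag from the commit message
    assigned: Vec<String>, // collection ids explicitly assigned to this member
}

impl Membership {
    // Owners/Admins report "full access" even without the access_all flag set.
    fn has_full_access(&self) -> bool {
        self.access_all || self.atype <= 1
    }

    // Old behaviour: an Owner/Admin without access_all got an empty list back,
    // so their explicit assignments were invisible on the member edit view.
    fn assigned_collections_old(&self) -> Vec<String> {
        if self.has_full_access() {
            Vec::new()
        } else {
            self.assigned.clone()
        }
    }

    // New behaviour: only the access_all flag suppresses the explicit list.
    fn assigned_collections_new(&self) -> Vec<String> {
        if self.access_all {
            Vec::new()
        } else {
            self.assigned.clone()
        }
    }
}

fn main() {
    let admin = Membership { atype: 1, access_all: false, assigned: vec!["col-1".into()] };
    assert!(admin.assigned_collections_old().is_empty());                    // bug: assignment hidden
    assert_eq!(admin.assigned_collections_new(), vec!["col-1".to_string()]); // fix: assignment visible
}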
Mathijs van Veluw
8d1df08b81 Fix icon redirect not working on desktop (#5536)
* Fix icon redirect not working on desktop

We also need to exclude the header in case we do an external_icon call.

Fixes #5535

Signed-off-by: BlackDex <black.dex@gmail.com>

* Add informational comments to the icon_external function

Signed-off-by: BlackDex <black.dex@gmail.com>

* Fix spelling/grammar

Signed-off-by: BlackDex <black.dex@gmail.com>

---------

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-04 13:20:32 +01:00
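A rough, self-contained sketch of the exclusion described in this commit; the route name `icon_external` matches the comment added to the icons code further down, while the decision helper and its call site are simplified assumptions (the real check lives in the response handling in `util.rs`):

fn should_attach_header(route_name: Option<&str>) -> bool {
    // `icon_external` only answers with a redirect to an external icon service;
    // sending the header on that response breaks icon downloads in the desktop
    // client, so it is skipped for exactly this route.
    route_name != Some("icon_external")
}

fn main() {
    assert!(should_attach_header(Some("some_other_route")));
    assert!(!should_attach_header(Some("icon_external")));
}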
Stefan Melmuk
3b6bccde97 add bulk-access endpoint for collections (#5542) 2025-02-04 09:42:02 +01:00
Daniel
d2b36642a6 Update crates & fix CVE-2025-24898 (#5538) 2025-02-04 01:01:06 +01:00
Mathijs van Veluw
a02fb0fd24 Update workflows and enhance security (#5537)
This commit updates the workflow files and also fixes some security issues reported by zizmor (https://github.com/woodruffw/zizmor).

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-04 00:33:43 +01:00
Daniel
1109293992 Update Rust to 1.84.1 (#5508)
- also update the crates
- add necessary modifications for `rand` upgrade
- `small_rng` is enabled by default now
2025-02-01 13:16:32 +01:00
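A small sketch of the `rand` 0.8 -> 0.9 API changes this entails, mirroring the `password_hint` hunk further down; it assumes `rand = "0.9"` with its default features, which now include `small_rng`:

use rand::{rngs::SmallRng, Rng, SeedableRng};

fn sleep_jitter_ms() -> u64 {
    // rand 0.8: SmallRng::from_entropy() and rng.gen_range(..)
    // rand 0.9: SmallRng::from_os_rng() and rng.random_range(..)
    let mut rng = SmallRng::from_os_rng();
    let delta: i32 = 100;
    (1_000 + rng.random_range(-delta..=delta)) as u64
}

fn main() {
    let ms = sleep_jitter_ms();
    assert!((900..=1100).contains(&ms));
}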
Mathijs van Veluw
3c29f82974 Allow all manager to create collections again (#5488)
* Allow all manager to create collections again

This commit checks if the member is a Manager or better, and if so allows them to create collections.
We actually check if it is less than a Manager, since `limitCollectionCreation` should be set to false to allow creation and true to prevent it.

This should fix an issue discussed in #5484

Signed-off-by: BlackDex <black.dex@gmail.com>

* Fix some small issues

Signed-off-by: BlackDex <black.dex@gmail.com>

---------

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-01-29 20:41:31 +01:00
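A self-contained sketch of the check described in this commit; the `MembershipType` ordering below is an assumption chosen so that "Manager or better" compares as greater, not the actual Vaultwarden enum:

#[derive(PartialEq, PartialOrd)]
enum MembershipType {
    User = 1,
    Manager = 2,
    Admin = 3,
    Owner = 4,
}

// `limitCollectionCreation` is what the clients consume: `true` blocks creating
// collections, `false` allows it, so it is set only for members below Manager.
fn limit_collection_creation(member_type: &MembershipType) -> bool {
    *member_type < MembershipType::Manager
}

fn main() {
    assert!(limit_collection_creation(&MembershipType::User));
    assert!(!limit_collection_creation(&MembershipType::Manager));
    assert!(!limit_collection_creation(&MembershipType::Admin));
    assert!(!limit_collection_creation(&MembershipType::Owner));
}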
Roman Ratiner
663f88e717 Fix Duo Field Names for Web Client (#5491)
* Fix Duo Field Names for Web Client

* Fix Api Validation

* Rename Duo Labels In Admin
2025-01-29 12:00:14 +01:00
Stefan Melmuk
a3dccee243 add and use new event types (#5482)
* add additional event_types

* use correct event_type when leaving an org

* use correct event type when deleting a user

* also correctly log auth requests

* add correct membership info to event log
2025-01-28 11:25:53 +01:00
Mathijs van Veluw
c0ebe0d982 Fix passwordRevisionDate format (#5477) 2025-01-27 20:16:59 +01:00
Win‮8201‭Linux‬
1b46c80389 Make sure the icons are displayed correctly in desktop clients (#5469) 2025-01-27 18:29:24 +01:00
Stefan Melmuk
2c549984c0 let invited members access OrgMemberHeaders (#5461) 2025-01-27 18:27:11 +01:00
Stefan Melmuk
ecab7a50ea hide already approved (or declined) devices (#5467) 2025-01-27 18:21:22 +01:00
27 changed files with 561 additions and 271 deletions

View File

@@ -1,4 +1,5 @@
name: Build
permissions: {}
on:
push:
@@ -13,6 +14,7 @@ on:
- "diesel.toml"
- "docker/Dockerfile.j2"
- "docker/DockerSettings.yaml"
pull_request:
paths:
- ".github/workflows/build.yml"
@@ -28,13 +30,17 @@ on:
jobs:
build:
name: Build and Test ${{ matrix.channel }}
permissions:
actions: write
contents: read
# We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
runs-on: ubuntu-22.04
timeout-minutes: 120
# Make warnings errors, this is to prevent warnings slipping through.
# This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
env:
RUSTFLAGS: "-D warnings"
RUSTFLAGS: "-Dwarnings"
strategy:
fail-fast: false
matrix:
@@ -42,20 +48,19 @@ jobs:
- "rust-toolchain" # The version defined in rust-toolchain
- "msrv" # The supported MSRV
name: Build and Test ${{ matrix.channel }}
steps:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# End Checkout the repo
# Install dependencies
- name: "Install dependencies Ubuntu"
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
# End Install dependencies
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
fetch-depth: 0
# End Checkout the repo
# Determine rust-toolchain version
- name: Init Variables
@@ -75,7 +80,7 @@ jobs:
# Only install the clippy and rustfmt components on the default rust-toolchain
- name: "Install rust-toolchain version"
uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
if: ${{ matrix.channel == 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -85,7 +90,7 @@ jobs:
# Install the any other channel to be used for which we do not execute clippy and rustfmt
- name: "Install MSRV version"
uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
if: ${{ matrix.channel != 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -93,11 +98,13 @@ jobs:
# Set the current matrix toolchain version as default
- name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
env:
RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
run: |
# Remove the rust-toolchain.toml
rm rust-toolchain.toml
# Set the default
rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
rustup default "${RUST_TOOLCHAIN}"
# Show environment
- name: "Show environment"
@@ -161,7 +168,7 @@ jobs:
id: clippy
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
run: |
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
# End Run cargo clippy
@@ -178,22 +185,31 @@ jobs:
# This is useful so all test/clippy/fmt actions are done, and they can all be addressed
- name: "Some checks failed"
if: ${{ failure() }}
env:
TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
TEST_MYSQL: ${{ steps.test_mysql.outcome }}
TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
CLIPPY: ${{ steps.clippy.outcome }}
FMT: ${{ steps.formatting.outcome }}
run: |
echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
echo "|---|------|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}"
exit 1
@@ -202,5 +218,5 @@ jobs:
- name: "All checks passed"
if: ${{ success() }}
run: |
echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}"

View File

@@ -1,21 +1,17 @@
name: Hadolint
permissions: {}
on: [
push,
pull_request
]
on: [ push, pull_request ]
jobs:
hadolint:
name: Validate Dockerfile syntax
permissions:
contents: read
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# End Checkout the repo
steps:
# Start Docker Buildx
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
@@ -37,6 +33,12 @@ jobs:
env:
HADOLINT_VERSION: 2.12.0
# End Download hadolint
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
# End Checkout the repo
# Test Dockerfiles with hadolint
- name: Run hadolint

View File

@@ -1,4 +1,5 @@
name: Release
permissions: {}
on:
push:
@@ -6,17 +7,23 @@ on:
- main
tags:
- '*'
# https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
- '[1-2].[0-9]+.[0-9]+'
jobs:
# https://github.com/marketplace/actions/skip-duplicate-actions
# Some checks to determine if we need to continue with building a new docker.
# We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
skip_check:
runs-on: ubuntu-24.04
# Only run this in the upstream repo and not on forks
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Cancel older jobs when running
permissions:
actions: write
runs-on: ubuntu-24.04
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- name: Skip Duplicates Actions
id: skip_check
@@ -27,6 +34,9 @@ jobs:
if: ${{ github.ref_type == 'branch' }}
docker-build:
needs: skip_check
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
name: Build Vaultwarden containers
permissions:
packages: write
contents: read
@@ -34,8 +44,6 @@ jobs:
id-token: write
runs-on: ubuntu-24.04
timeout-minutes: 120
needs: skip_check
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
services:
registry:
@@ -61,12 +69,6 @@ jobs:
base_image: ["debian","alpine"]
steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
fetch-depth: 0
- name: Initialize QEMU binfmt support
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
with:
@@ -78,20 +80,31 @@ jobs:
# https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with:
cache-binary: false
buildkitd-config-inline: |
[worker.oci]
max-parallelism = 2
driver-opts: |
network=host
# Checkout the repo
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# We need fetch-depth of 0 so we also get all the tag metadata
with:
persist-credentials: false
fetch-depth: 0
# Determine Base Tags and Source Version
- name: Determine Base Tags and Source Version
shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
run: |
# Check which main tag we are going to build determined by github.ref_type
if [[ "${{ github.ref_type }}" == "tag" ]]; then
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
elif [[ "${{ github.ref_type }}" == "branch" ]]; then
elif [[ "${REF_TYPE}" == "branch" ]]; then
echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
fi
@@ -116,8 +129,10 @@ jobs:
- name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
shell: bash
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}"
echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
# Login to GitHub Container Registry
- name: Login to GitHub Container Registry
@@ -131,8 +146,10 @@ jobs:
- name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}"
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
# Login to Quay.io
- name: Login to Quay.io
@@ -146,17 +163,22 @@ jobs:
- name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
shell: bash
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}"
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
- name: Configure build cache from/to
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
BASE_IMAGE: ${{ matrix.base_image }}
run: |
#
# Check if there is a GitHub Container Registry Login and use it for caching
if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
else
echo "BAKE_CACHE_FROM="
echo "BAKE_CACHE_TO="
@@ -170,7 +192,7 @@ jobs:
- name: Bake ${{ matrix.base_image }} containers
id: bake_vw
uses: docker/bake-action@5ca506d06f70338a4968df87fd8bfee5cbfb84c7 # v6.0.0
uses: docker/bake-action@7bff531c65a5cda33e52e43950a795b91d450f63 # v6.3.0
env:
BASE_TAGS: "${{ env.BASE_TAGS }}"
SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -189,14 +211,16 @@ jobs:
- name: Extract digest SHA
shell: bash
env:
BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
run: |
GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< '${{ steps.bake_vw.outputs.metadata }}')"
GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
@@ -204,7 +228,7 @@ jobs:
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
@@ -212,7 +236,7 @@ jobs:
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
@@ -222,11 +246,13 @@ jobs:
# Extract the Alpine binaries from the containers
- name: Extract binaries
shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
run: |
# Check which main tag we are going to build determined by github.ref_type
if [[ "${{ github.ref_type }}" == "tag" ]]; then
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
EXTRACT_TAG="latest"
elif [[ "${{ github.ref_type }}" == "branch" ]]; then
elif [[ "${REF_TYPE}" == "branch" ]]; then
EXTRACT_TAG="testing"
fi
@@ -264,31 +290,31 @@ jobs:
# Upload artifacts to Github Actions and Attest the binaries
- name: "Upload amd64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
path: vaultwarden-amd64-${{ matrix.base_image }}
- name: "Upload arm64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
path: vaultwarden-arm64-${{ matrix.base_image }}
- name: "Upload armv7 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
path: vaultwarden-armv7-${{ matrix.base_image }}
- name: "Upload armv6 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
path: vaultwarden-armv6-${{ matrix.base_image }}
- name: "Attest artifacts ${{ matrix.base_image }}"
uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
with:
subject-path: vaultwarden-*
# End Upload artifacts to Github Actions

View File

@@ -1,3 +1,6 @@
name: Cleanup
permissions: {}
on:
workflow_dispatch:
inputs:
@@ -9,10 +12,11 @@ on:
schedule:
- cron: '0 1 * * FRI'
name: Cleanup
jobs:
releasecache-cleanup:
name: Releasecache Cleanup
permissions:
packages: write
runs-on: ubuntu-24.04
continue-on-error: true
timeout-minutes: 30

View File

@@ -1,34 +1,39 @@
name: trivy
name: Trivy
permissions: {}
on:
push:
branches:
- main
tags:
- '*'
pull_request:
branches: [ "main" ]
branches:
- main
schedule:
- cron: '08 11 * * *'
permissions:
contents: read
jobs:
trivy-scan:
# Only run this in the master repo and not on forks
# Only run this in the upstream repo and not on forks
# When all forks run this at the same time, it is causing `Too Many Requests` issues
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Check
runs-on: ubuntu-24.04
timeout-minutes: 30
name: Trivy Scan
permissions:
contents: read
security-events: write
actions: read
security-events: write
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0

Cargo.lock (generated)
View File

@@ -26,7 +26,7 @@ dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
"zerocopy 0.7.35",
]
[[package]]
@@ -275,9 +275,9 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]]
name = "async-trait"
version = "0.1.85"
version = "0.1.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d"
dependencies = [
"proc-macro2",
"quote",
@@ -429,9 +429,9 @@ dependencies = [
[[package]]
name = "bumpalo"
version = "3.16.0"
version = "3.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf"
[[package]]
name = "bytemuck"
@@ -447,9 +447,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.9.0"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9"
[[package]]
name = "cached"
@@ -489,9 +489,9 @@ checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0"
[[package]]
name = "cc"
version = "1.2.10"
version = "1.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229"
checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf"
dependencies = [
"shlex",
]
@@ -608,9 +608,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "cpufeatures"
version = "0.2.16"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3"
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
dependencies = [
"libc",
]
@@ -754,18 +754,18 @@ dependencies = [
[[package]]
name = "derive_more"
version = "1.0.0"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
checksum = "71158d5e914dec8a242751a3fc516b03ed3e6772ce9de79e1aeea6420663cad4"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
version = "1.0.0"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
checksum = "9e04e066e440d7973a852a3acdc25b0ae712bb6d311755fbf773d6a4518b2226"
dependencies = [
"proc-macro2",
"quote",
@@ -808,9 +808,9 @@ dependencies = [
[[package]]
name = "diesel"
version = "2.2.6"
version = "2.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf1bedf64cdb9643204a36dd15b19a6ce8e7aa7f7b105868e9f1fad5ffa7d12"
checksum = "04001f23ba8843dc315804fa324000376084dfb1c30794ff68dd279e6e5696d5"
dependencies = [
"bigdecimal",
"bitflags",
@@ -923,9 +923,9 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dsl_auto_type"
version = "0.1.2"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5d9abe6314103864cc2d8901b7ae224e0ab1a103a0a416661b4097b0779b607"
checksum = "139ae9aca7527f85f26dd76483eb38533fd84bd571065da1739656ef71c5ff5b"
dependencies = [
"darling",
"either",
@@ -1242,10 +1242,22 @@ dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi",
"wasi 0.11.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
name = "getrandom"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
dependencies = [
"cfg-if",
"libc",
"wasi 0.13.3+wasi-0.2.2",
"windows-targets 0.52.6",
]
[[package]]
name = "gimli"
version = "0.31.1"
@@ -1286,7 +1298,7 @@ dependencies = [
"parking_lot",
"portable-atomic",
"quanta",
"rand",
"rand 0.8.5",
"smallvec",
"spinning_top",
]
@@ -1396,7 +1408,7 @@ dependencies = [
"idna",
"ipnet",
"once_cell",
"rand",
"rand 0.8.5",
"thiserror 1.0.69",
"tinyvec",
"tokio",
@@ -1417,7 +1429,7 @@ dependencies = [
"lru-cache",
"once_cell",
"parking_lot",
"rand",
"rand 0.8.5",
"resolv-conf",
"smallvec",
"thiserror 1.0.69",
@@ -1523,9 +1535,9 @@ dependencies = [
[[package]]
name = "httparse"
version = "1.9.5"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946"
checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a"
[[package]]
name = "httpdate"
@@ -1558,9 +1570,9 @@ dependencies = [
[[package]]
name = "hyper"
version = "1.5.2"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0"
checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
dependencies = [
"bytes",
"futures-channel",
@@ -1584,9 +1596,9 @@ checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2"
dependencies = [
"futures-util",
"http 1.2.0",
"hyper 1.5.2",
"hyper 1.6.0",
"hyper-util",
"rustls 0.23.21",
"rustls 0.23.22",
"rustls-pki-types",
"tokio",
"tokio-rustls 0.26.1",
@@ -1601,7 +1613,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
dependencies = [
"bytes",
"http-body-util",
"hyper 1.5.2",
"hyper 1.6.0",
"hyper-util",
"native-tls",
"tokio",
@@ -1620,7 +1632,7 @@ dependencies = [
"futures-util",
"http 1.2.0",
"http-body 1.0.1",
"hyper 1.5.2",
"hyper 1.6.0",
"pin-project-lite",
"socket2",
"tokio",
@@ -1916,9 +1928,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "lettre"
version = "0.11.11"
version = "0.11.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab4c9a167ff73df98a5ecc07e8bf5ce90b583665da3d1762eb1f775ad4d0d6f5"
checksum = "e882e1489810a45919477602194312b1a7df0e5acc30a6188be7b520268f63f8"
dependencies = [
"async-std",
"async-trait",
@@ -1969,9 +1981,9 @@ dependencies = [
[[package]]
name = "libsqlite3-sys"
version = "0.30.1"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
checksum = "ad8935b44e7c13394a179a438e0cebba0fe08fe01b54f152e29a93b5cf993fd4"
dependencies = [
"cc",
"pkg-config",
@@ -2132,7 +2144,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
dependencies = [
"libc",
"wasi",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.52.0",
]
@@ -2167,9 +2179,9 @@ dependencies = [
[[package]]
name = "native-tls"
version = "0.2.12"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466"
checksum = "0dab59f8e050d5df8e4dd87d9206fb6f65a483e20ac9fda365ade4fab353196c"
dependencies = [
"libc",
"log",
@@ -2310,9 +2322,9 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "openssl"
version = "0.10.68"
version = "0.10.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5"
checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
dependencies = [
"bitflags",
"cfg-if",
@@ -2351,9 +2363,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
version = "0.9.104"
version = "0.9.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
dependencies = [
"cc",
"libc",
@@ -2413,7 +2425,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
dependencies = [
"base64ct",
"rand_core",
"rand_core 0.6.4",
"subtle",
]
@@ -2534,7 +2546,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
"rand",
"rand 0.8.5",
]
[[package]]
@@ -2627,15 +2639,16 @@ version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
dependencies = [
"zerocopy",
"zerocopy 0.7.35",
]
[[package]]
name = "pq-sys"
version = "0.6.3"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6cc05d7ea95200187117196eee9edd0644424911821aeb28a18ce60ea0b8793"
checksum = "30b51d65ebe1cb1f40641b15abae017fed35ccdda46e3dab1ff8768f625a3222"
dependencies = [
"libc",
"vcpkg",
]
@@ -2696,7 +2709,7 @@ dependencies = [
"libc",
"once_cell",
"raw-cpuid",
"wasi",
"wasi 0.11.0+wasi-snapshot-preview1",
"web-sys",
"winapi",
]
@@ -2740,8 +2753,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
]
[[package]]
name = "rand"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.0",
"zerocopy 0.8.14",
]
[[package]]
@@ -2751,7 +2775,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
"rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
dependencies = [
"ppv-lite86",
"rand_core 0.9.0",
]
[[package]]
@@ -2760,7 +2794,17 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
"getrandom 0.2.15",
]
[[package]]
name = "rand_core"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
dependencies = [
"getrandom 0.3.1",
"zerocopy 0.8.14",
]
[[package]]
@@ -2875,7 +2919,7 @@ dependencies = [
"http 1.2.0",
"http-body 1.0.1",
"http-body-util",
"hyper 1.5.2",
"hyper 1.6.0",
"hyper-rustls",
"hyper-tls",
"hyper-util",
@@ -2925,7 +2969,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
dependencies = [
"cc",
"cfg-if",
"getrandom",
"getrandom 0.2.15",
"libc",
"spin",
"untrusted",
@@ -2974,7 +3018,7 @@ dependencies = [
"num_cpus",
"parking_lot",
"pin-project-lite",
"rand",
"rand 0.8.5",
"ref-cast",
"rocket_codegen",
"rocket_http",
@@ -3102,9 +3146,9 @@ dependencies = [
[[package]]
name = "rustls"
version = "0.23.21"
version = "0.23.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8"
checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7"
dependencies = [
"once_cell",
"rustls-pki-types",
@@ -3133,9 +3177,9 @@ dependencies = [
[[package]]
name = "rustls-pki-types"
version = "1.10.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37"
checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c"
[[package]]
name = "rustls-webpki"
@@ -3166,9 +3210,9 @@ checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
[[package]]
name = "ryu"
version = "1.0.18"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd"
[[package]]
name = "same-file"
@@ -3280,9 +3324,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.137"
version = "1.0.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b"
checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
dependencies = [
"itoa",
"memchr",
@@ -3476,9 +3520,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.96"
version = "2.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
dependencies = [
"proc-macro2",
"quote",
@@ -3540,13 +3584,13 @@ dependencies = [
[[package]]
name = "tempfile"
version = "3.15.0"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704"
checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91"
dependencies = [
"cfg-if",
"fastrand",
"getrandom",
"getrandom 0.3.1",
"once_cell",
"rustix",
"windows-sys 0.59.0",
@@ -3724,7 +3768,7 @@ version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37"
dependencies = [
"rustls 0.23.21",
"rustls 0.23.22",
"tokio",
]
@@ -3799,9 +3843,9 @@ dependencies = [
[[package]]
name = "toml_edit"
version = "0.22.22"
version = "0.22.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee"
dependencies = [
"indexmap",
"serde",
@@ -3929,7 +3973,7 @@ dependencies = [
"http 1.2.0",
"httparse",
"log",
"rand",
"rand 0.8.5",
"sha1",
"thiserror 1.0.69",
"url",
@@ -3969,9 +4013,9 @@ dependencies = [
[[package]]
name = "unicode-ident"
version = "1.0.15"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11cd88e12b17c6494200a9c1b683a04fcac9573ed74cd1b62aeb2727c5592243"
checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034"
[[package]]
name = "unicode-xid"
@@ -4021,7 +4065,7 @@ version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3758f5e68192bb96cc8f9b7e2c2cfdabb435499a28499a42f8f984092adad4b"
dependencies = [
"getrandom",
"getrandom 0.2.15",
]
[[package]]
@@ -4079,7 +4123,7 @@ dependencies = [
"paste",
"percent-encoding",
"pico-args",
"rand",
"rand 0.9.0",
"regex",
"reqwest",
"ring",
@@ -4139,6 +4183,15 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasi"
version = "0.13.3+wasi-0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
dependencies = [
"wit-bindgen-rt",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.100"
@@ -4252,7 +4305,7 @@ dependencies = [
"base64 0.13.1",
"nom",
"openssl",
"rand",
"rand 0.8.5",
"serde",
"serde_cbor",
"serde_derive",
@@ -4519,9 +4572,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winnow"
version = "0.6.24"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a"
checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f"
dependencies = [
"memchr",
]
@@ -4542,6 +4595,15 @@ version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "wit-bindgen-rt"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
dependencies = [
"bitflags",
]
[[package]]
name = "write16"
version = "1.0.0"
@@ -4596,7 +4658,7 @@ dependencies = [
"form_urlencoded",
"futures",
"hmac",
"rand",
"rand 0.8.5",
"reqwest",
"sha1",
"threadpool",
@@ -4609,7 +4671,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
dependencies = [
"byteorder",
"zerocopy-derive",
"zerocopy-derive 0.7.35",
]
[[package]]
name = "zerocopy"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a367f292d93d4eab890745e75a778da40909cab4d6ff8173693812f79c4a2468"
dependencies = [
"zerocopy-derive 0.8.14",
]
[[package]]
@@ -4623,6 +4694,17 @@ dependencies = [
"syn",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3931cb58c62c13adec22e38686b559c86a30565e16ad6e8510a337cedc611e1"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "zerofrom"
version = "0.1.5"

View File

@@ -75,21 +75,21 @@ tokio = { version = "1.43.0", features = ["rt-multi-thread", "fs", "io-util", "p
# A generic serialization/deserialization framework
serde = { version = "1.0.217", features = ["derive"] }
serde_json = "1.0.137"
serde_json = "1.0.138"
# A safe, extensible ORM and Query builder
diesel = { version = "2.2.6", features = ["chrono", "r2d2", "numeric"] }
diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.2.0"
diesel_logger = { version = "0.4.0", optional = true }
derive_more = { version = "1.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
derive_more = { version = "2.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
diesel-derive-newtype = "2.1.2"
# Bundled/Static SQLite
libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true }
libsqlite3-sys = { version = "0.31.0", features = ["bundled"], optional = true }
# Crypto-related libraries
rand = { version = "0.8.5", features = ["small_rng"] }
rand = "0.9.0"
ring = "0.17.8"
# UUID generation
@@ -122,7 +122,7 @@ webauthn-rs = "0.3.2"
url = "2.5.4"
# Email libraries
lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
lettre = { version = "0.11.12", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
email_address = "0.2.9"
@@ -137,7 +137,7 @@ hickory-resolver = "0.24.2"
html5gum = "0.7.0"
regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.1"
bytes = "1.9.0"
bytes = "1.10.0"
# Cache function results (Used for version check and favicon fetching)
cached = { version = "0.54.0", features = ["async"] }
@@ -147,7 +147,7 @@ cookie = "0.18.1"
cookie_store = "0.21.1"
# Used by U2F, JWT and PostgreSQL
openssl = "0.10.68"
openssl = "0.10.70"
# CLI argument parsing
pico-args = "0.5.0"

View File

@@ -5,7 +5,7 @@ vault_image_digest: "sha256:cb6b2095a4afc1d9d243a33f6d09211f40e3d82c7ae829fd025d
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.84.0 # Rust version to be used
rust_version: 1.84.1 # Rust version to be used
debian_version: bookworm # Debian release name to be used
alpine_version: "3.21" # Alpine version to be used
# For which platforms/architectures will we try to build images

View File

@@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:cb6b2095a4afc
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.84.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.84.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.84.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.84.0 AS build_armv6
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.84.1 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.84.1 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.84.1 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.84.1 AS build_armv6
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006

View File

@@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.84.0-slim-bookworm AS build
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.84.1-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT

View File

@@ -10,4 +10,4 @@ proc-macro = true
[dependencies]
quote = "1.0.38"
syn = "2.0.96"
syn = "2.0.98"

View File

@@ -1,4 +1,4 @@
[toolchain]
channel = "1.84.0"
channel = "1.84.1"
components = [ "rustfmt", "clippy" ]
profile = "minimal"

View File

@@ -403,7 +403,7 @@ async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> Em
for membership in memberships {
log_event(
EventType::OrganizationUserRemoved as i32,
EventType::OrganizationUserDeleted as i32,
&membership.uuid,
&membership.org_uuid,
&ACTING_ADMIN_USER.into(),

View File

@@ -925,9 +925,9 @@ async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyR
// paths that send mail take noticeably longer than ones that
// don't. Add a randomized sleep to mitigate this somewhat.
use rand::{rngs::SmallRng, Rng, SeedableRng};
let mut rng = SmallRng::from_entropy();
let mut rng = SmallRng::from_os_rng();
let delta: i32 = 100;
let sleep_ms = (1_000 + rng.gen_range(-delta..=delta)) as u64;
let sleep_ms = (1_000 + rng.random_range(-delta..=delta)) as u64;
tokio::time::sleep(tokio::time::Duration::from_millis(sleep_ms)).await;
Ok(())
} else {
@@ -1206,6 +1206,15 @@ async fn post_auth_request(
nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
log_user_event(
EventType::UserRequestedDeviceApproval as i32,
&user.uuid,
client_headers.device_type,
&client_headers.ip.ip,
&mut conn,
)
.await;
Ok(Json(json!({
"id": auth_request.uuid,
"publicKey": auth_request.public_key,
@@ -1287,9 +1296,26 @@ async fn put_auth_request(
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await;
log_user_event(
EventType::OrganizationUserApprovedAuthRequest as i32,
&headers.user.uuid,
headers.device.atype,
&headers.ip.ip,
&mut conn,
)
.await;
} else {
// If denied, there's no reason to keep the request
auth_request.delete(&mut conn).await?;
log_user_event(
EventType::OrganizationUserRejectedAuthRequest as i32,
&headers.user.uuid,
headers.device.atype,
&headers.ip.ip,
&mut conn,
)
.await;
}
Ok(Json(json!({

View File

@@ -245,8 +245,8 @@ async fn _log_user_event(
ip: &IpAddr,
conn: &mut DbConn,
) {
let orgs = Membership::get_orgs_by_user(user_id, conn).await;
let mut events: Vec<Event> = Vec::with_capacity(orgs.len() + 1); // We need an event per org and one without an org
let memberships = Membership::find_by_user(user_id, conn).await;
let mut events: Vec<Event> = Vec::with_capacity(memberships.len() + 1); // We need an event per org and one without an org
// Upstream saves the event also without any org_id.
let mut event = Event::new(event_type, event_date);
@@ -257,10 +257,11 @@ async fn _log_user_event(
events.push(event);
// For each org a user is a member of store these events per org
for org_id in orgs {
for membership in memberships {
let mut event = Event::new(event_type, event_date);
event.user_uuid = Some(user_id.clone());
event.org_uuid = Some(org_id);
event.org_uuid = Some(membership.org_uuid);
event.org_user_uuid = Some(membership.uuid);
event.act_user_uuid = Some(user_id.clone());
event.device_type = Some(device_type);
event.ip_address = Some(ip.to_string());

View File

@@ -38,6 +38,7 @@ pub fn routes() -> Vec<Route> {
post_organization_collections,
delete_organization_collection_member,
post_organization_collection_delete_member,
post_bulk_access_collections,
post_organization_collection_update,
put_organization_collection_update,
delete_organization_collection,
@@ -129,17 +130,17 @@ struct OrganizationUpdateData {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct NewCollectionData {
struct FullCollectionData {
name: String,
groups: Vec<NewCollectionGroupData>,
users: Vec<NewCollectionMemberData>,
groups: Vec<CollectionGroupData>,
users: Vec<CollectionMembershipData>,
id: Option<CollectionId>,
external_id: Option<String>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct NewCollectionGroupData {
struct CollectionGroupData {
hide_passwords: bool,
id: GroupId,
read_only: bool,
@@ -148,7 +149,7 @@ struct NewCollectionGroupData {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct NewCollectionMemberData {
struct CollectionMembershipData {
hide_passwords: bool,
id: MembershipId,
read_only: bool,
@@ -251,7 +252,7 @@ async fn leave_organization(org_id: OrganizationId, headers: Headers, mut conn:
}
log_event(
EventType::OrganizationUserRemoved as i32,
EventType::OrganizationUserLeft as i32,
&member.uuid,
&org_id,
&headers.user.uuid,
@@ -429,13 +430,13 @@ async fn _get_org_collections(org_id: &OrganizationId, conn: &mut DbConn) -> Val
async fn post_organization_collections(
org_id: OrganizationId,
headers: ManagerHeadersLoose,
data: Json<NewCollectionData>,
data: Json<FullCollectionData>,
mut conn: DbConn,
) -> JsonResult {
if org_id != headers.membership.org_uuid {
err!("Organization not found", "Organization id's do not match");
}
let data: NewCollectionData = data.into_inner();
let data: FullCollectionData = data.into_inner();
let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else {
err!("Can't find organization details")
@@ -485,7 +486,82 @@ async fn post_organization_collections(
CollectionUser::save(&headers.membership.user_uuid, &collection.uuid, false, false, false, &mut conn).await?;
}
Ok(Json(collection.to_json()))
Ok(Json(collection.to_json_details(&headers.membership.user_uuid, None, &mut conn).await))
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct BulkCollectionAccessData {
collection_ids: Vec<CollectionId>,
groups: Vec<CollectionGroupData>,
users: Vec<CollectionMembershipData>,
}
#[post("/organizations/<org_id>/collections/bulk-access", data = "<data>", rank = 1)]
async fn post_bulk_access_collections(
org_id: OrganizationId,
headers: ManagerHeadersLoose,
data: Json<BulkCollectionAccessData>,
mut conn: DbConn,
) -> EmptyResult {
if org_id != headers.membership.org_uuid {
err!("Organization not found", "Organization id's do not match");
}
let data: BulkCollectionAccessData = data.into_inner();
if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
err!("Can't find organization details")
};
for col_id in data.collection_ids {
let Some(collection) = Collection::find_by_uuid_and_org(&col_id, &org_id, &mut conn).await else {
err!("Collection not found")
};
// update collection modification date
collection.save(&mut conn).await?;
log_event(
EventType::CollectionUpdated as i32,
&collection.uuid,
&org_id,
&headers.user.uuid,
headers.device.atype,
&headers.ip.ip,
&mut conn,
)
.await;
CollectionGroup::delete_all_by_collection(&col_id, &mut conn).await?;
for group in &data.groups {
CollectionGroup::new(col_id.clone(), group.id.clone(), group.read_only, group.hide_passwords, group.manage)
.save(&mut conn)
.await?;
}
CollectionUser::delete_all_by_collection(&col_id, &mut conn).await?;
for user in &data.users {
let Some(member) = Membership::find_by_uuid_and_org(&user.id, &org_id, &mut conn).await else {
err!("User is not part of organization")
};
if member.access_all {
continue;
}
CollectionUser::save(
&member.user_uuid,
&col_id,
user.read_only,
user.hide_passwords,
user.manage,
&mut conn,
)
.await?;
}
}
Ok(())
}
#[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
@@ -493,24 +569,24 @@ async fn put_organization_collection_update(
org_id: OrganizationId,
col_id: CollectionId,
headers: ManagerHeaders,
data: Json<NewCollectionData>,
data: Json<FullCollectionData>,
conn: DbConn,
) -> JsonResult {
post_organization_collection_update(org_id, col_id, headers, data, conn).await
}
#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>", rank = 2)]
async fn post_organization_collection_update(
org_id: OrganizationId,
col_id: CollectionId,
headers: ManagerHeaders,
data: Json<NewCollectionData>,
data: Json<FullCollectionData>,
mut conn: DbConn,
) -> JsonResult {
if org_id != headers.org_id {
err!("Organization not found", "Organization id's do not match");
}
let data: NewCollectionData = data.into_inner();
let data: FullCollectionData = data.into_inner();
if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
err!("Can't find organization details")
@@ -722,15 +798,16 @@ async fn get_org_collection_detail(
.map(|m| (m.uuid, m.atype))
.collect();
let users: Vec<Value> =
CollectionUser::find_by_collection_swap_user_uuid_with_member_uuid(&collection.uuid, &mut conn)
let users: Vec<Value> = CollectionUser::find_by_org_and_coll_swap_user_uuid_with_member_uuid(
&org_id,
&collection.uuid,
&mut conn,
)
.await
.iter()
.map(|collection_member| {
collection_member.to_json_details_for_member(
*membership_type
.get(&collection_member.membership_uuid)
.unwrap_or(&(MembershipType::User as i32)),
*membership_type.get(&collection_member.membership_uuid).unwrap_or(&(MembershipType::User as i32)),
)
})
.collect();
@@ -780,7 +857,7 @@ async fn get_collection_users(
async fn put_collection_users(
org_id: OrganizationId,
col_id: CollectionId,
data: Json<Vec<MembershipData>>,
data: Json<Vec<CollectionMembershipData>>,
headers: ManagerHeaders,
mut conn: DbConn,
) -> EmptyResult {
@@ -912,24 +989,6 @@ async fn post_org_keys(
})))
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CollectionData {
id: CollectionId,
read_only: bool,
hide_passwords: bool,
manage: bool,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct MembershipData {
id: MembershipId,
read_only: bool,
hide_passwords: bool,
manage: bool,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct InviteData {
@@ -1753,7 +1812,7 @@ use super::ciphers::CipherData;
#[serde(rename_all = "camelCase")]
struct ImportData {
ciphers: Vec<CipherData>,
collections: Vec<NewCollectionData>,
collections: Vec<FullCollectionData>,
collection_relationships: Vec<RelationsData>,
}
@@ -2548,7 +2607,7 @@ struct GroupRequest {
#[serde(default)]
access_all: bool,
external_id: Option<String>,
collections: Vec<SelectedCollection>,
collections: Vec<CollectionData>,
users: Vec<MembershipId>,
}
@@ -2569,14 +2628,14 @@ impl GroupRequest {
#[derive(Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct SelectedCollection {
struct CollectionData {
id: CollectionId,
read_only: bool,
hide_passwords: bool,
manage: bool,
}
impl SelectedCollection {
impl CollectionData {
pub fn to_collection_group(&self, groups_uuid: GroupId) -> CollectionGroup {
CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords, self.manage)
}
@@ -2659,7 +2718,7 @@ async fn put_group(
async fn add_update_group(
mut group: Group,
collections: Vec<SelectedCollection>,
collections: Vec<CollectionData>,
members: Vec<MembershipId>,
org_id: OrganizationId,
headers: &AdminHeaders,

View File

@@ -26,8 +26,8 @@ pub fn routes() -> Vec<Route> {
#[derive(Serialize, Deserialize)]
struct DuoData {
host: String, // Duo API hostname
ik: String, // integration key
sk: String, // secret key
ik: String, // client id
sk: String, // client secret
}
impl DuoData {
@@ -111,8 +111,8 @@ async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbCo
json!({
"enabled": enabled,
"host": data.host,
"secretKey": data.sk,
"integrationKey": data.ik,
"clientSecret": data.sk,
"clientId": data.ik,
"object": "twoFactorDuo"
})
} else {
@@ -129,8 +129,8 @@ async fn get_duo(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbCo
#[serde(rename_all = "camelCase")]
struct EnableDuoData {
host: String,
secret_key: String,
integration_key: String,
client_secret: String,
client_id: String,
master_password_hash: Option<String>,
otp: Option<String>,
}
@@ -139,8 +139,8 @@ impl From<EnableDuoData> for DuoData {
fn from(d: EnableDuoData) -> Self {
Self {
host: d.host,
ik: d.integration_key,
sk: d.secret_key,
ik: d.client_id,
sk: d.client_secret,
}
}
}
@@ -151,7 +151,7 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
st.is_empty() || s == DISABLED_MESSAGE_DEFAULT
}
!empty_or_default(&data.host) && !empty_or_default(&data.secret_key) && !empty_or_default(&data.integration_key)
!empty_or_default(&data.host) && !empty_or_default(&data.client_secret) && !empty_or_default(&data.client_id)
}
#[post("/two-factor/duo", data = "<data>")]
@@ -186,8 +186,8 @@ async fn activate_duo(data: Json<EnableDuoData>, headers: Headers, mut conn: DbC
Ok(Json(json!({
"enabled": true,
"host": data.host,
"secretKey": data.sk,
"integrationKey": data.ik,
"clientSecret": data.sk,
"clientId": data.ik,
"object": "twoFactorDuo"
})))
}

View File

@@ -63,6 +63,9 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
// Build Regex only once since this takes a lot of time.
static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
// The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
// It is used to prevent sending a specific header which breaks icon downloads.
// If this function needs to be renamed, also adjust the code in `util.rs`
#[get("/<domain>/icon.png")]
fn icon_external(domain: &str) -> Option<Redirect> {
if !is_valid_domain(domain) {

View File

@@ -542,10 +542,29 @@ pub struct OrgHeaders {
pub device: Device,
pub user: User,
pub membership_type: MembershipType,
pub membership_status: MembershipStatus,
pub membership: Membership,
pub ip: ClientIp,
}
impl OrgHeaders {
fn is_member(&self) -> bool {
// NOTE: we don't care about MembershipStatus at the moment because this is only used
// where an invited, accepted or confirmed user is expected. If this ever changes, or
// if from_i32 is changed to return Some(Revoked), this check needs to be changed accordingly.
self.membership_type >= MembershipType::User
}
fn is_confirmed_and_admin(&self) -> bool {
self.membership_status == MembershipStatus::Confirmed && self.membership_type >= MembershipType::Admin
}
fn is_confirmed_and_manager(&self) -> bool {
self.membership_status == MembershipStatus::Confirmed && self.membership_type >= MembershipType::Manager
}
fn is_confirmed_and_owner(&self) -> bool {
self.membership_status == MembershipStatus::Confirmed && self.membership_type == MembershipType::Owner
}
}
#[rocket::async_trait]
impl<'r> FromRequest<'r> for OrgHeaders {
type Error = &'static str;
@@ -574,15 +593,8 @@ impl<'r> FromRequest<'r> for OrgHeaders {
};
let user = headers.user;
let membership = match Membership::find_by_user_and_org(&user.uuid, &org_id, &mut conn).await {
Some(member) => {
if member.status == MembershipStatus::Confirmed as i32 {
member
} else {
err_handler!("The current user isn't confirmed member of the organization")
}
}
None => err_handler!("The current user isn't member of the organization"),
let Some(membership) = Membership::find_by_user_and_org(&user.uuid, &org_id, &mut conn).await else {
err_handler!("The current user isn't member of the organization");
};
Outcome::Success(Self {
@@ -590,13 +602,22 @@ impl<'r> FromRequest<'r> for OrgHeaders {
device: headers.device,
user,
membership_type: {
if let Some(org_usr_type) = MembershipType::from_i32(membership.atype) {
org_usr_type
if let Some(member_type) = MembershipType::from_i32(membership.atype) {
member_type
} else {
// This should only happen if the DB is corrupted
err_handler!("Unknown user type in the database")
}
},
membership_status: {
if let Some(member_status) = MembershipStatus::from_i32(membership.status) {
// NOTE: add additional check for revoked if from_i32 is ever changed
// to return Revoked status.
member_status
} else {
err_handler!("User status is either revoked or invalid.")
}
},
membership,
ip: headers.ip,
})
@@ -621,7 +642,7 @@ impl<'r> FromRequest<'r> for AdminHeaders {
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
let headers = try_outcome!(OrgHeaders::from_request(request).await);
if headers.membership_type >= MembershipType::Admin {
if headers.is_confirmed_and_admin() {
Outcome::Success(Self {
host: headers.host,
device: headers.device,
@@ -683,7 +704,7 @@ impl<'r> FromRequest<'r> for ManagerHeaders {
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
let headers = try_outcome!(OrgHeaders::from_request(request).await);
if headers.membership_type >= MembershipType::Manager {
if headers.is_confirmed_and_manager() {
match get_col_id(request) {
Some(col_id) => {
let mut conn = match DbConn::from_request(request).await {
@@ -738,7 +759,7 @@ impl<'r> FromRequest<'r> for ManagerHeadersLoose {
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
let headers = try_outcome!(OrgHeaders::from_request(request).await);
if headers.membership_type >= MembershipType::Manager {
if headers.is_confirmed_and_manager() {
Outcome::Success(Self {
host: headers.host,
device: headers.device,
@@ -801,7 +822,7 @@ impl<'r> FromRequest<'r> for OwnerHeaders {
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
let headers = try_outcome!(OrgHeaders::from_request(request).await);
if headers.membership_type == MembershipType::Owner {
if headers.is_confirmed_and_owner() {
Outcome::Success(Self {
device: headers.device,
user: headers.user,
@@ -826,7 +847,7 @@ impl<'r> FromRequest<'r> for OrgMemberHeaders {
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
let headers = try_outcome!(OrgHeaders::from_request(request).await);
if headers.membership_type >= MembershipType::User {
if headers.is_member() {
Outcome::Success(Self {
host: headers.host,
user: headers.user,
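To illustrate the pattern these changes tighten, here is a standalone Rocket sketch (not Vaultwarden code): a request guard's `from_request` runs before the handler body, so a guard like `AdminHeaders` can reject unconfirmed or under-privileged members outright. The `X-Role` header check below is a made-up stand-in for the real database lookup of the membership.

use rocket::http::Status;
use rocket::request::{FromRequest, Outcome};
use rocket::{get, launch, routes, Request};

struct ConfirmedAdmin;

#[rocket::async_trait]
impl<'r> FromRequest<'r> for ConfirmedAdmin {
    type Error = &'static str;

    async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> {
        // Stand-in check; the real guards look up the membership status and type in the DB.
        match req.headers().get_one("X-Role") {
            Some("admin" | "owner") => Outcome::Success(ConfirmedAdmin),
            _ => Outcome::Error((Status::Forbidden, "not a confirmed admin")),
        }
    }
}

#[get("/billing")]
fn billing(_admin: ConfirmedAdmin) -> &'static str {
    "only requests that passed the guard reach this handler"
}

#[launch]
fn rocket() -> _ {
    rocket::build().mount("/", routes![billing])
}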

View File

@@ -670,9 +670,9 @@ make_config! {
_enable_duo: bool, true, def, true;
/// Attempt to use deprecated iframe-based Traditional Prompt (Duo WebSDK 2)
duo_use_iframe: bool, false, def, false;
/// Integration Key
/// Client Id
duo_ikey: String, true, option;
/// Secret Key
/// Client Secret
duo_skey: Pass, true, option;
/// Host
duo_host: String, true, option;

View File

@@ -56,11 +56,11 @@ pub fn encode_random_bytes<const N: usize>(e: Encoding) -> String {
pub fn get_random_string(alphabet: &[u8], num_chars: usize) -> String {
// Ref: https://rust-lang-nursery.github.io/rust-cookbook/algorithms/randomness.html
use rand::Rng;
let mut rng = rand::thread_rng();
let mut rng = rand::rng();
(0..num_chars)
.map(|_| {
let i = rng.gen_range(0..alphabet.len());
let i = rng.random_range(0..alphabet.len());
alphabet[i] as char
})
.collect()
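For context, a self-contained sketch of the rand 0.9 API rename this change applies: `rand::thread_rng()` becomes `rand::rng()` and `Rng::gen_range` becomes `Rng::random_range`.

use rand::Rng;

fn main() {
    let alphabet = b"abcdefghijklmnopqrstuvwxyz0123456789";
    let mut rng = rand::rng(); // was: rand::thread_rng()
    let token: String = (0..8)
        .map(|_| alphabet[rng.random_range(0..alphabet.len())] as char) // was: gen_range
        .collect();
    println!("{token}");
}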

View File

@@ -150,6 +150,7 @@ impl AuthRequest {
auth_requests::table
.filter(auth_requests::user_uuid.eq(user_uuid))
.filter(auth_requests::request_device_identifier.eq(device_uuid))
.filter(auth_requests::approved.is_null())
.order_by(auth_requests::creation_date.desc())
.first::<AuthRequestDb>(conn).ok().from_db()
}}
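Illustrative only: with the extra `approved.is_null()` filter, only auth requests whose `approved` column is still NULL (neither approved nor declined yet) are returned. Modelling that nullable column as an `Option<bool>` is an assumption about the schema, not code from this diff:

fn is_pending(approved: Option<bool>) -> bool {
    // None = not answered yet; Some(true)/Some(false) = already approved/declined
    approved.is_none()
}

fn main() {
    assert!(is_pending(None));
    assert!(!is_pending(Some(true)));
    assert!(!is_pending(Some(false)));
}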

View File

@@ -142,7 +142,7 @@ impl Cipher {
sync_type: CipherSyncType,
conn: &mut DbConn,
) -> Value {
use crate::util::format_date;
use crate::util::{format_date, validate_and_format_date};
let mut attachments_json: Value = Value::Null;
if let Some(cipher_sync_data) = cipher_sync_data {
@@ -220,7 +220,7 @@ impl Cipher {
})
.map(|mut d| match d.get("lastUsedDate").and_then(|l| l.as_str()) {
Some(l) => {
d["lastUsedDate"] = json!(crate::util::validate_and_format_date(l));
d["lastUsedDate"] = json!(validate_and_format_date(l));
d
}
_ => {
@@ -261,6 +261,11 @@ impl Cipher {
type_data_json["uri"] = uris[0]["uri"].clone();
}
}
// Check if `passwordRevisionDate` is a valid date, else convert it
if let Some(pw_revision) = type_data_json["passwordRevisionDate"].as_str() {
type_data_json["passwordRevisionDate"] = json!(validate_and_format_date(pw_revision));
}
}
// Fix secure note issues when data is invalid
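A hypothetical stand-in (not the actual `util.rs` implementation) for what the `validate_and_format_date` calls above are relied on to do: parse the incoming string and re-serialize it in one canonical RFC 3339 form, falling back to the Unix epoch when the value cannot be parsed. Assumes the `chrono` crate:

use chrono::{DateTime, SecondsFormat, Utc};

fn validate_and_format_date(date: &str) -> String {
    DateTime::parse_from_rfc3339(date)
        .map(|d| d.with_timezone(&Utc))
        .unwrap_or_else(|_| DateTime::<Utc>::from_timestamp(0, 0).expect("epoch is valid"))
        .to_rfc3339_opts(SecondsFormat::Millis, true)
}

fn main() {
    println!("{}", validate_and_format_date("2025-01-27T20:16:59.000000Z"));
    println!("{}", validate_and_format_date("not-a-date")); // falls back to 1970-01-01T00:00:00.000Z
}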

View File

@@ -589,6 +589,7 @@ impl CollectionUser {
.inner_join(collections::table.on(collections::uuid.eq(users_collections::collection_uuid)))
.filter(collections::org_uuid.eq(org_uuid))
.inner_join(users_organizations::table.on(users_organizations::user_uuid.eq(users_collections::user_uuid)))
.filter(users_organizations::org_uuid.eq(org_uuid))
.select((users_organizations::uuid, users_collections::collection_uuid, users_collections::read_only, users_collections::hide_passwords, users_collections::manage))
.load::<CollectionUserDb>(conn)
.expect("Error loading users_collections")
@@ -685,13 +686,15 @@ impl CollectionUser {
}}
}
pub async fn find_by_collection_swap_user_uuid_with_member_uuid(
pub async fn find_by_org_and_coll_swap_user_uuid_with_member_uuid(
org_uuid: &OrganizationId,
collection_uuid: &CollectionId,
conn: &mut DbConn,
) -> Vec<CollectionMembership> {
let col_users = db_run! { conn: {
users_collections::table
.filter(users_collections::collection_uuid.eq(collection_uuid))
.filter(users_organizations::org_uuid.eq(org_uuid))
.inner_join(users_organizations::table.on(users_organizations::user_uuid.eq(users_collections::user_uuid)))
.select((users_organizations::uuid, users_collections::collection_uuid, users_collections::read_only, users_collections::hide_passwords, users_collections::manage))
.load::<CollectionUserDb>(conn)

View File

@@ -49,6 +49,8 @@ pub enum EventType {
UserClientExportedVault = 1007,
// UserUpdatedTempPassword = 1008, // Not supported
// UserMigratedKeyToKeyConnector = 1009, // Not supported
UserRequestedDeviceApproval = 1010,
// UserTdeOffboardingPasswordSet = 1011, // Not supported
// Cipher
CipherCreated = 1100,
@@ -69,6 +71,7 @@ pub enum EventType {
CipherSoftDeleted = 1115,
CipherRestored = 1116,
CipherClientToggledCardNumberVisible = 1117,
CipherClientToggledTOTPSeedVisible = 1118,
// Collection
CollectionCreated = 1300,
@@ -94,6 +97,10 @@ pub enum EventType {
// OrganizationUserFirstSsoLogin = 1510, // Not supported
OrganizationUserRevoked = 1511,
OrganizationUserRestored = 1512,
OrganizationUserApprovedAuthRequest = 1513,
OrganizationUserRejectedAuthRequest = 1514,
OrganizationUserDeleted = 1515,
OrganizationUserLeft = 1516,
// Organization
OrganizationUpdated = 1600,
@@ -105,6 +112,7 @@ pub enum EventType {
// OrganizationEnabledKeyConnector = 1606, // Not supported
// OrganizationDisabledKeyConnector = 1607, // Not supported
// OrganizationSponsorshipsSynced = 1608, // Not supported
// OrganizationCollectionManagementUpdated = 1609, // Not supported
// Policy
PolicyUpdated = 1700,
@@ -117,6 +125,13 @@ pub enum EventType {
// ProviderOrganizationAdded = 1901, // Not supported
// ProviderOrganizationRemoved = 1902, // Not supported
// ProviderOrganizationVaultAccessed = 1903, // Not supported
// OrganizationDomainAdded = 2000, // Not supported
// OrganizationDomainRemoved = 2001, // Not supported
// OrganizationDomainVerified = 2002, // Not supported
// OrganizationDomainNotVerified = 2003, // Not supported
// SecretRetrieved = 2100, // Not supported
}
/// Local methods

View File

@@ -55,6 +55,7 @@ db_object! {
}
// https://github.com/bitwarden/server/blob/b86a04cef9f1e1b82cf18e49fc94e017c641130c/src/Core/Enums/OrganizationUserStatusType.cs
#[derive(PartialEq)]
pub enum MembershipStatus {
Revoked = -1,
Invited = 0,
@@ -62,6 +63,19 @@ pub enum MembershipStatus {
Confirmed = 2,
}
impl MembershipStatus {
pub fn from_i32(status: i32) -> Option<Self> {
match status {
0 => Some(Self::Invited),
1 => Some(Self::Accepted),
2 => Some(Self::Confirmed),
// NOTE: we don't care about revoked members where this is used.
// If this ever changes, also adapt the OrgHeaders check.
_ => None,
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)]
pub enum MembershipType {
Owner = 0,
@@ -450,7 +464,7 @@ impl Membership {
"familySponsorshipValidUntil": null,
"familySponsorshipToDelete": null,
"accessSecretsManager": false,
"limitCollectionCreation": true,
"limitCollectionCreation": self.atype < MembershipType::Manager, // If less then a manager return true, to limit collection creations
"limitCollectionCreationDeletion": true,
"limitCollectionDeletion": true,
"allowAdminAccessToAllCollectionItems": true,
@@ -503,7 +517,7 @@ impl Membership {
CONFIG.org_groups_enabled() && Group::is_in_full_access_group(&self.user_uuid, &self.org_uuid, conn).await;
// If collections are to be included, only include them if the user does not have full access via a group or via the access_all flag on the membership itself
let collections: Vec<Value> = if include_collections && !(full_access_group || self.has_full_access()) {
let collections: Vec<Value> = if include_collections && !(full_access_group || self.access_all) {
// Get all collections for the user here already to prevent more queries
let cu: HashMap<CollectionId, CollectionUser> =
CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)
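Usage sketch for the new `MembershipStatus::from_i32` above (the enum is repeated here so the snippet stands alone): revoked (-1) and unknown values map to `None`, which the request guards in `auth.rs` then reject.

#[derive(PartialEq)]
enum MembershipStatus {
    Revoked = -1,
    Invited = 0,
    Accepted = 1,
    Confirmed = 2,
}

impl MembershipStatus {
    fn from_i32(status: i32) -> Option<Self> {
        match status {
            0 => Some(Self::Invited),
            1 => Some(Self::Accepted),
            2 => Some(Self::Confirmed),
            _ => None, // revoked (-1) and anything unexpected
        }
    }
}

fn main() {
    assert!(matches!(MembershipStatus::from_i32(0), Some(MembershipStatus::Invited)));
    assert!(matches!(MembershipStatus::from_i32(2), Some(MembershipStatus::Confirmed)));
    assert!(MembershipStatus::from_i32(-1).is_none()); // Revoked is intentionally rejected here
    assert!(MembershipStatus::from_i32(42).is_none());
}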

View File

@@ -55,11 +55,18 @@ impl Fairing for AppHeaders {
res.set_raw_header("Referrer-Policy", "same-origin");
res.set_raw_header("X-Content-Type-Options", "nosniff");
res.set_raw_header("X-Robots-Tag", "noindex, nofollow");
res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
// Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP
res.set_raw_header("X-XSS-Protection", "0");
// The `Cross-Origin-Resource-Policy` header should not be set on images or on the `icon_external` route.
// Otherwise some clients, like the Bitwarden Desktop, will fail to download the icons
if !(res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/"))
|| req.route().is_some_and(|v| v.name.as_deref() == Some("icon_external")))
{
res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
}
// Do not send the Content-Security-Policy (CSP) Header and X-Frame-Options for the *-connector.html files.
// These headers can cause issues when some MFA flows, like WebAuthn or Duo, need to open a popup or page within the clients.
// This is the same behavior as upstream Bitwarden.