Compare commits

...

5 Commits

Author SHA1 Message Date
Mathijs van Veluw
1f868b8d22 Show assigned collections on member edit (#5556)
Because we were using the `has_full_access()` function, we did not return assigned collections for an owner/admin even if they did not have the `access_all` flag set.
This commit changes that to use the `access_all` flag instead, and returns assigned collections too.

Saving a member with assigned collections did still store those rights, and they were visible in the collection management, but they were not shown on the member itself.
So it did work; it just wasn't visible.
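
For context, a minimal sketch of the old gate, using hypothetical simplified types (the real change is in `Membership::to_json_details`; see the `src/db/models/organization.rs` diff below):

```rust
// Hypothetical, simplified stand-ins for the real types.
struct Membership {
    access_all: bool,        // explicit "access all collections" flag
    is_owner_or_admin: bool, // role-based full access
}

impl Membership {
    // Owners and admins always report full access, even without `access_all`...
    fn has_full_access(&self) -> bool {
        self.access_all || self.is_owner_or_admin
    }
}

fn main() {
    let admin = Membership { access_all: false, is_owner_or_admin: true };
    // ...so a gate of `!has_full_access()` skipped the assigned-collection
    // lookup for every owner/admin:
    assert!(admin.has_full_access());
    // Gating on the `access_all` flag alone keeps the lookup for this member.
    assert!(!admin.access_all);
}
```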

Fixes #5554
Fixes #5555

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-07 22:33:11 +01:00
Mathijs van Veluw
8d1df08b81 Fix icon redirect not working on desktop (#5536)
* Fix icon redirect not working on desktop

We also need to exclude the header when handling an `external_icon` call.

Fixes #5535

Signed-off-by: BlackDex <black.dex@gmail.com>
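
As a rough sketch of the resulting rule (a hypothetical helper, not project code; the real logic is the `AppHeaders` change in `src/util.rs` below):

```rust
// Hypothetical helper mirroring the header decision in the `AppHeaders` fairing.
fn should_set_corp_header(content_type: Option<&str>, route_name: Option<&str>) -> bool {
    let is_image = content_type.is_some_and(|v| v.starts_with("image/"));
    // `icon_external` only answers with a redirect, so the image Content-Type
    // check never matches it; the route name has to be excluded explicitly.
    let is_external_icon = route_name == Some("icon_external");
    !(is_image || is_external_icon)
}

fn main() {
    assert!(should_set_corp_header(Some("application/json"), None));
    assert!(!should_set_corp_header(Some("image/png"), None));
    assert!(!should_set_corp_header(None, Some("icon_external")));
}
```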

* Add informational comments to the icon_external function

Signed-off-by: BlackDex <black.dex@gmail.com>

* Fix spelling/grammar

Signed-off-by: BlackDex <black.dex@gmail.com>

---------

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-04 13:20:32 +01:00
Stefan Melmuk
3b6bccde97 add bulk-access endpoint for collections (#5542) 2025-02-04 09:42:02 +01:00
Daniel
d2b36642a6 Update crates & fix CVE-2025-24898 (#5538) 2025-02-04 01:01:06 +01:00
Mathijs van Veluw
a02fb0fd24 Update workflows and enhance security (#5537)
This commit updates the workflow files and fixes some security issues that were reported by zizmor (https://github.com/woodruffw/zizmor).
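
Most of the changes below follow one zizmor finding: `${{ ... }}` expressions expanded directly inside `run:` scripts become part of the shell program and can act as injection points, so they are moved into `env:` variables and referenced as quoted shell variables. As an illustrative sketch (assumed example, not repository code; requires a Unix `sh`), the same hazard translated to Rust:

```rust
use std::process::Command;

fn main() {
    // Imagine this is an attacker-influenced value such as a branch name.
    let untrusted = r#"tag"; echo pwned; ""#;

    // Bad: interpolating the value into the script text, like `${{ ... }}`
    // inside a `run:` block -- the value becomes shell code and runs `echo pwned`.
    let script = format!(r#"echo "Building {untrusted}""#);
    Command::new("sh").arg("-c").arg(&script).status().unwrap();

    // Good: the script stays a fixed string and the value arrives via the
    // environment -- mirroring the `env:` + `"${REF_TYPE}"` pattern in the diff.
    Command::new("sh")
        .arg("-c")
        .arg(r#"echo "Building ${REF_TYPE}""#)
        .env("REF_TYPE", untrusted)
        .status()
        .unwrap();
}
```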

Signed-off-by: BlackDex <black.dex@gmail.com>
2025-02-04 00:33:43 +01:00
12 changed files with 260 additions and 142 deletions

.github/workflows/build.yml

@@ -1,4 +1,5 @@
 name: Build
+permissions: {}

 on:
   push:
@@ -13,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+
   pull_request:
     paths:
       - ".github/workflows/build.yml"
@@ -28,13 +30,17 @@ on:
 jobs:
   build:
+    name: Build and Test ${{ matrix.channel }}
+    permissions:
+      actions: write
+      contents: read
     # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
     runs-on: ubuntu-22.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
     env:
-      RUSTFLAGS: "-D warnings"
+      RUSTFLAGS: "-Dwarnings"
     strategy:
       fail-fast: false
       matrix:
@@ -42,20 +48,19 @@ jobs:
         - "rust-toolchain" # The version defined in rust-toolchain
         - "msrv" # The supported MSRV

-    name: Build and Test ${{ matrix.channel }}
-
     steps:
-      # Checkout the repo
-      - name: "Checkout"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo
-
       # Install dependencies
       - name: "Install dependencies Ubuntu"
         run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
       # End Install dependencies

+      # Checkout the repo
+      - name: "Checkout"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+      # End Checkout the repo

       # Determine rust-toolchain version
       - name: Init Variables
@@ -75,7 +80,7 @@ jobs:
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+        uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -85,7 +90,7 @@ jobs:
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+        uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -93,11 +98,13 @@ jobs:
       # Set the current matrix toolchain version as default
       - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
+        env:
+          RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
         run: |
           # Remove the rust-toolchain.toml
           rm rust-toolchain.toml
           # Set the default
-          rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
+          rustup default "${RUST_TOOLCHAIN}"

       # Show environment
       - name: "Show environment"
@@ -161,7 +168,7 @@ jobs:
         id: clippy
         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
         run: |
-          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
+          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
       # End Run cargo clippy
@@ -178,22 +185,31 @@ jobs:
       # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
       - name: "Some checks failed"
         if: ${{ failure() }}
+        env:
+          TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
+          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
+          TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
+          TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
+          TEST_MYSQL: ${{ steps.test_mysql.outcome }}
+          TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
+          CLIPPY: ${{ steps.clippy.outcome }}
+          FMT: ${{ steps.formatting.outcome }}
         run: |
-          echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
-          echo "|---|------|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
+          echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
           exit 1
@@ -202,5 +218,5 @@ jobs:
       - name: "All checks passed"
         if: ${{ success() }}
         run: |
-          echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"

.github/workflows/hadolint.yml

@@ -1,21 +1,17 @@
 name: Hadolint
+permissions: {}

-on: [
-  push,
-  pull_request
-]
+on: [ push, pull_request ]

 jobs:
   hadolint:
     name: Validate Dockerfile syntax
+    permissions:
+      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
-    steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo

+    steps:
       # Start Docker Buildx
       - name: Setup Docker Buildx
         uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
@@ -37,6 +33,12 @@ jobs:
         env:
           HADOLINT_VERSION: 2.12.0
       # End Download hadolint

+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+      # End Checkout the repo

       # Test Dockerfiles with hadolint
       - name: Run hadolint

.github/workflows/release.yml

@@ -1,4 +1,5 @@
 name: Release
+permissions: {}

 on:
   push:
@@ -6,17 +7,23 @@ on:
       - main

     tags:
-      - '*'
+      # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
+      - '[1-2].[0-9]+.[0-9]+'

 jobs:
   # https://github.com/marketplace/actions/skip-duplicate-actions
   # Some checks to determine if we need to continue with building a new docker.
   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
   skip_check:
-    runs-on: ubuntu-24.04
+    # Only run this in the upstream repo and not on forks
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
+    name: Cancel older jobs when running
+    permissions:
+      actions: write
+    runs-on: ubuntu-24.04
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
     steps:
       - name: Skip Duplicates Actions
         id: skip_check
@@ -27,6 +34,9 @@ jobs:
           if: ${{ github.ref_type == 'branch' }}

   docker-build:
+    needs: skip_check
+    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
+    name: Build Vaultwarden containers
     permissions:
       packages: write
       contents: read
@@ -34,8 +44,6 @@ jobs:
       id-token: write
     runs-on: ubuntu-24.04
     timeout-minutes: 120
-    needs: skip_check
-    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
     services:
       registry:
@@ -61,12 +69,6 @@ jobs:
         base_image: ["debian","alpine"]

     steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          fetch-depth: 0
-
       - name: Initialize QEMU binfmt support
         uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
         with:
@@ -78,20 +80,31 @@ jobs:
         # https://github.com/moby/buildkit/issues/3969
         # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
         with:
+          cache-binary: false
          buildkitd-config-inline: |
             [worker.oci]
             max-parallelism = 2
          driver-opts: |
             network=host

+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        # We need fetch-depth of 0 so we also get all the tag metadata
+        with:
+          persist-credentials: false
+          fetch-depth: 0

       # Determine Base Tags and Source Version
       - name: Determine Base Tags and Source Version
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
           fi
@@ -116,8 +129,10 @@ jobs:
       - name: Add registry for DockerHub
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
         shell: bash
+        env:
+          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"

       # Login to GitHub Container Registry
       - name: Login to GitHub Container Registry
@@ -131,8 +146,10 @@ jobs:
       - name: Add registry for ghcr.io
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"

       # Login to Quay.io
       - name: Login to Quay.io
@@ -146,17 +163,22 @@ jobs:
       - name: Add registry for Quay.io
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
         shell: bash
+        env:
+          QUAY_REPO: ${{ vars.QUAY_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"

       - name: Configure build cache from/to
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
+          BASE_IMAGE: ${{ matrix.base_image }}
         run: |
           #
           # Check if there is a GitHub Container Registry Login and use it for caching
           if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
-            echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}"
-            echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
           else
             echo "BAKE_CACHE_FROM="
             echo "BAKE_CACHE_TO="
@@ -170,7 +192,7 @@ jobs:
       - name: Bake ${{ matrix.base_image }} containers
         id: bake_vw
-        uses: docker/bake-action@5ca506d06f70338a4968df87fd8bfee5cbfb84c7 # v6.0.0
+        uses: docker/bake-action@7bff531c65a5cda33e52e43950a795b91d450f63 # v6.3.0
         env:
           BASE_TAGS: "${{ env.BASE_TAGS }}"
           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -189,14 +211,16 @@ jobs:
       - name: Extract digest SHA
         shell: bash
+        env:
+          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
         run: |
-          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< '${{ steps.bake_vw.outputs.metadata }}')"
+          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

       # Attest container images
       - name: Attest - docker.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.DOCKERHUB_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -204,7 +228,7 @@ jobs:
       - name: Attest - ghcr.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.GHCR_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -212,7 +236,7 @@ jobs:
       - name: Attest - quay.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.QUAY_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -222,11 +246,13 @@ jobs:
       # Extract the Alpine binaries from the containers
       - name: Extract binaries
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             EXTRACT_TAG="latest"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             EXTRACT_TAG="testing"
           fi
@@ -264,31 +290,31 @@ jobs:
       # Upload artifacts to Github Actions and Attest the binaries
       - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
           path: vaultwarden-amd64-${{ matrix.base_image }}

       - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
           path: vaultwarden-arm64-${{ matrix.base_image }}

       - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
           path: vaultwarden-armv7-${{ matrix.base_image }}

       - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
           path: vaultwarden-armv6-${{ matrix.base_image }}

       - name: "Attest artifacts ${{ matrix.base_image }}"
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-path: vaultwarden-*
       # End Upload artifacts to Github Actions

.github/workflows/releasecache-cleanup.yml

@@ -1,3 +1,6 @@
+name: Cleanup
+permissions: {}
+
 on:
   workflow_dispatch:
     inputs:
@@ -9,10 +12,11 @@ on:
   schedule:
     - cron: '0 1 * * FRI'

-name: Cleanup

 jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
+    permissions:
+      packages: write
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30

.github/workflows/trivy.yml

@@ -1,34 +1,39 @@
-name: trivy
+name: Trivy
+permissions: {}

 on:
   push:
     branches:
       - main
     tags:
       - '*'
   pull_request:
-    branches: [ "main" ]
+    branches:
+      - main
   schedule:
     - cron: '08 11 * * *'

-permissions:
-  contents: read

 jobs:
   trivy-scan:
-    # Only run this in the master repo and not on forks
+    # Only run this in the upstream repo and not on forks
     # When all forks run this at the same time, it is causing `Too Many Requests` issues
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
-    name: Check
-    runs-on: ubuntu-24.04
-    timeout-minutes: 30
+    name: Trivy Scan
     permissions:
       contents: read
-      security-events: write
       actions: read
+      security-events: write
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false

       - name: Run Trivy vulnerability scanner
         uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0

Cargo.lock (generated)

@@ -447,9 +447,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"

 [[package]]
 name = "bytes"
-version = "1.9.0"
+version = "1.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
+checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9"

 [[package]]
 name = "cached"
@@ -754,18 +754,18 @@ dependencies = [

 [[package]]
 name = "derive_more"
-version = "1.0.0"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
+checksum = "71158d5e914dec8a242751a3fc516b03ed3e6772ce9de79e1aeea6420663cad4"
 dependencies = [
  "derive_more-impl",
 ]

 [[package]]
 name = "derive_more-impl"
-version = "1.0.0"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
+checksum = "9e04e066e440d7973a852a3acdc25b0ae712bb6d311755fbf773d6a4518b2226"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1928,9 +1928,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

 [[package]]
 name = "lettre"
-version = "0.11.11"
+version = "0.11.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab4c9a167ff73df98a5ecc07e8bf5ce90b583665da3d1762eb1f775ad4d0d6f5"
+checksum = "e882e1489810a45919477602194312b1a7df0e5acc30a6188be7b520268f63f8"
 dependencies = [
  "async-std",
  "async-trait",
@@ -2322,9 +2322,9 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"

 [[package]]
 name = "openssl"
-version = "0.10.69"
+version = "0.10.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f5e534d133a060a3c19daec1eb3e98ec6f4685978834f2dbadfe2ec215bab64e"
+checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
 dependencies = [
  "bitflags",
  "cfg-if",
@@ -2363,9 +2363,9 @@ dependencies = [

 [[package]]
 name = "openssl-sys"
-version = "0.9.104"
+version = "0.9.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741"
+checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
 dependencies = [
  "cc",
  "libc",
@@ -3520,9 +3520,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"

 [[package]]
 name = "syn"
-version = "2.0.96"
+version = "2.0.98"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
+checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4572,9 +4572,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

 [[package]]
 name = "winnow"
-version = "0.7.0"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419"
+checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f"
 dependencies = [
  "memchr",
 ]

Cargo.toml

@@ -82,7 +82,7 @@ diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] }
 diesel_migrations = "2.2.0"
 diesel_logger = { version = "0.4.0", optional = true }

-derive_more = { version = "1.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
+derive_more = { version = "2.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
 diesel-derive-newtype = "2.1.2"

 # Bundled/Static SQLite
@@ -122,7 +122,7 @@ webauthn-rs = "0.3.2"
 url = "2.5.4"

 # Email libraries
-lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.12", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"
@@ -137,7 +137,7 @@ hickory-resolver = "0.24.2"
 html5gum = "0.7.0"
 regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.9.0"
+bytes = "1.10.0"

 # Cache function results (Used for version check and favicon fetching)
 cached = { version = "0.54.0", features = ["async"] }
@@ -147,7 +147,7 @@ cookie = "0.18.1"
 cookie_store = "0.21.1"

 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.69"
+openssl = "0.10.70"

 # CLI argument parsing
 pico-args = "0.5.0"

macros/Cargo.toml

@@ -10,4 +10,4 @@ proc-macro = true

 [dependencies]
 quote = "1.0.38"
-syn = "2.0.96"
+syn = "2.0.98"

src/api/core/organizations.rs

@@ -38,6 +38,7 @@ pub fn routes() -> Vec<Route> {
         post_organization_collections,
         delete_organization_collection_member,
         post_organization_collection_delete_member,
+        post_bulk_access_collections,
         post_organization_collection_update,
         put_organization_collection_update,
         delete_organization_collection,
@@ -129,17 +130,17 @@ struct OrganizationUpdateData {
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionData {
+struct FullCollectionData {
     name: String,
-    groups: Vec<NewCollectionGroupData>,
-    users: Vec<NewCollectionMemberData>,
+    groups: Vec<CollectionGroupData>,
+    users: Vec<CollectionMembershipData>,
     id: Option<CollectionId>,
     external_id: Option<String>,
 }

 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionGroupData {
+struct CollectionGroupData {
     hide_passwords: bool,
     id: GroupId,
     read_only: bool,
@@ -148,7 +149,7 @@ struct NewCollectionGroupData {

 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionMemberData {
+struct CollectionMembershipData {
     hide_passwords: bool,
     id: MembershipId,
     read_only: bool,
@@ -429,13 +430,13 @@ async fn _get_org_collections(org_id: &OrganizationId, conn: &mut DbConn) -> Val
 async fn post_organization_collections(
     org_id: OrganizationId,
     headers: ManagerHeadersLoose,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     mut conn: DbConn,
 ) -> JsonResult {
     if org_id != headers.membership.org_uuid {
         err!("Organization not found", "Organization id's do not match");
     }
-    let data: NewCollectionData = data.into_inner();
+    let data: FullCollectionData = data.into_inner();

     let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else {
         err!("Can't find organization details")
@@ -488,29 +489,104 @@ async fn post_organization_collections(
     Ok(Json(collection.to_json_details(&headers.membership.user_uuid, None, &mut conn).await))
 }

+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct BulkCollectionAccessData {
+    collection_ids: Vec<CollectionId>,
+    groups: Vec<CollectionGroupData>,
+    users: Vec<CollectionMembershipData>,
+}
+
+#[post("/organizations/<org_id>/collections/bulk-access", data = "<data>", rank = 1)]
+async fn post_bulk_access_collections(
+    org_id: OrganizationId,
+    headers: ManagerHeadersLoose,
+    data: Json<BulkCollectionAccessData>,
+    mut conn: DbConn,
+) -> EmptyResult {
+    if org_id != headers.membership.org_uuid {
+        err!("Organization not found", "Organization id's do not match");
+    }
+    let data: BulkCollectionAccessData = data.into_inner();
+
+    if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
+        err!("Can't find organization details")
+    };
+
+    for col_id in data.collection_ids {
+        let Some(collection) = Collection::find_by_uuid_and_org(&col_id, &org_id, &mut conn).await else {
+            err!("Collection not found")
+        };
+
+        // update collection modification date
+        collection.save(&mut conn).await?;
+
+        log_event(
+            EventType::CollectionUpdated as i32,
+            &collection.uuid,
+            &org_id,
+            &headers.user.uuid,
+            headers.device.atype,
+            &headers.ip.ip,
+            &mut conn,
+        )
+        .await;
+
+        CollectionGroup::delete_all_by_collection(&col_id, &mut conn).await?;
+        for group in &data.groups {
+            CollectionGroup::new(col_id.clone(), group.id.clone(), group.read_only, group.hide_passwords, group.manage)
+                .save(&mut conn)
+                .await?;
+        }
+
+        CollectionUser::delete_all_by_collection(&col_id, &mut conn).await?;
+        for user in &data.users {
+            let Some(member) = Membership::find_by_uuid_and_org(&user.id, &org_id, &mut conn).await else {
+                err!("User is not part of organization")
+            };
+
+            if member.access_all {
+                continue;
+            }
+
+            CollectionUser::save(
+                &member.user_uuid,
+                &col_id,
+                user.read_only,
+                user.hide_passwords,
+                user.manage,
+                &mut conn,
+            )
+            .await?;
+        }
+    }
+
+    Ok(())
+}
+
 #[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
 async fn put_organization_collection_update(
     org_id: OrganizationId,
     col_id: CollectionId,
     headers: ManagerHeaders,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     conn: DbConn,
 ) -> JsonResult {
     post_organization_collection_update(org_id, col_id, headers, data, conn).await
 }

-#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
+#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>", rank = 2)]
 async fn post_organization_collection_update(
     org_id: OrganizationId,
     col_id: CollectionId,
     headers: ManagerHeaders,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     mut conn: DbConn,
 ) -> JsonResult {
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }
-    let data: NewCollectionData = data.into_inner();
+    let data: FullCollectionData = data.into_inner();

     if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
         err!("Can't find organization details")
@@ -781,7 +857,7 @@ async fn get_collection_users(
 async fn put_collection_users(
     org_id: OrganizationId,
     col_id: CollectionId,
-    data: Json<Vec<MembershipData>>,
+    data: Json<Vec<CollectionMembershipData>>,
     headers: ManagerHeaders,
     mut conn: DbConn,
 ) -> EmptyResult {
@@ -913,24 +989,6 @@ async fn post_org_keys(
     })))
 }

-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct CollectionData {
-    id: CollectionId,
-    read_only: bool,
-    hide_passwords: bool,
-    manage: bool,
-}
-
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct MembershipData {
-    id: MembershipId,
-    read_only: bool,
-    hide_passwords: bool,
-    manage: bool,
-}
-
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
 struct InviteData {
@@ -1754,7 +1812,7 @@ use super::ciphers::CipherData;
 #[serde(rename_all = "camelCase")]
 struct ImportData {
     ciphers: Vec<CipherData>,
-    collections: Vec<NewCollectionData>,
+    collections: Vec<FullCollectionData>,
     collection_relationships: Vec<RelationsData>,
 }
@@ -2549,7 +2607,7 @@ struct GroupRequest {
     #[serde(default)]
     access_all: bool,
     external_id: Option<String>,
-    collections: Vec<SelectedCollection>,
+    collections: Vec<CollectionData>,
     users: Vec<MembershipId>,
 }
@@ -2570,14 +2628,14 @@ impl GroupRequest {
 #[derive(Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-struct SelectedCollection {
+struct CollectionData {
     id: CollectionId,
     read_only: bool,
     hide_passwords: bool,
     manage: bool,
 }

-impl SelectedCollection {
+impl CollectionData {
     pub fn to_collection_group(&self, groups_uuid: GroupId) -> CollectionGroup {
         CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords, self.manage)
     }
@@ -2660,7 +2718,7 @@ async fn put_group(
 async fn add_update_group(
     mut group: Group,
-    collections: Vec<SelectedCollection>,
+    collections: Vec<CollectionData>,
     members: Vec<MembershipId>,
     org_id: OrganizationId,
     headers: &AdminHeaders,

src/api/icons.rs

@@ -63,6 +63,9 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
 // Build Regex only once since this takes a lot of time.
 static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());

+// The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
+// It is used to prevent sending a specific header which breaks icon downloads.
+// If this function needs to be renamed, also adjust the code in `util.rs`
 #[get("/<domain>/icon.png")]
 fn icon_external(domain: &str) -> Option<Redirect> {
     if !is_valid_domain(domain) {

src/db/models/organization.rs

@@ -517,7 +517,7 @@ impl Membership {
             CONFIG.org_groups_enabled() && Group::is_in_full_access_group(&self.user_uuid, &self.org_uuid, conn).await;

         // If collections are to be included, only include them if the user does not have full access via a group or defined to the user it self
-        let collections: Vec<Value> = if include_collections && !(full_access_group || self.has_full_access()) {
+        let collections: Vec<Value> = if include_collections && !(full_access_group || self.access_all) {
             // Get all collections for the user here already to prevent more queries
             let cu: HashMap<CollectionId, CollectionUser> =
                 CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)

src/util.rs

@@ -56,13 +56,17 @@ impl Fairing for AppHeaders {
         res.set_raw_header("X-Content-Type-Options", "nosniff");
         res.set_raw_header("X-Robots-Tag", "noindex, nofollow");

-        if !res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/")) {
-            res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
-        }

         // Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP
         res.set_raw_header("X-XSS-Protection", "0");

+        // The `Cross-Origin-Resource-Policy` header should not be set on images or on the `icon_external` route.
+        // Otherwise some clients, like the Bitwarden Desktop, will fail to download the icons
+        if !(res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/"))
+            || req.route().is_some_and(|v| v.name.as_deref() == Some("icon_external")))
+        {
+            res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
+        }

         // Do not send the Content-Security-Policy (CSP) Header and X-Frame-Options for the *-connector.html files.
         // This can cause issues when some MFA requests needs to open a popup or page within the clients like WebAuthn, or Duo.
         // This is the same behavior as upstream Bitwarden.