Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-09-10 18:55:57 +03:00)

Compare commits: 28 commits
Commit SHA1s included in this comparison:
66bff73ebf
83d5432cbf
f579a4154c
f5a19c5f8b
aa9bc1f785
f162e85e44
33ef70c192
3d2df6ce11
6cdcb3b297
d1af468700
ae1c53f4e5
bc57c4b193
61ae4c9cf5
8d7b3db33d
e9ec3741ae
dacd50f3f1
9412112639
aaeae16983
d892880dd2
4395e8e888
3dbfc484a5
4ec2507073
ab65d7989b
8707728cdb
631d022e17
211f4492fa
61f9081827
a8e5384c4a
@@ -30,6 +30,10 @@
## Define the size of the connection pool used for connecting to the database.
# DATABASE_MAX_CONNS=10

## Database timeout
## Timeout when acquiring database connection
# DATABASE_TIMEOUT=30

## Database connection initialization
## Allows SQL statements to be run whenever a new database connection is created.
## This is mainly useful for connection-scoped pragmas.
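As an illustration only (the variable name and value below are assumptions for a SQLite setup, not lines from this diff), such a connection-init setting might look like:

# DATABASE_CONN_INIT="PRAGMA busy_timeout = 5000; PRAGMA synchronous = NORMAL;"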
@@ -77,7 +81,7 @@
# PUSH_INSTALLATION_ID=CHANGEME
# PUSH_INSTALLATION_KEY=CHANGEME
## Don't change this unless you know what you're doing.
# PUSH_RELAY_BASE_URI=https://push.bitwarden.com
# PUSH_RELAY_URI=https://push.bitwarden.com

## Controls whether users are allowed to create Bitwarden Sends.
## This setting applies globally to all users.
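For example (a hypothetical line with an assumed variable name, not taken from this diff), disabling Sends globally could look like:

# SENDS_ALLOWED=false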
.github/workflows/build.yml (vendored): 11 lines changed
@@ -24,7 +24,7 @@ on:

jobs:
build:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
timeout-minutes: 120
# Make warnings errors, this is to prevent warnings slipping through.
# This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
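Concretely, "make warnings errors" is usually done with a workflow-level environment entry along these lines (an assumed illustration, not content shown in this hunk):

env:
  RUSTFLAGS: "-D warnings"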
@@ -49,7 +49,7 @@ jobs:

# Install dependencies
- name: "Install dependencies Ubuntu"
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl sqlite build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
# End Install dependencies

@@ -89,7 +89,12 @@ jobs:

# Enable Rust Caching
- uses: Swatinem/rust-cache@2656b87321093db1cb55fbd73183d195214fdfd1 # v2.5.0
- uses: Swatinem/rust-cache@dd05243424bd5c0e585e4b55eb2d7615cdd32f1f # v2.5.1
with:
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
# Like changing the build host from Ubuntu 20.04 to 22.04 for example.
# Only update when really needed! Use a <year>.<month>[.<inc>] format.
prefix-key: "v2023.07-rust"
# End Enable Rust Caching
.github/workflows/hadolint.yml (vendored): 2 lines changed
@@ -8,7 +8,7 @@ on: [
jobs:
hadolint:
name: Validate Dockerfile syntax
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
timeout-minutes: 30
steps:
# Checkout the repo
.github/workflows/release.yml (vendored): 4 lines changed
@@ -24,7 +24,7 @@ jobs:
# Some checks to determine if we need to continue with building a new docker.
# We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
skip_check:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
@@ -38,7 +38,7 @@ jobs:
if: ${{ startsWith(github.ref, 'refs/heads/') }}

docker-build:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
timeout-minutes: 120
needs: skip_check
# Start a local docker registry to be used to generate multi-arch images.
Cargo.lock (generated): 415 lines changed (diff suppressed because it is too large)
Cargo.toml: 34 lines changed
@@ -3,7 +3,7 @@ name = "vaultwarden"
version = "1.0.0"
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
edition = "2021"
rust-version = "1.68.2"
rust-version = "1.69.0"
resolver = "2"

repository = "https://github.com/dani-garcia/vaultwarden"
@@ -51,7 +51,7 @@ dotenvy = { version = "0.15.7", default-features = false }
once_cell = "1.18.0"

# Numerical libraries
num-traits = "0.2.15"
num-traits = "0.2.16"
num-derive = "0.4.0"

# Web framework
@@ -61,18 +61,18 @@ rocket_ws = { git = 'https://github.com/SergioBenitez/Rocket', rev = "ce441b5f46

# WebSockets libraries
tokio-tungstenite = "0.19.0"
rmpv = "1.0.0" # MessagePack library
rmpv = "1.0.1" # MessagePack library

# Concurrent HashMap used for WebSocket messaging and favicons
dashmap = "5.4.0"
dashmap = "5.5.0"

# Async futures
futures = "0.3.28"
tokio = { version = "1.29.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal"] }
tokio = { version = "1.30.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal"] }

# A generic serialization/deserialization framework
serde = { version = "1.0.166", features = ["derive"] }
serde_json = "1.0.99"
serde = { version = "1.0.183", features = ["derive"] }
serde_json = "1.0.104"

# A safe, extensible ORM and Query builder
diesel = { version = "2.1.0", features = ["chrono", "r2d2"] }
@@ -87,12 +87,12 @@ rand = { version = "0.8.5", features = ["small_rng"] }
ring = "0.16.20"

# UUID generation
uuid = { version = "1.4.0", features = ["v4"] }
uuid = { version = "1.4.1", features = ["v4"] }

# Date and time libraries
chrono = { version = "0.4.26", features = ["clock", "serde"], default-features = false }
chrono-tz = "0.8.3"
time = "0.3.22"
time = "0.3.25"

# Job scheduler
job_scheduler_ng = "2.0.4"
@@ -124,11 +124,11 @@ email_address = "0.2.4"
handlebars = { version = "4.3.7", features = ["dir_source"] }

# HTTP client (Used for favicons, version check, DUO and HIBP API)
reqwest = { version = "0.11.18", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] }
reqwest = { version = "0.11.18", features = ["stream", "json", "deflate", "gzip", "brotli", "socks", "cookies", "trust-dns", "native-tls-alpn"] }

# Favicon extraction libraries
html5gum = "0.5.3"
regex = { version = "1.8.4", features = ["std", "perf", "unicode-perl"], default-features = false }
html5gum = "0.5.7"
regex = { version = "1.9.3", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.0"
bytes = "1.4.0"

@@ -140,17 +140,17 @@ cookie = "0.16.2"
cookie_store = "0.19.1"

# Used by U2F, JWT and PostgreSQL
openssl = "0.10.55"
openssl = "0.10.56"

# CLI argument parsing
pico-args = "0.5.0"

# Macro ident concatenation
paste = "1.0.13"
governor = "0.5.1"
paste = "1.0.14"
governor = "0.6.0"

# Check client versions for specific features.
semver = "1.0.17"
semver = "1.0.18"

# Allow overriding the default memory allocator
# Mainly used for the musl builds, since the default musl malloc is very slow
@@ -158,7 +158,7 @@ mimalloc = { version = "0.1.37", features = ["secure"], default-features = false
which = "4.4.0"

# Argon2 library with support for the PHC format
argon2 = "0.5.0"
argon2 = "0.5.1"

# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
rpassword = "7.2.0"
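For context, overriding the global allocator with the mimalloc crate listed above is typically done like this in Rust source (an illustrative sketch based on the crate's documented usage, not code taken from this diff):

use mimalloc::MiMalloc;

// Route all heap allocations through mimalloc instead of the default allocator.
#[global_allocator]
static GLOBAL: MiMalloc = MiMalloc;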
@@ -2,25 +2,25 @@

# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
{% set rust_version = "1.70.0" %}
{% set debian_version = "bullseye" %}
{% set rust_version = "1.71.1" %}
{% set debian_version = "bookworm" %}
{% set alpine_version = "3.17" %}
{% set build_stage_base_image = "docker.io/library/rust:%s-%s" % (rust_version, debian_version) %}
{% if "alpine" in target_file %}
{% if "amd64" in target_file %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:x86_64-musl-stable-%s" % rust_version %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:x86_64-musl-stable-%s-openssl3" % rust_version %}
{% set runtime_stage_base_image = "docker.io/library/alpine:%s" % alpine_version %}
{% set package_arch_target = "x86_64-unknown-linux-musl" %}
{% elif "armv7" in target_file %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:armv7-musleabihf-stable-%s" % rust_version %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:armv7-musleabihf-stable-%s-openssl3" % rust_version %}
{% set runtime_stage_base_image = "docker.io/balenalib/armv7hf-alpine:%s" % alpine_version %}
{% set package_arch_target = "armv7-unknown-linux-musleabihf" %}
{% elif "armv6" in target_file %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:arm-musleabi-stable-%s" % rust_version %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:arm-musleabi-stable-%s-openssl3" % rust_version %}
{% set runtime_stage_base_image = "docker.io/balenalib/rpi-alpine:%s" % alpine_version %}
{% set package_arch_target = "arm-unknown-linux-musleabi" %}
{% elif "arm64" in target_file %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:aarch64-musl-stable-%s" % rust_version %}
{% set build_stage_base_image = "docker.io/blackdex/rust-musl:aarch64-musl-stable-%s-openssl3" % rust_version %}
{% set runtime_stage_base_image = "docker.io/balenalib/aarch64-alpine:%s" % alpine_version %}
{% set package_arch_target = "aarch64-unknown-linux-musl" %}
{% endif %}
@@ -61,8 +61,8 @@
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
{% set vault_version = "v2023.5.0" %}
{% set vault_image_digest = "sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085" %}
{% set vault_version = "v2023.7.1" %}
{% set vault_image_digest = "sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f" %}
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
@@ -91,6 +91,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -98,13 +99,16 @@ RUN {{ mount_rust_cache -}} mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

{% if "alpine" in target_file %}
# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
{% if "armv6" in target_file %}
# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/{{ package_arch_target }}/lib/libatomic.a'
# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
ENV RUSTFLAGS='-Clink-arg=-latomic'
{% endif %}
{% elif "arm" in target_file %}
# Install build dependencies for the {{ package_arch_name }} architecture
RUN dpkg --add-architecture {{ package_arch_name }} \
RUN {{ mount_rust_cache -}} dpkg --add-architecture {{ package_arch_name }} \
&& apt-get update \
&& apt-get install -y \
--no-install-recommends \
@@ -211,13 +215,6 @@ RUN mkdir /data \
&& rm -rf /var/lib/apt/lists/*
{% endif %}

{% if "armv6" in target_file and "alpine" not in target_file %}
# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
# This symlink was there in the buster images, and for some reason this is needed.
RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3

{% endif -%}

{% if "amd64" not in target_file %}
RUN [ "cross-build-end" ]
{% endif %}
@@ -15,18 +15,18 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2023.5.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.5.0
# [docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085]
# $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
# [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085
# [docker.io/vaultwarden/web-vault:v2023.5.0]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
# [docker.io/vaultwarden/web-vault:v2023.7.1]
#
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -80,7 +81,7 @@ RUN cargo build --features ${DB} --release
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/library/debian:bullseye-slim
FROM docker.io/library/debian:bookworm-slim

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
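The dummy-project step above is the usual Cargo dependency-caching trick; a typical continuation looks roughly like the following sketch (an assumed illustration in Dockerfile form, not lines taken from this diff):

WORKDIR /app
# Copy only the manifests first so the dependency layer can be cached
COPY ./Cargo.* ./
# Building the dummy crate compiles and caches all dependencies
RUN cargo build --release
# Then copy the real sources and rebuild only the application code
COPY ./src ./src
RUN touch src/main.rs && cargo build --release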
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -80,7 +81,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/library/debian:bullseye-slim
FROM docker.io/library/debian:bookworm-slim

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -99,7 +100,7 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/aarch64-debian:bullseye
FROM docker.io/balenalib/aarch64-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -41,7 +42,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
&& rustup set profile minimal

# Install build dependencies for the arm64 architecture
RUN dpkg --add-architecture arm64 \
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture arm64 \
&& apt-get update \
&& apt-get install -y \
--no-install-recommends \
@@ -99,7 +100,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/aarch64-debian:bullseye
FROM docker.io/balenalib/aarch64-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -99,7 +100,7 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/rpi-debian:bullseye
FROM docker.io/balenalib/rpi-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -119,10 +120,6 @@ RUN mkdir /data \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
# This symlink was there in the buster images, and for some reason this is needed.
RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3

RUN [ "cross-build-end" ]

VOLUME /data
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,14 +34,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a'
# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
ENV RUSTFLAGS='-Clink-arg=-latomic'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -41,7 +42,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
&& rustup set profile minimal

# Install build dependencies for the armel architecture
RUN dpkg --add-architecture armel \
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture armel \
&& apt-get update \
&& apt-get install -y \
--no-install-recommends \
@@ -99,7 +100,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/rpi-debian:bullseye
FROM docker.io/balenalib/rpi-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -119,10 +120,6 @@ RUN mkdir /data \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
# This symlink was there in the buster images, and for some reason this is needed.
RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3

RUN [ "cross-build-end" ]

VOLUME /data
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,14 +34,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a'
# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
ENV RUSTFLAGS='-Clink-arg=-latomic'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -99,7 +100,7 @@ RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabih
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/armv7hf-debian:bullseye
FROM docker.io/balenalib/armv7hf-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/library/rust:1.70.0-bullseye as build
FROM docker.io/library/rust:1.71.1-bookworm as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,6 +34,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
@@ -41,7 +42,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
&& rustup set profile minimal

# Install build dependencies for the armhf architecture
RUN dpkg --add-architecture armhf \
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture armhf \
&& apt-get update \
&& apt-get install -y \
--no-install-recommends \
@@ -99,7 +100,7 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM docker.io/balenalib/armv7hf-debian:bullseye
FROM docker.io/balenalib/armv7hf-debian:bookworm

ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@@ -15,18 +15,18 @@
FROM docker.io/vaultwarden/web-vault@sha256:e5b5e99d132d50dc73176afb65f41cf3b834fb06bfa1d621ac16c705c3f10085 as vault
FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

########################## BUILD IMAGE ##########################
FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.70.0 as build
FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.71.1-openssl3 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -34,12 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
TZ=UTC \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
REGISTRIES_CRATES_IO_PROTOCOL=sparse \
USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
&& rustup set profile minimal

# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
@@ -0,0 +1,19 @@
|
||||
CREATE TABLE auth_requests (
    uuid                       CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid                  CHAR(36) NOT NULL,
    organization_uuid          CHAR(36),
    request_device_identifier  CHAR(36) NOT NULL,
    device_type                INTEGER NOT NULL,
    request_ip                 TEXT NOT NULL,
    response_device_id         CHAR(36),
    access_code                TEXT NOT NULL,
    public_key                 TEXT NOT NULL,
    enc_key                    TEXT NOT NULL,
    master_password_hash       TEXT NOT NULL,
    approved                   BOOLEAN,
    creation_date              DATETIME NOT NULL,
    response_date              DATETIME,
    authentication_date        DATETIME,
    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
);
@@ -0,0 +1,19 @@
|
||||
CREATE TABLE auth_requests (
    uuid                       CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid                  CHAR(36) NOT NULL,
    organization_uuid          CHAR(36),
    request_device_identifier  CHAR(36) NOT NULL,
    device_type                INTEGER NOT NULL,
    request_ip                 TEXT NOT NULL,
    response_device_id         CHAR(36),
    access_code                TEXT NOT NULL,
    public_key                 TEXT NOT NULL,
    enc_key                    TEXT NOT NULL,
    master_password_hash       TEXT NOT NULL,
    approved                   BOOLEAN,
    creation_date              TIMESTAMP NOT NULL,
    response_date              TIMESTAMP,
    authentication_date        TIMESTAMP,
    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
);
@@ -0,0 +1,19 @@
|
||||
CREATE TABLE auth_requests (
    uuid                       TEXT NOT NULL PRIMARY KEY,
    user_uuid                  TEXT NOT NULL,
    organization_uuid          TEXT,
    request_device_identifier  TEXT NOT NULL,
    device_type                INTEGER NOT NULL,
    request_ip                 TEXT NOT NULL,
    response_device_id         TEXT,
    access_code                TEXT NOT NULL,
    public_key                 TEXT NOT NULL,
    enc_key                    TEXT NOT NULL,
    master_password_hash       TEXT NOT NULL,
    approved                   BOOLEAN,
    creation_date              DATETIME NOT NULL,
    response_date              DATETIME,
    authentication_date        DATETIME,
    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
);
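The three CREATE TABLE statements above look like the per-backend variants of the same auth_requests migration (the CHAR(36)/DATETIME, CHAR(36)/TIMESTAMP and TEXT/DATETIME column types match the usual MySQL, PostgreSQL and SQLite migration folders; the exact file paths are not shown in this view). A minimal sketch of how the nullable `approved` and timestamp columns map onto a Rust model; struct and field names here are illustrative only, the real Diesel model appears later in this changeset:

```rust
// Sketch only: mirrors the columns above, not the actual Diesel model added further down.
use chrono::{NaiveDateTime, Utc};

struct AuthRequestRow {
    approved: Option<bool>,               // NULL = still pending, Some(true/false) once answered
    creation_date: NaiveDateTime,         // DATETIME / TIMESTAMP depending on the backend
    response_date: Option<NaiveDateTime>, // set when the approving device replies
}

fn new_pending_row() -> AuthRequestRow {
    AuthRequestRow { approved: None, creation_date: Utc::now().naive_utc(), response_date: None }
}

fn is_pending(row: &AuthRequestRow) -> bool {
    // get_auth_requests() later filters on exactly this condition.
    row.approved.is_none()
}
```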
@@ -1 +1 @@
1.70.0
1.71.1
||||
|
@@ -1,13 +1,14 @@
|
||||
use crate::db::DbPool;
|
||||
use chrono::Utc;
|
||||
use rocket::serde::json::Json;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
api::{
|
||||
core::log_user_event, register_push_device, unregister_push_device, EmptyResult, JsonResult, JsonUpcase,
|
||||
Notify, NumberOrString, PasswordData, UpdateType,
|
||||
core::log_user_event, register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult,
|
||||
JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType,
|
||||
},
|
||||
auth::{decode_delete, decode_invite, decode_verify_email, Headers},
|
||||
auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers},
|
||||
crypto,
|
||||
db::{models::*, DbConn},
|
||||
mail, CONFIG,
|
||||
@@ -51,6 +52,11 @@ pub fn routes() -> Vec<rocket::Route> {
|
||||
put_device_token,
|
||||
put_clear_device_token,
|
||||
post_clear_device_token,
|
||||
post_auth_request,
|
||||
get_auth_request,
|
||||
put_auth_request,
|
||||
get_auth_request_response,
|
||||
get_auth_requests,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -996,3 +1002,211 @@ async fn put_clear_device_token(uuid: &str, mut conn: DbConn) -> EmptyResult {
|
||||
async fn post_clear_device_token(uuid: &str, conn: DbConn) -> EmptyResult {
|
||||
put_clear_device_token(uuid, conn).await
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
struct AuthRequestRequest {
|
||||
accessCode: String,
|
||||
deviceIdentifier: String,
|
||||
email: String,
|
||||
publicKey: String,
|
||||
#[serde(alias = "type")]
|
||||
_type: i32,
|
||||
}
|
||||
|
||||
#[post("/auth-requests", data = "<data>")]
|
||||
async fn post_auth_request(
|
||||
data: Json<AuthRequestRequest>,
|
||||
headers: ClientHeaders,
|
||||
mut conn: DbConn,
|
||||
nt: Notify<'_>,
|
||||
) -> JsonResult {
|
||||
let data = data.into_inner();
|
||||
|
||||
let user = match User::find_by_mail(&data.email, &mut conn).await {
|
||||
Some(user) => user,
|
||||
None => {
|
||||
err!("AuthRequest doesn't exist")
|
||||
}
|
||||
};
|
||||
|
||||
let mut auth_request = AuthRequest::new(
|
||||
user.uuid.clone(),
|
||||
data.deviceIdentifier.clone(),
|
||||
headers.device_type,
|
||||
headers.ip.ip.to_string(),
|
||||
data.accessCode,
|
||||
data.publicKey,
|
||||
);
|
||||
auth_request.save(&mut conn).await?;
|
||||
|
||||
nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.deviceIdentifier, &mut conn).await;
|
||||
|
||||
Ok(Json(json!({
|
||||
"id": auth_request.uuid,
|
||||
"publicKey": auth_request.public_key,
|
||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||
"requestIpAddress": auth_request.request_ip,
|
||||
"key": null,
|
||||
"masterPasswordHash": null,
|
||||
"creationDate": auth_request.creation_date.and_utc(),
|
||||
"responseDate": null,
|
||||
"requestApproved": false,
|
||||
"origin": CONFIG.domain_origin(),
|
||||
"object": "auth-request"
|
||||
})))
|
||||
}
|
||||
|
||||
#[get("/auth-requests/<uuid>")]
|
||||
async fn get_auth_request(uuid: &str, mut conn: DbConn) -> JsonResult {
|
||||
let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
||||
Some(auth_request) => auth_request,
|
||||
None => {
|
||||
err!("AuthRequest doesn't exist")
|
||||
}
|
||||
};
|
||||
|
||||
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
|
||||
|
||||
Ok(Json(json!(
|
||||
{
|
||||
"id": uuid,
|
||||
"publicKey": auth_request.public_key,
|
||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||
"requestIpAddress": auth_request.request_ip,
|
||||
"key": auth_request.enc_key,
|
||||
"masterPasswordHash": auth_request.master_password_hash,
|
||||
"creationDate": auth_request.creation_date.and_utc(),
|
||||
"responseDate": response_date_utc,
|
||||
"requestApproved": auth_request.approved,
|
||||
"origin": CONFIG.domain_origin(),
|
||||
"object":"auth-request"
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
struct AuthResponseRequest {
|
||||
deviceIdentifier: String,
|
||||
key: String,
|
||||
masterPasswordHash: String,
|
||||
requestApproved: bool,
|
||||
}
|
||||
|
||||
#[put("/auth-requests/<uuid>", data = "<data>")]
|
||||
async fn put_auth_request(
|
||||
uuid: &str,
|
||||
data: Json<AuthResponseRequest>,
|
||||
mut conn: DbConn,
|
||||
ant: AnonymousNotify<'_>,
|
||||
nt: Notify<'_>,
|
||||
) -> JsonResult {
|
||||
let data = data.into_inner();
|
||||
let mut auth_request: AuthRequest = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
||||
Some(auth_request) => auth_request,
|
||||
None => {
|
||||
err!("AuthRequest doesn't exist")
|
||||
}
|
||||
};
|
||||
|
||||
auth_request.approved = Some(data.requestApproved);
|
||||
auth_request.enc_key = data.key;
|
||||
auth_request.master_password_hash = data.masterPasswordHash;
|
||||
auth_request.response_device_id = Some(data.deviceIdentifier.clone());
|
||||
auth_request.save(&mut conn).await?;
|
||||
|
||||
if auth_request.approved.unwrap_or(false) {
|
||||
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
|
||||
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.deviceIdentifier, &mut conn).await;
|
||||
}
|
||||
|
||||
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
|
||||
|
||||
Ok(Json(json!(
|
||||
{
|
||||
"id": uuid,
|
||||
"publicKey": auth_request.public_key,
|
||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||
"requestIpAddress": auth_request.request_ip,
|
||||
"key": auth_request.enc_key,
|
||||
"masterPasswordHash": auth_request.master_password_hash,
|
||||
"creationDate": auth_request.creation_date.and_utc(),
|
||||
"responseDate": response_date_utc,
|
||||
"requestApproved": auth_request.approved,
|
||||
"origin": CONFIG.domain_origin(),
|
||||
"object":"auth-request"
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
#[get("/auth-requests/<uuid>/response?<code>")]
|
||||
async fn get_auth_request_response(uuid: &str, code: &str, mut conn: DbConn) -> JsonResult {
|
||||
let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
||||
Some(auth_request) => auth_request,
|
||||
None => {
|
||||
err!("AuthRequest doesn't exist")
|
||||
}
|
||||
};
|
||||
|
||||
if !auth_request.check_access_code(code) {
|
||||
err!("Access code invalid doesn't exist")
|
||||
}
|
||||
|
||||
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
|
||||
|
||||
Ok(Json(json!(
|
||||
{
|
||||
"id": uuid,
|
||||
"publicKey": auth_request.public_key,
|
||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||
"requestIpAddress": auth_request.request_ip,
|
||||
"key": auth_request.enc_key,
|
||||
"masterPasswordHash": auth_request.master_password_hash,
|
||||
"creationDate": auth_request.creation_date.and_utc(),
|
||||
"responseDate": response_date_utc,
|
||||
"requestApproved": auth_request.approved,
|
||||
"origin": CONFIG.domain_origin(),
|
||||
"object":"auth-request"
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
#[get("/auth-requests")]
|
||||
async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||
let auth_requests = AuthRequest::find_by_user(&headers.user.uuid, &mut conn).await;
|
||||
|
||||
Ok(Json(json!({
|
||||
"data": auth_requests
|
||||
.iter()
|
||||
.filter(|request| request.approved.is_none())
|
||||
.map(|request| {
|
||||
let response_date_utc = request.response_date.map(|response_date| response_date.and_utc());
|
||||
|
||||
json!({
|
||||
"id": request.uuid,
|
||||
"publicKey": request.public_key,
|
||||
"requestDeviceType": DeviceType::from_i32(request.device_type).to_string(),
|
||||
"requestIpAddress": request.request_ip,
|
||||
"key": request.enc_key,
|
||||
"masterPasswordHash": request.master_password_hash,
|
||||
"creationDate": request.creation_date.and_utc(),
|
||||
"responseDate": response_date_utc,
|
||||
"requestApproved": request.approved,
|
||||
"origin": CONFIG.domain_origin(),
|
||||
"object":"auth-request"
|
||||
})
|
||||
}).collect::<Vec<Value>>(),
|
||||
"continuationToken": null,
|
||||
"object": "list"
|
||||
})))
|
||||
}
|
||||
|
||||
pub async fn purge_auth_requests(pool: DbPool) {
|
||||
debug!("Purging auth requests");
|
||||
if let Ok(mut conn) = pool.get().await {
|
||||
AuthRequest::purge_expired_auth_requests(&mut conn).await;
|
||||
} else {
|
||||
error!("Failed to get DB connection while purging trashed ciphers")
|
||||
}
|
||||
}
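Taken together, the new endpoints above implement the "log in with device" flow: the requesting device POSTs to /auth-requests, an already-authenticated client lists pending requests via GET /auth-requests, approves one with PUT /auth-requests/<uuid>, and the requester retrieves the key material from GET /auth-requests/<uuid>/response?code=<access code>. A rough client-side sketch of the requesting side; the base URL, key material, device id and the reqwest usage (blocking client with the "json" feature) are assumptions for illustration, not part of this changeset:

```rust
// Hypothetical client-side sketch of the device-approval flow added above.
use serde_json::{json, Value};

fn request_login_with_device(
    base: &str,
    email: &str,
    access_code: &str,
    public_key_b64: &str,
    device_id: &str,
) -> Result<Value, reqwest::Error> {
    let client = reqwest::blocking::Client::new();

    // 1. The requesting device creates the auth request.
    let created: Value = client
        .post(format!("{base}/auth-requests"))
        .json(&json!({
            "accessCode": access_code,
            "deviceIdentifier": device_id,
            "email": email,
            "publicKey": public_key_b64,
            "type": 0
        }))
        .send()?
        .json()?;
    let id = created["id"].as_str().unwrap_or_default().to_string();

    // 2. Another, already logged-in client approves it via PUT /auth-requests/<id>.

    // 3. The requesting device fetches the response, proving knowledge of the access code.
    client
        .get(format!("{base}/auth-requests/{id}/response"))
        .query(&[("code", access_code)])
        .send()?
        .json()
}
```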
|
||||
|
@@ -8,6 +8,7 @@ mod public;
|
||||
mod sends;
|
||||
pub mod two_factor;
|
||||
|
||||
pub use accounts::purge_auth_requests;
|
||||
pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
|
||||
pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
|
||||
pub use events::{event_cleanup_job, log_event, log_user_event};
|
||||
|
@@ -6,8 +6,7 @@ use serde_json::Value;
|
||||
use crate::{
|
||||
api::{
|
||||
core::{log_event, CipherSyncData, CipherSyncType},
|
||||
ApiResult, EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, JsonVec, Notify, NumberOrString, PasswordData,
|
||||
UpdateType,
|
||||
EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, JsonVec, Notify, NumberOrString, PasswordData, UpdateType,
|
||||
},
|
||||
auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
|
||||
db::{models::*, DbConn},
|
||||
@@ -61,6 +60,7 @@ pub fn routes() -> Vec<Route> {
|
||||
put_policy,
|
||||
get_organization_tax,
|
||||
get_plans,
|
||||
get_plans_all,
|
||||
get_plans_tax_rates,
|
||||
import,
|
||||
post_org_keys,
|
||||
@@ -468,7 +468,11 @@ async fn post_organization_collection_update(
|
||||
}
|
||||
|
||||
collection.name = data.Name;
|
||||
collection.external_id = data.ExternalId;
|
||||
collection.external_id = match data.ExternalId {
|
||||
Some(external_id) if !external_id.trim().is_empty() => Some(external_id),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
collection.save(&mut conn).await?;
|
||||
|
||||
log_event(
|
||||
@@ -1807,12 +1811,28 @@ fn get_plans() -> Json<Value> {
|
||||
"Product": 0,
|
||||
"Name": "Free",
|
||||
"NameLocalizationKey": "planNameFree",
|
||||
"BitwardenProduct": 0,
|
||||
"MaxUsers": 0,
|
||||
"DescriptionLocalizationKey": "planDescFree"
|
||||
},{
|
||||
"Object": "plan",
|
||||
"Type": 0,
|
||||
"Product": 1,
|
||||
"Name": "Free",
|
||||
"NameLocalizationKey": "planNameFree",
|
||||
"BitwardenProduct": 1,
|
||||
"MaxUsers": 0,
|
||||
"DescriptionLocalizationKey": "planDescFree"
|
||||
}],
|
||||
"ContinuationToken": null
|
||||
}))
|
||||
}
|
||||
|
||||
#[get("/plans/all")]
|
||||
fn get_plans_all() -> Json<Value> {
|
||||
get_plans()
|
||||
}
|
||||
|
||||
#[get("/plans/sales-tax-rates")]
|
||||
fn get_plans_tax_rates(_headers: Headers) -> Json<Value> {
|
||||
// Prevent a 404 error, which also causes Javascript errors.
|
||||
@@ -2222,29 +2242,22 @@ struct GroupRequest {
|
||||
}
|
||||
|
||||
impl GroupRequest {
|
||||
pub fn to_group(&self, organizations_uuid: &str) -> ApiResult<Group> {
|
||||
match self.AccessAll {
|
||||
Some(access_all_value) => Ok(Group::new(
|
||||
organizations_uuid.to_owned(),
|
||||
pub fn to_group(&self, organizations_uuid: &str) -> Group {
|
||||
Group::new(
|
||||
String::from(organizations_uuid),
|
||||
self.Name.clone(),
|
||||
access_all_value,
|
||||
self.AccessAll.unwrap_or(false),
|
||||
self.ExternalId.clone(),
|
||||
)),
|
||||
_ => err!("Could not convert GroupRequest to Group, because AccessAll has no value!"),
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
pub fn update_group(&self, mut group: Group) -> ApiResult<Group> {
|
||||
match self.AccessAll {
|
||||
Some(access_all_value) => {
|
||||
pub fn update_group(&self, mut group: Group) -> Group {
|
||||
group.name = self.Name.clone();
|
||||
group.access_all = access_all_value;
|
||||
group.set_external_id(self.ExternalId.clone());
|
||||
group.access_all = self.AccessAll.unwrap_or(false);
|
||||
// Group Updates do not support changing the external_id
|
||||
// These input fields are in a disabled state, and can only be updated/added via ldap_import
|
||||
|
||||
Ok(group)
|
||||
}
|
||||
_ => err!("Could not update group, because AccessAll has no value!"),
|
||||
}
|
||||
group
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2305,7 +2318,7 @@ async fn post_groups(
|
||||
}
|
||||
|
||||
let group_request = data.into_inner().data;
|
||||
let group = group_request.to_group(org_id)?;
|
||||
let group = group_request.to_group(org_id);
|
||||
|
||||
log_event(
|
||||
EventType::GroupCreated as i32,
|
||||
@@ -2339,7 +2352,7 @@ async fn put_group(
|
||||
};
|
||||
|
||||
let group_request = data.into_inner().data;
|
||||
let updated_group = group_request.update_group(group)?;
|
||||
let updated_group = group_request.update_group(group);
|
||||
|
||||
CollectionGroup::delete_all_by_group(group_id, &mut conn).await?;
|
||||
GroupUser::delete_all_by_group(group_id, &mut conn).await?;
|
||||
|
@@ -85,7 +85,7 @@ async fn ldap_import(data: JsonUpcase<OrgImportData>, token: PublicToken, mut co
|
||||
new_user
|
||||
}
|
||||
};
|
||||
let user_org_status = if CONFIG.mail_enabled() {
|
||||
let user_org_status = if CONFIG.mail_enabled() || user.password_hash.is_empty() {
|
||||
UserOrgStatus::Invited as i32
|
||||
} else {
|
||||
UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
|
||||
|
226 src/api/icons.rs
@@ -19,7 +19,7 @@ use tokio::{
|
||||
net::lookup_host,
|
||||
};
|
||||
|
||||
use html5gum::{Emitter, EndTag, HtmlString, InfallibleTokenizer, Readable, StartTag, StringReader, Tokenizer};
|
||||
use html5gum::{Emitter, HtmlString, InfallibleTokenizer, Readable, StringReader, Tokenizer};
|
||||
|
||||
use crate::{
|
||||
error::Error,
|
||||
@@ -46,10 +46,15 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
|
||||
// Generate the cookie store
|
||||
let cookie_store = Arc::new(Jar::default());
|
||||
|
||||
let icon_download_timeout = Duration::from_secs(CONFIG.icon_download_timeout());
|
||||
let pool_idle_timeout = Duration::from_secs(10);
|
||||
// Reuse the client between requests
|
||||
let client = get_reqwest_client_builder()
|
||||
.cookie_provider(Arc::clone(&cookie_store))
|
||||
.timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
|
||||
.timeout(icon_download_timeout)
|
||||
.pool_max_idle_per_host(5) // Configure the Hyper Pool to only have max 5 idle connections
|
||||
.pool_idle_timeout(pool_idle_timeout) // Configure the Hyper Pool to timeout after 10 seconds
|
||||
.trust_dns(true)
|
||||
.default_headers(default_headers.clone());
|
||||
|
||||
match client.build() {
|
||||
@@ -58,9 +63,11 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
|
||||
error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'");
|
||||
get_reqwest_client_builder()
|
||||
.cookie_provider(cookie_store)
|
||||
.timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
|
||||
.default_headers(default_headers)
|
||||
.timeout(icon_download_timeout)
|
||||
.pool_max_idle_per_host(5) // Configure the Hyper Pool to only have max 5 idle connections
|
||||
.pool_idle_timeout(pool_idle_timeout) // Configure the Hyper Pool to timeout after 10 seconds
|
||||
.trust_dns(false)
|
||||
.default_headers(default_headers)
|
||||
.build()
|
||||
.expect("Failed to build client")
|
||||
}
|
||||
@@ -258,7 +265,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Clone)]
|
||||
enum DomainBlacklistReason {
|
||||
Regex,
|
||||
IP,
|
||||
@@ -415,38 +422,34 @@ fn get_favicons_node(
|
||||
const TAG_LINK: &[u8] = b"link";
|
||||
const TAG_BASE: &[u8] = b"base";
|
||||
const TAG_HEAD: &[u8] = b"head";
|
||||
const ATTR_REL: &[u8] = b"rel";
|
||||
const ATTR_HREF: &[u8] = b"href";
|
||||
const ATTR_SIZES: &[u8] = b"sizes";
|
||||
|
||||
let mut base_url = url.clone();
|
||||
let mut icon_tags: Vec<StartTag> = Vec::new();
|
||||
let mut icon_tags: Vec<Tag> = Vec::new();
|
||||
for token in dom {
|
||||
match token {
|
||||
FaviconToken::StartTag(tag) => {
|
||||
if *tag.name == TAG_LINK
|
||||
&& tag.attributes.contains_key(ATTR_REL)
|
||||
&& tag.attributes.contains_key(ATTR_HREF)
|
||||
{
|
||||
let rel_value = std::str::from_utf8(tag.attributes.get(ATTR_REL).unwrap())
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase();
|
||||
if rel_value.contains("icon") && !rel_value.contains("mask-icon") {
|
||||
icon_tags.push(tag);
|
||||
let tag_name: &[u8] = &token.tag.name;
|
||||
match tag_name {
|
||||
TAG_LINK => {
|
||||
icon_tags.push(token.tag);
|
||||
}
|
||||
} else if *tag.name == TAG_BASE && tag.attributes.contains_key(ATTR_HREF) {
|
||||
let href = std::str::from_utf8(tag.attributes.get(ATTR_HREF).unwrap()).unwrap_or_default();
|
||||
TAG_BASE => {
|
||||
base_url = if let Some(href) = token.tag.attributes.get(ATTR_HREF) {
|
||||
let href = std::str::from_utf8(href).unwrap_or_default();
|
||||
debug!("Found base href: {href}");
|
||||
base_url = match base_url.join(href) {
|
||||
match base_url.join(href) {
|
||||
Ok(inner_url) => inner_url,
|
||||
_ => url.clone(),
|
||||
_ => continue,
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
}
|
||||
}
|
||||
FaviconToken::EndTag(tag) => {
|
||||
if *tag.name == TAG_HEAD {
|
||||
TAG_HEAD if token.closing => {
|
||||
break;
|
||||
}
|
||||
_ => {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -820,43 +823,64 @@ impl reqwest::cookie::CookieStore for Jar {
|
||||
}
|
||||
|
||||
/// Custom FaviconEmitter for the html5gum parser.
|
||||
/// The FaviconEmitter is using an almost 1:1 copy of the DefaultEmitter with some small changes.
|
||||
/// The FaviconEmitter is using an optimized version of the DefaultEmitter.
|
||||
/// This prevents emitting tags like comments, doctype and also strings between the tags.
|
||||
/// But it will also only emit the tags we need and only if they have the correct attributes.
/// Therefore, parsing the HTML content is faster.
|
||||
use std::collections::{BTreeSet, VecDeque};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum FaviconToken {
|
||||
StartTag(StartTag),
|
||||
EndTag(EndTag),
|
||||
#[derive(Default)]
|
||||
pub struct Tag {
|
||||
/// The tag's name, such as `"link"` or `"base"`.
|
||||
pub name: HtmlString,
|
||||
|
||||
/// A mapping for any HTML attributes this start tag may have.
|
||||
///
|
||||
/// Duplicate attributes are ignored after the first one as per WHATWG spec.
|
||||
pub attributes: BTreeMap<HtmlString, HtmlString>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
struct FaviconToken {
|
||||
tag: Tag,
|
||||
closing: bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct FaviconEmitter {
|
||||
current_token: Option<FaviconToken>,
|
||||
last_start_tag: HtmlString,
|
||||
current_attribute: Option<(HtmlString, HtmlString)>,
|
||||
seen_attributes: BTreeSet<HtmlString>,
|
||||
emitted_tokens: VecDeque<FaviconToken>,
|
||||
emit_token: bool,
|
||||
}
|
||||
|
||||
impl FaviconEmitter {
|
||||
fn emit_token(&mut self, token: FaviconToken) {
|
||||
self.emitted_tokens.push_front(token);
|
||||
fn flush_current_attribute(&mut self, emit_current_tag: bool) {
|
||||
const ATTR_HREF: &[u8] = b"href";
|
||||
const ATTR_REL: &[u8] = b"rel";
|
||||
const TAG_LINK: &[u8] = b"link";
|
||||
const TAG_BASE: &[u8] = b"base";
|
||||
const TAG_HEAD: &[u8] = b"head";
|
||||
|
||||
if let Some(ref mut token) = self.current_token {
|
||||
let tag_name: &[u8] = &token.tag.name;
|
||||
|
||||
if self.current_attribute.is_some() && (tag_name == TAG_BASE || tag_name == TAG_LINK) {
|
||||
let (k, v) = self.current_attribute.take().unwrap();
|
||||
token.tag.attributes.entry(k).and_modify(|_| {}).or_insert(v);
|
||||
}
|
||||
|
||||
fn flush_current_attribute(&mut self) {
|
||||
if let Some((k, v)) = self.current_attribute.take() {
|
||||
match self.current_token {
|
||||
Some(FaviconToken::StartTag(ref mut tag)) => {
|
||||
tag.attributes.entry(k).and_modify(|_| {}).or_insert(v);
|
||||
let tag_attr = &token.tag.attributes;
|
||||
match tag_name {
|
||||
TAG_HEAD if token.closing => self.emit_token = true,
|
||||
TAG_BASE if tag_attr.contains_key(ATTR_HREF) => self.emit_token = true,
|
||||
TAG_LINK if emit_current_tag && tag_attr.contains_key(ATTR_REL) && tag_attr.contains_key(ATTR_HREF) => {
|
||||
let rel_value =
|
||||
std::str::from_utf8(token.tag.attributes.get(ATTR_REL).unwrap()).unwrap_or_default();
|
||||
if rel_value.contains("icon") && !rel_value.contains("mask-icon") {
|
||||
self.emit_token = true
|
||||
}
|
||||
Some(FaviconToken::EndTag(_)) => {
|
||||
self.seen_attributes.insert(k);
|
||||
}
|
||||
_ => {
|
||||
debug_assert!(false);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -871,87 +895,71 @@ impl Emitter for FaviconEmitter {
|
||||
}
|
||||
|
||||
fn pop_token(&mut self) -> Option<Self::Token> {
|
||||
self.emitted_tokens.pop_back()
|
||||
}
|
||||
|
||||
fn init_start_tag(&mut self) {
|
||||
self.current_token = Some(FaviconToken::StartTag(StartTag::default()));
|
||||
}
|
||||
|
||||
fn init_end_tag(&mut self) {
|
||||
self.current_token = Some(FaviconToken::EndTag(EndTag::default()));
|
||||
self.seen_attributes.clear();
|
||||
}
|
||||
|
||||
fn emit_current_tag(&mut self) -> Option<html5gum::State> {
|
||||
self.flush_current_attribute();
|
||||
let mut token = self.current_token.take().unwrap();
|
||||
let mut emit = false;
|
||||
match token {
|
||||
FaviconToken::EndTag(ref mut tag) => {
|
||||
// Always clean seen attributes
|
||||
self.seen_attributes.clear();
|
||||
self.set_last_start_tag(None);
|
||||
|
||||
// Only trigger an emit for the </head> tag.
|
||||
// This is matched, and will break the for-loop.
|
||||
if *tag.name == b"head" {
|
||||
emit = true;
|
||||
}
|
||||
}
|
||||
FaviconToken::StartTag(ref mut tag) => {
|
||||
// Only trriger an emit for <link> and <base> tags.
|
||||
// These are the only tags we want to parse.
|
||||
if *tag.name == b"link" || *tag.name == b"base" {
|
||||
self.set_last_start_tag(Some(&tag.name));
|
||||
emit = true;
|
||||
} else {
|
||||
self.set_last_start_tag(None);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only emit the tags we want to parse.
|
||||
if emit {
|
||||
self.emit_token(token);
|
||||
if self.emit_token {
|
||||
self.emit_token = false;
|
||||
return self.current_token.take();
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn push_tag_name(&mut self, s: &[u8]) {
|
||||
match self.current_token {
|
||||
Some(
|
||||
FaviconToken::StartTag(StartTag {
|
||||
ref mut name,
|
||||
..
|
||||
})
|
||||
| FaviconToken::EndTag(EndTag {
|
||||
ref mut name,
|
||||
..
|
||||
}),
|
||||
) => {
|
||||
name.extend(s);
|
||||
fn init_start_tag(&mut self) {
|
||||
self.current_token = Some(FaviconToken {
|
||||
tag: Tag::default(),
|
||||
closing: false,
|
||||
});
|
||||
}
|
||||
_ => debug_assert!(false),
|
||||
|
||||
fn init_end_tag(&mut self) {
|
||||
self.current_token = Some(FaviconToken {
|
||||
tag: Tag::default(),
|
||||
closing: true,
|
||||
});
|
||||
}
|
||||
|
||||
fn emit_current_tag(&mut self) -> Option<html5gum::State> {
|
||||
self.flush_current_attribute(true);
|
||||
self.last_start_tag.clear();
|
||||
if self.current_token.is_some() && !self.current_token.as_ref().unwrap().closing {
|
||||
self.last_start_tag.extend(&*self.current_token.as_ref().unwrap().tag.name);
|
||||
}
|
||||
html5gum::naive_next_state(&self.last_start_tag)
|
||||
}
|
||||
|
||||
fn push_tag_name(&mut self, s: &[u8]) {
|
||||
if let Some(ref mut token) = self.current_token {
|
||||
token.tag.name.extend(s);
|
||||
}
|
||||
}
|
||||
|
||||
fn init_attribute(&mut self) {
|
||||
self.flush_current_attribute();
|
||||
self.current_attribute = Some(Default::default());
|
||||
self.flush_current_attribute(false);
|
||||
self.current_attribute = match &self.current_token {
|
||||
Some(token) => {
|
||||
let tag_name: &[u8] = &token.tag.name;
|
||||
match tag_name {
|
||||
b"link" | b"head" | b"base" => Some(Default::default()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
|
||||
fn push_attribute_name(&mut self, s: &[u8]) {
|
||||
self.current_attribute.as_mut().unwrap().0.extend(s);
|
||||
if let Some(attr) = &mut self.current_attribute {
|
||||
attr.0.extend(s)
|
||||
}
|
||||
}
|
||||
|
||||
fn push_attribute_value(&mut self, s: &[u8]) {
|
||||
self.current_attribute.as_mut().unwrap().1.extend(s);
|
||||
if let Some(attr) = &mut self.current_attribute {
|
||||
attr.1.extend(s)
|
||||
}
|
||||
}
|
||||
|
||||
fn current_is_appropriate_end_tag_token(&mut self) -> bool {
|
||||
match self.current_token {
|
||||
Some(FaviconToken::EndTag(ref tag)) => !self.last_start_tag.is_empty() && self.last_start_tag == tag.name,
|
||||
match &self.current_token {
|
||||
Some(token) if token.closing => !self.last_start_tag.is_empty() && self.last_start_tag == token.tag.name,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
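With these changes the emitter itself decides which tokens are worth yielding: only `<base href>`, icon-bearing `<link>` tags and the closing `</head>` ever reach the for-loop in get_favicons_node(). A rough sketch of how such an emitter is driven with html5gum; the construction below is inferred from the imports at the top of this file and is not shown in the diff:

```rust
// Rough sketch, assuming the tokenizer is built roughly like this next to the emitter above.
use html5gum::{StringReader, Tokenizer};

fn list_favicon_tokens(html: &str) {
    let dom = Tokenizer::new_with_emitter(StringReader::new(html), FaviconEmitter::default()).infallible();
    for token in dom {
        // Each token is a FaviconToken { tag, closing }; a closing <head> ends the scan early.
        let name = String::from_utf8_lossy(&token.tag.name);
        println!("{}{name}", if token.closing { "</" } else { "<" });
    }
}
```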
@@ -155,7 +155,27 @@ async fn _password_login(
|
||||
|
||||
// Check password
|
||||
let password = data.password.as_ref().unwrap();
|
||||
if !user.check_valid_password(password) {
|
||||
if let Some(auth_request_uuid) = data.auth_request.clone() {
|
||||
if let Some(auth_request) = AuthRequest::find_by_uuid(auth_request_uuid.as_str(), conn).await {
|
||||
if !auth_request.check_access_code(password) {
|
||||
err!(
|
||||
"Username or access code is incorrect. Try again",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn,
|
||||
}
|
||||
)
|
||||
}
|
||||
} else {
|
||||
err!(
|
||||
"Auth request not found. Try again.",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
ErrorEvent {
|
||||
event: EventType::UserFailedLogIn,
|
||||
}
|
||||
)
|
||||
}
|
||||
} else if !user.check_valid_password(password) {
|
||||
err!(
|
||||
"Username or password is incorrect. Try again",
|
||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
||||
@@ -260,6 +280,10 @@ async fn _password_login(
|
||||
"ResetMasterPassword": false,// TODO: Same as above
|
||||
"scope": scope,
|
||||
"unofficialServer": true,
|
||||
"UserDecryptionOptions": {
|
||||
"HasMasterPassword": !user.password_hash.is_empty(),
|
||||
"Object": "userDecryptionOptions"
|
||||
},
|
||||
});
|
||||
|
||||
if let Some(token) = twofactor_token {
|
||||
@@ -646,6 +670,8 @@ struct ConnectData {
|
||||
#[field(name = uncased("two_factor_remember"))]
|
||||
#[field(name = uncased("twofactorremember"))]
|
||||
two_factor_remember: Option<i32>,
|
||||
#[field(name = uncased("authrequest"))]
|
||||
auth_request: Option<String>,
|
||||
}
|
||||
|
||||
fn _check_is_some<T>(value: &Option<T>, msg: &str) -> EmptyResult {
|
||||
|
@@ -13,6 +13,7 @@ pub use crate::api::{
|
||||
admin::catchers as admin_catchers,
|
||||
admin::routes as admin_routes,
|
||||
core::catchers as core_catchers,
|
||||
core::purge_auth_requests,
|
||||
core::purge_sends,
|
||||
core::purge_trashed_ciphers,
|
||||
core::routes as core_routes,
|
||||
@@ -22,7 +23,7 @@ pub use crate::api::{
|
||||
icons::routes as icons_routes,
|
||||
identity::routes as identity_routes,
|
||||
notifications::routes as notifications_routes,
|
||||
notifications::{start_notification_server, Notify, UpdateType},
|
||||
notifications::{start_notification_server, AnonymousNotify, Notify, UpdateType, WS_ANONYMOUS_SUBSCRIPTIONS},
|
||||
push::{
|
||||
push_cipher_update, push_folder_update, push_logout, push_send_update, push_user_update, register_push_device,
|
||||
unregister_push_device,
|
||||
|
@@ -20,7 +20,7 @@ use tokio_tungstenite::{
|
||||
};
|
||||
|
||||
use crate::{
|
||||
auth::ClientIp,
|
||||
auth::{ClientIp, WsAccessTokenHeader},
|
||||
db::{
|
||||
models::{Cipher, Folder, Send as DbSend, User},
|
||||
DbConn,
|
||||
@@ -36,10 +36,19 @@ static WS_USERS: Lazy<Arc<WebSocketUsers>> = Lazy::new(|| {
|
||||
})
|
||||
});
|
||||
|
||||
use super::{push_cipher_update, push_folder_update, push_logout, push_send_update, push_user_update};
|
||||
pub static WS_ANONYMOUS_SUBSCRIPTIONS: Lazy<Arc<AnonymousWebSocketSubscriptions>> = Lazy::new(|| {
|
||||
Arc::new(AnonymousWebSocketSubscriptions {
|
||||
map: Arc::new(dashmap::DashMap::new()),
|
||||
})
|
||||
});
|
||||
|
||||
use super::{
|
||||
push::push_auth_request, push::push_auth_response, push_cipher_update, push_folder_update, push_logout,
|
||||
push_send_update, push_user_update,
|
||||
};
|
||||
|
||||
pub fn routes() -> Vec<Route> {
|
||||
routes![websockets_hub]
|
||||
routes![websockets_hub, anonymous_websockets_hub]
|
||||
}
|
||||
|
||||
#[derive(FromForm, Debug)]
|
||||
@@ -74,16 +83,47 @@ impl Drop for WSEntryMapGuard {
|
||||
}
|
||||
}
|
||||
|
||||
struct WSAnonymousEntryMapGuard {
|
||||
subscriptions: Arc<AnonymousWebSocketSubscriptions>,
|
||||
token: String,
|
||||
addr: IpAddr,
|
||||
}
|
||||
|
||||
impl WSAnonymousEntryMapGuard {
|
||||
fn new(subscriptions: Arc<AnonymousWebSocketSubscriptions>, token: String, addr: IpAddr) -> Self {
|
||||
Self {
|
||||
subscriptions,
|
||||
token,
|
||||
addr,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for WSAnonymousEntryMapGuard {
|
||||
fn drop(&mut self) {
|
||||
info!("Closing WS connection from {}", self.addr);
|
||||
self.subscriptions.map.remove(&self.token);
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/hub?<data..>")]
|
||||
fn websockets_hub<'r>(
|
||||
ws: rocket_ws::WebSocket,
|
||||
data: WsAccessToken,
|
||||
ip: ClientIp,
|
||||
header_token: WsAccessTokenHeader,
|
||||
) -> Result<rocket_ws::Stream!['r], Error> {
|
||||
let addr = ip.ip;
|
||||
info!("Accepting Rocket WS connection from {addr}");
|
||||
|
||||
let Some(token) = data.access_token else { err_code!("Invalid claim", 401) };
|
||||
let token = if let Some(token) = data.access_token {
|
||||
token
|
||||
} else if let Some(token) = header_token.access_token {
|
||||
token
|
||||
} else {
|
||||
err_code!("Invalid claim", 401)
|
||||
};
|
||||
|
||||
let Ok(claims) = crate::auth::decode_login(&token) else { err_code!("Invalid token", 401) };
|
||||
|
||||
let (mut rx, guard) = {
|
||||
@@ -144,6 +184,72 @@ fn websockets_hub<'r>(
|
||||
})
|
||||
}
|
||||
|
||||
#[get("/anonymous-hub?<token..>")]
|
||||
fn anonymous_websockets_hub<'r>(
|
||||
ws: rocket_ws::WebSocket,
|
||||
token: String,
|
||||
ip: ClientIp,
|
||||
) -> Result<rocket_ws::Stream!['r], Error> {
|
||||
let addr = ip.ip;
|
||||
info!("Accepting Anonymous Rocket WS connection from {addr}");
|
||||
|
||||
let (mut rx, guard) = {
|
||||
let subscriptions = Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS);
|
||||
|
||||
// Add a channel to send messages to this client to the map
|
||||
let (tx, rx) = tokio::sync::mpsc::channel::<Message>(100);
|
||||
subscriptions.map.insert(token.clone(), tx);
|
||||
|
||||
// Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
|
||||
(rx, WSAnonymousEntryMapGuard::new(subscriptions, token, addr))
|
||||
};
|
||||
|
||||
Ok({
|
||||
rocket_ws::Stream! { ws => {
|
||||
let mut ws = ws;
|
||||
let _guard = guard;
|
||||
let mut interval = tokio::time::interval(Duration::from_secs(15));
|
||||
loop {
|
||||
tokio::select! {
|
||||
res = ws.next() => {
|
||||
match res {
|
||||
Some(Ok(message)) => {
|
||||
match message {
|
||||
// Respond to any pings
|
||||
Message::Ping(ping) => yield Message::Pong(ping),
|
||||
Message::Pong(_) => {/* Ignored */},
|
||||
|
||||
// We should receive an initial message with the protocol and version, and we will reply to it
|
||||
Message::Text(ref message) => {
|
||||
let msg = message.strip_suffix(RECORD_SEPARATOR as char).unwrap_or(message);
|
||||
|
||||
if serde_json::from_str(msg).ok() == Some(INITIAL_MESSAGE) {
|
||||
yield Message::binary(INITIAL_RESPONSE);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Just echo anything else the client sends
|
||||
_ => yield message,
|
||||
}
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
res = rx.recv() => {
|
||||
match res {
|
||||
Some(res) => yield res,
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
|
||||
_ = interval.tick() => yield Message::Ping(create_ping())
|
||||
}
|
||||
}
|
||||
}}
|
||||
})
|
||||
}
|
||||
|
||||
//
|
||||
// Websockets server
|
||||
//
|
||||
@@ -352,6 +458,69 @@ impl WebSocketUsers {
|
||||
push_send_update(ut, send, acting_device_uuid, conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_auth_request(
|
||||
&self,
|
||||
user_uuid: &String,
|
||||
auth_request_uuid: &String,
|
||||
acting_device_uuid: &String,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
let data = create_update(
|
||||
vec![("Id".into(), auth_request_uuid.clone().into()), ("UserId".into(), user_uuid.clone().into())],
|
||||
UpdateType::AuthRequest,
|
||||
Some(acting_device_uuid.to_string()),
|
||||
);
|
||||
self.send_update(user_uuid, &data).await;
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_auth_request(user_uuid.to_string(), auth_request_uuid.to_string(), conn).await;
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_auth_response(
|
||||
&self,
|
||||
user_uuid: &String,
|
||||
auth_response_uuid: &str,
|
||||
approving_device_uuid: String,
|
||||
conn: &mut DbConn,
|
||||
) {
|
||||
let data = create_update(
|
||||
vec![("Id".into(), auth_response_uuid.to_owned().into()), ("UserId".into(), user_uuid.clone().into())],
|
||||
UpdateType::AuthRequestResponse,
|
||||
approving_device_uuid.clone().into(),
|
||||
);
|
||||
self.send_update(auth_response_uuid, &data).await;
|
||||
|
||||
if CONFIG.push_enabled() {
|
||||
push_auth_response(user_uuid.to_string(), auth_response_uuid.to_string(), approving_device_uuid, conn)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AnonymousWebSocketSubscriptions {
|
||||
map: Arc<dashmap::DashMap<String, Sender<Message>>>,
|
||||
}
|
||||
|
||||
impl AnonymousWebSocketSubscriptions {
|
||||
async fn send_update(&self, token: &str, data: &[u8]) {
|
||||
if let Some(sender) = self.map.get(token).map(|v| v.clone()) {
|
||||
if let Err(e) = sender.send(Message::binary(data)).await {
|
||||
error!("Error sending WS update {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_auth_response(&self, user_uuid: &String, auth_response_uuid: &str) {
|
||||
let data = create_anonymous_update(
|
||||
vec![("Id".into(), auth_response_uuid.to_owned().into()), ("UserId".into(), user_uuid.clone().into())],
|
||||
UpdateType::AuthRequestResponse,
|
||||
user_uuid.to_string(),
|
||||
);
|
||||
self.send_update(auth_response_uuid, &data).await;
|
||||
}
|
||||
}
|
||||
|
||||
/* Message Structure
|
||||
@@ -387,6 +556,24 @@ fn create_update(payload: Vec<(Value, Value)>, ut: UpdateType, acting_device_uui
|
||||
serialize(value)
|
||||
}
|
||||
|
||||
fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: String) -> Vec<u8> {
|
||||
use rmpv::Value as V;
|
||||
|
||||
let value = V::Array(vec![
|
||||
1.into(),
|
||||
V::Map(vec![]),
|
||||
V::Nil,
|
||||
"AuthRequestResponseRecieved".into(),
|
||||
V::Array(vec![V::Map(vec![
|
||||
("Type".into(), (ut as i32).into()),
|
||||
("Payload".into(), payload.into()),
|
||||
("UserId".into(), user_id.into()),
|
||||
])]),
|
||||
]);
|
||||
|
||||
serialize(value)
|
||||
}
|
||||
|
||||
fn create_ping() -> Vec<u8> {
|
||||
serialize(Value::Array(vec![6.into()]))
|
||||
}
|
||||
@@ -420,6 +607,7 @@ pub enum UpdateType {
|
||||
}
|
||||
|
||||
pub type Notify<'a> = &'a rocket::State<Arc<WebSocketUsers>>;
|
||||
pub type AnonymousNotify<'a> = &'a rocket::State<Arc<AnonymousWebSocketSubscriptions>>;
|
||||
|
||||
pub fn start_notification_server() -> Arc<WebSocketUsers> {
|
||||
let users = Arc::clone(&WS_USERS);
|
||||
|
@@ -255,3 +255,40 @@ async fn send_to_push_relay(notification_data: Value) {
|
||||
error!("An error occured while sending a send update to the push relay: {}", e);
|
||||
};
|
||||
}
|
||||
|
||||
pub async fn push_auth_request(user_uuid: String, auth_request_uuid: String, conn: &mut crate::db::DbConn) {
|
||||
if Device::check_user_has_push_device(user_uuid.as_str(), conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user_uuid,
|
||||
"organizationId": (),
|
||||
"deviceId": null,
|
||||
"identifier": null,
|
||||
"type": UpdateType::AuthRequest as i32,
|
||||
"payload": {
|
||||
"id": auth_request_uuid,
|
||||
"userId": user_uuid,
|
||||
}
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn push_auth_response(
|
||||
user_uuid: String,
|
||||
auth_request_uuid: String,
|
||||
approving_device_uuid: String,
|
||||
conn: &mut crate::db::DbConn,
|
||||
) {
|
||||
if Device::check_user_has_push_device(user_uuid.as_str(), conn).await {
|
||||
tokio::task::spawn(send_to_push_relay(json!({
|
||||
"userId": user_uuid,
|
||||
"organizationId": (),
|
||||
"deviceId": approving_device_uuid,
|
||||
"identifier": approving_device_uuid,
|
||||
"type": UpdateType::AuthRequestResponse as i32,
|
||||
"payload": {
|
||||
"id": auth_request_uuid,
|
||||
"userId": user_uuid,
|
||||
}
|
||||
})));
|
||||
}
|
||||
}
|
||||
|
@@ -14,11 +14,17 @@ use crate::{
|
||||
pub fn routes() -> Vec<Route> {
|
||||
// If addding more routes here, consider also adding them to
|
||||
// crate::utils::LOGGED_ROUTES to make sure they appear in the log
|
||||
let mut routes = routes![attachments, alive, alive_head, static_files];
|
||||
if CONFIG.web_vault_enabled() {
|
||||
routes![web_index, web_index_head, app_id, web_files, attachments, alive, alive_head, static_files]
|
||||
} else {
|
||||
routes![attachments, alive, alive_head, static_files]
|
||||
routes.append(&mut routes![web_index, web_index_head, app_id, web_files]);
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if CONFIG.reload_templates() {
|
||||
routes.append(&mut routes![_static_files_dev]);
|
||||
}
|
||||
|
||||
routes
|
||||
}
|
||||
|
||||
pub fn catchers() -> Vec<Catcher> {
|
||||
@@ -94,7 +100,7 @@ async fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
|
||||
|
||||
#[get("/attachments/<uuid>/<file_id>?<token>")]
|
||||
async fn attachments(uuid: SafeString, file_id: SafeString, token: String) -> Option<NamedFile> {
|
||||
let Ok(claims) = dbg!(decode_file_download(&token)) else { return None };
|
||||
let Ok(claims) = decode_file_download(&token) else { return None };
|
||||
if claims.sub != *uuid || claims.file_id != *file_id {
|
||||
return None;
|
||||
}
|
||||
@@ -116,7 +122,30 @@ fn alive_head(_conn: DbConn) -> EmptyResult {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[get("/vw_static/<filename>")]
|
||||
// This endpoint/function is used during development and development only.
|
||||
// It allows to easily develop the admin interface by always loading the files from disk instead from a slice of bytes
|
||||
// This will only be active during a debug build and only when `RELOAD_TEMPLATES` is set to `true`
|
||||
// NOTE: Do not forget to add any new files added to the `static_files` function below!
|
||||
#[cfg(debug_assertions)]
|
||||
#[get("/vw_static/<filename>", rank = 1)]
|
||||
pub async fn _static_files_dev(filename: PathBuf) -> Option<NamedFile> {
|
||||
warn!("LOADING STATIC FILES FROM DISK");
|
||||
let file = filename.to_str().unwrap_or_default();
|
||||
let ext = filename.extension().unwrap_or_default();
|
||||
|
||||
let path = if ext == "png" || ext == "svg" {
|
||||
tokio::fs::canonicalize(Path::new(file!()).parent().unwrap().join("../static/images/").join(file)).await
|
||||
} else {
|
||||
tokio::fs::canonicalize(Path::new(file!()).parent().unwrap().join("../static/scripts/").join(file)).await
|
||||
};
|
||||
|
||||
if let Ok(path) = path {
|
||||
return NamedFile::open(path).await.ok();
|
||||
};
|
||||
None
|
||||
}
|
||||
|
||||
#[get("/vw_static/<filename>", rank = 2)]
|
||||
pub fn static_files(filename: &str) -> Result<(ContentType, &'static [u8]), Error> {
|
||||
match filename {
|
||||
"404.png" => Ok((ContentType::PNG, include_bytes!("../static/images/404.png"))),
|
||||
@@ -138,12 +167,12 @@ pub fn static_files(filename: &str) -> Result<(ContentType, &'static [u8]), Erro
|
||||
Ok((ContentType::JavaScript, include_bytes!("../static/scripts/admin_diagnostics.js")))
|
||||
}
|
||||
"bootstrap.css" => Ok((ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
|
||||
"bootstrap-native.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))),
|
||||
"bootstrap.bundle.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap.bundle.js"))),
|
||||
"jdenticon.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jdenticon.js"))),
|
||||
"datatables.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
|
||||
"datatables.css" => Ok((ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
|
||||
"jquery-3.6.4.slim.js" => {
|
||||
Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.6.4.slim.js")))
|
||||
"jquery-3.7.0.slim.js" => {
|
||||
Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.7.0.slim.js")))
|
||||
}
|
||||
_ => err!(format!("Static file not found: {filename}")),
|
||||
}
|
||||
|
23 src/auth.rs
@@ -825,3 +825,26 @@ impl<'r> FromRequest<'r> for ClientIp {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct WsAccessTokenHeader {
|
||||
pub access_token: Option<String>,
|
||||
}
|
||||
|
||||
#[rocket::async_trait]
|
||||
impl<'r> FromRequest<'r> for WsAccessTokenHeader {
|
||||
type Error = ();
|
||||
|
||||
async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
|
||||
let headers = request.headers();
|
||||
|
||||
// Get access_token
|
||||
let access_token = match headers.get_one("Authorization") {
|
||||
Some(a) => a.rsplit("Bearer ").next().map(String::from),
|
||||
None => None,
|
||||
};
|
||||
|
||||
Outcome::Success(Self {
|
||||
access_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@@ -409,6 +409,10 @@ make_config! {
|
||||
/// Event cleanup schedule |> Cron schedule of the job that cleans old events from the event table.
|
||||
/// Defaults to daily. Set blank to disable this job.
|
||||
event_cleanup_schedule: String, false, def, "0 10 0 * * *".to_string();
|
||||
/// Auth Request cleanup schedule |> Cron schedule of the job that cleans old auth requests from the auth request.
|
||||
/// Defaults to every minute. Set blank to disable this job.
|
||||
auth_request_purge_schedule: String, false, def, "30 * * * * *".to_string();
|
||||
|
||||
},
|
||||
|
||||
/// General settings
|
||||
@@ -492,7 +496,7 @@ make_config! {
|
||||
/// Invitation organization name |> Name shown in the invitation emails that don't come from a specific organization
|
||||
invitation_org_name: String, true, def, "Vaultwarden".to_string();
|
||||
|
||||
/// Events days retain |> Number of days to retain events stored in the database. If unset, events are kept indefently.
|
||||
/// Events days retain |> Number of days to retain events stored in the database. If unset, events are kept indefinitely.
|
||||
events_days_retain: i64, false, option;
|
||||
},
|
||||
|
||||
@@ -520,7 +524,7 @@ make_config! {
|
||||
/// has been decided on, consider using permanent redirects for cacheability. The legacy codes
|
||||
/// are currently better supported by the Bitwarden clients.
|
||||
icon_redirect_code: u32, true, def, 302;
|
||||
/// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded
|
||||
/// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be refreshed
|
||||
icon_cache_ttl: u64, true, def, 2_592_000;
|
||||
/// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again.
|
||||
icon_cache_negttl: u64, true, def, 259_200;
|
||||
@@ -530,7 +534,7 @@ make_config! {
|
||||
/// Useful to hide other servers in the local network. Check the WIKI for more details
|
||||
icon_blacklist_regex: String, true, option;
|
||||
/// Icon blacklist non global IPs |> Any IP which is not defined as a global IP will be blacklisted.
|
||||
/// Usefull to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
|
||||
/// Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
|
||||
icon_blacklist_non_global_ips: bool, true, def, true;
|
||||
|
||||
/// Disable Two-Factor remember |> Enabling this would force the users to use a second factor to login every time.
|
||||
@@ -566,7 +570,7 @@ make_config! {
|
||||
/// Max database connection retries |> Number of times to retry the database connection during startup, with 1 second between each retry, set to 0 to retry indefinitely
|
||||
db_connection_retries: u32, false, def, 15;
|
||||
|
||||
/// Timeout when aquiring database connection
|
||||
/// Timeout when acquiring database connection
|
||||
database_timeout: u64, false, def, 30;
|
||||
|
||||
/// Database connection pool size
|
||||
@@ -893,6 +897,10 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
|
||||
err!("`EVENT_CLEANUP_SCHEDULE` is not a valid cron expression")
|
||||
}
|
||||
|
||||
if !cfg.auth_request_purge_schedule.is_empty() && cfg.auth_request_purge_schedule.parse::<Schedule>().is_err() {
|
||||
err!("`AUTH_REQUEST_PURGE_SCHEDULE` is not a valid cron expression")
|
||||
}
|
||||
|
||||
if !cfg.disable_admin_token {
|
||||
match cfg.admin_token.as_ref() {
|
||||
Some(t) if t.starts_with("$argon2") => {
|
||||
|
148 src/db/models/auth_request.rs (new file)
@@ -0,0 +1,148 @@
|
||||
use crate::crypto::ct_eq;
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
|
||||
db_object! {
|
||||
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset, Deserialize, Serialize)]
|
||||
#[diesel(table_name = auth_requests)]
|
||||
#[diesel(treat_none_as_null = true)]
|
||||
#[diesel(primary_key(uuid))]
|
||||
pub struct AuthRequest {
|
||||
pub uuid: String,
|
||||
pub user_uuid: String,
|
||||
pub organization_uuid: Option<String>,
|
||||
|
||||
pub request_device_identifier: String,
|
||||
pub device_type: i32, // https://github.com/bitwarden/server/blob/master/src/Core/Enums/DeviceType.cs
|
||||
|
||||
pub request_ip: String,
|
||||
pub response_device_id: Option<String>,
|
||||
|
||||
pub access_code: String,
|
||||
pub public_key: String,
|
||||
|
||||
pub enc_key: String,
|
||||
|
||||
pub master_password_hash: String,
|
||||
pub approved: Option<bool>,
|
||||
pub creation_date: NaiveDateTime,
|
||||
pub response_date: Option<NaiveDateTime>,
|
||||
|
||||
pub authentication_date: Option<NaiveDateTime>,
|
||||
}
|
||||
}
|
||||
|
||||
impl AuthRequest {
|
||||
pub fn new(
|
||||
user_uuid: String,
|
||||
request_device_identifier: String,
|
||||
device_type: i32,
|
||||
request_ip: String,
|
||||
access_code: String,
|
||||
public_key: String,
|
||||
) -> Self {
|
||||
let now = Utc::now().naive_utc();
|
||||
|
||||
Self {
|
||||
uuid: crate::util::get_uuid(),
|
||||
user_uuid,
|
||||
organization_uuid: None,
|
||||
|
||||
request_device_identifier,
|
||||
device_type,
|
||||
request_ip,
|
||||
response_device_id: None,
|
||||
access_code,
|
||||
public_key,
|
||||
enc_key: String::new(),
|
||||
master_password_hash: String::new(),
|
||||
approved: None,
|
||||
creation_date: now,
|
||||
response_date: None,
|
||||
authentication_date: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use crate::db::DbConn;
|
||||
|
||||
use crate::api::EmptyResult;
|
||||
use crate::error::MapResult;
|
||||
|
||||
impl AuthRequest {
|
||||
pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
|
||||
db_run! { conn:
|
||||
sqlite, mysql {
|
||||
match diesel::replace_into(auth_requests::table)
|
||||
.values(AuthRequestDb::to_db(self))
|
||||
.execute(conn)
|
||||
{
|
||||
Ok(_) => Ok(()),
|
||||
// Record already exists and causes a Foreign Key Violation because replace_into() wants to delete the record first.
|
||||
Err(diesel::result::Error::DatabaseError(diesel::result::DatabaseErrorKind::ForeignKeyViolation, _)) => {
|
||||
diesel::update(auth_requests::table)
|
||||
.filter(auth_requests::uuid.eq(&self.uuid))
|
||||
.set(AuthRequestDb::to_db(self))
|
||||
.execute(conn)
|
||||
.map_res("Error auth_request")
|
||||
}
|
||||
Err(e) => Err(e.into()),
|
||||
}.map_res("Error auth_request")
|
||||
}
|
||||
postgresql {
|
||||
let value = AuthRequestDb::to_db(self);
|
||||
diesel::insert_into(auth_requests::table)
|
||||
.values(&value)
|
||||
.on_conflict(auth_requests::uuid)
|
||||
.do_update()
|
||||
.set(&value)
|
||||
.execute(conn)
|
||||
.map_res("Error saving auth_request")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option<Self> {
|
||||
db_run! {conn: {
|
||||
auth_requests::table
|
||||
.filter(auth_requests::uuid.eq(uuid))
|
||||
.first::<AuthRequestDb>(conn)
|
||||
.ok()
|
||||
.from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn find_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec<Self> {
|
||||
db_run! {conn: {
|
||||
auth_requests::table
|
||||
.filter(auth_requests::user_uuid.eq(user_uuid))
|
||||
.load::<AuthRequestDb>(conn).expect("Error loading auth_requests").from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn find_created_before(dt: &NaiveDateTime, conn: &mut DbConn) -> Vec<Self> {
|
||||
db_run! {conn: {
|
||||
auth_requests::table
|
||||
.filter(auth_requests::creation_date.lt(dt))
|
||||
.load::<AuthRequestDb>(conn).expect("Error loading auth_requests").from_db()
|
||||
}}
|
||||
}
|
||||
|
||||
pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
|
||||
db_run! { conn: {
|
||||
diesel::delete(auth_requests::table.filter(auth_requests::uuid.eq(&self.uuid)))
|
||||
.execute(conn)
|
||||
.map_res("Error deleting auth request")
|
||||
}}
|
||||
}
|
||||
|
||||
pub fn check_access_code(&self, access_code: &str) -> bool {
|
||||
ct_eq(&self.access_code, access_code)
|
||||
}
|
||||
|
||||
pub async fn purge_expired_auth_requests(conn: &mut DbConn) {
|
||||
let expiry_time = Utc::now().naive_utc() - chrono::Duration::minutes(5); //after 5 minutes, clients reject the request
|
||||
for auth_request in Self::find_created_before(&expiry_time, conn).await {
|
||||
auth_request.delete(conn).await.ok();
|
||||
}
|
||||
}
|
||||
}
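purge_expired_auth_requests() drops requests older than five minutes; it is called from purge_auth_requests() (added in api/core/accounts.rs earlier in this changeset) on the new AUTH_REQUEST_PURGE_SCHEDULE cron, which defaults to "30 * * * * *". The real scheduling code is outside this diff; a stripped-down stand-in for that periodic job could look like this:

```rust
// Illustrative stand-in only: the real wiring uses the existing job scheduler and the new
// AUTH_REQUEST_PURGE_SCHEDULE setting, neither of which is part of this diff.
use std::time::Duration;

async fn auth_request_purge_loop(pool: crate::db::DbPool) {
    loop {
        tokio::time::sleep(Duration::from_secs(60)).await;
        if let Ok(mut conn) = pool.get().await {
            // Same call as purge_auth_requests() earlier in this changeset.
            crate::db::models::AuthRequest::purge_expired_auth_requests(&mut conn).await;
        }
    }
}
```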
|
@@ -1,6 +1,7 @@
use chrono::{NaiveDateTime, Utc};

use crate::{crypto, CONFIG};
use core::fmt;

db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
@@ -225,3 +226,90 @@ impl Device {
}}
}
}

pub enum DeviceType {
Android = 0,
Ios = 1,
ChromeExtension = 2,
FirefoxExtension = 3,
OperaExtension = 4,
EdgeExtension = 5,
WindowsDesktop = 6,
MacOsDesktop = 7,
LinuxDesktop = 8,
ChromeBrowser = 9,
FirefoxBrowser = 10,
OperaBrowser = 11,
EdgeBrowser = 12,
IEBrowser = 13,
UnknownBrowser = 14,
AndroidAmazon = 15,
Uwp = 16,
SafariBrowser = 17,
VivaldiBrowser = 18,
VivaldiExtension = 19,
SafariExtension = 20,
Sdk = 21,
Server = 22,
}

impl fmt::Display for DeviceType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DeviceType::Android => write!(f, "Android"),
DeviceType::Ios => write!(f, "iOS"),
DeviceType::ChromeExtension => write!(f, "Chrome Extension"),
DeviceType::FirefoxExtension => write!(f, "Firefox Extension"),
DeviceType::OperaExtension => write!(f, "Opera Extension"),
DeviceType::EdgeExtension => write!(f, "Edge Extension"),
DeviceType::WindowsDesktop => write!(f, "Windows Desktop"),
DeviceType::MacOsDesktop => write!(f, "MacOS Desktop"),
DeviceType::LinuxDesktop => write!(f, "Linux Desktop"),
DeviceType::ChromeBrowser => write!(f, "Chrome Browser"),
DeviceType::FirefoxBrowser => write!(f, "Firefox Browser"),
DeviceType::OperaBrowser => write!(f, "Opera Browser"),
DeviceType::EdgeBrowser => write!(f, "Edge Browser"),
DeviceType::IEBrowser => write!(f, "Internet Explorer"),
DeviceType::UnknownBrowser => write!(f, "Unknown Browser"),
DeviceType::AndroidAmazon => write!(f, "Android Amazon"),
DeviceType::Uwp => write!(f, "UWP"),
DeviceType::SafariBrowser => write!(f, "Safari Browser"),
DeviceType::VivaldiBrowser => write!(f, "Vivaldi Browser"),
DeviceType::VivaldiExtension => write!(f, "Vivaldi Extension"),
DeviceType::SafariExtension => write!(f, "Safari Extension"),
DeviceType::Sdk => write!(f, "SDK"),
DeviceType::Server => write!(f, "Server"),
}
}
}

impl DeviceType {
pub fn from_i32(value: i32) -> DeviceType {
match value {
0 => DeviceType::Android,
1 => DeviceType::Ios,
2 => DeviceType::ChromeExtension,
3 => DeviceType::FirefoxExtension,
4 => DeviceType::OperaExtension,
5 => DeviceType::EdgeExtension,
6 => DeviceType::WindowsDesktop,
7 => DeviceType::MacOsDesktop,
8 => DeviceType::LinuxDesktop,
9 => DeviceType::ChromeBrowser,
10 => DeviceType::FirefoxBrowser,
11 => DeviceType::OperaBrowser,
12 => DeviceType::EdgeBrowser,
13 => DeviceType::IEBrowser,
14 => DeviceType::UnknownBrowser,
15 => DeviceType::AndroidAmazon,
16 => DeviceType::Uwp,
17 => DeviceType::SafariBrowser,
18 => DeviceType::VivaldiBrowser,
19 => DeviceType::VivaldiExtension,
20 => DeviceType::SafariExtension,
21 => DeviceType::Sdk,
22 => DeviceType::Server,
_ => DeviceType::UnknownBrowser,
}
}
}
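The two helpers added above convert between the numeric device type a client reports and a readable label. A minimal sketch of how they compose (describe_device is a hypothetical caller, not part of this change):

// Turn a raw client-supplied device type into a display name.
fn describe_device(raw: i32) -> String {
    // e.g. 9 -> "Chrome Browser"; any unknown value falls back to "Unknown Browser".
    DeviceType::from_i32(raw).to_string()
}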
@@ -94,18 +94,11 @@ impl Group {
}

pub fn set_external_id(&mut self, external_id: Option<String>) {
//Check if external id is empty. We don't want to have
//empty strings in the database
match external_id {
Some(external_id) => {
if external_id.is_empty() {
self.external_id = None;
} else {
self.external_id = Some(external_id)
}
}
None => self.external_id = None,
}
// Check if external_id is empty. We do not want to have empty strings in the database
self.external_id = match external_id {
Some(external_id) if !external_id.trim().is_empty() => Some(external_id),
_ => None,
};
}
}
@@ -1,4 +1,5 @@
mod attachment;
mod auth_request;
mod cipher;
mod collection;
mod device;
@@ -15,9 +16,10 @@ mod two_factor_incomplete;
mod user;

pub use self::attachment::Attachment;
pub use self::auth_request::AuthRequest;
pub use self::cipher::Cipher;
pub use self::collection::{Collection, CollectionCipher, CollectionUser};
pub use self::device::Device;
pub use self::device::{Device, DeviceType};
pub use self::emergency_access::{EmergencyAccess, EmergencyAccessStatus, EmergencyAccessType};
pub use self::event::{Event, EventType};
pub use self::favorite::Favorite;
@@ -434,6 +434,7 @@ impl UserOrganization {
"UserId": self.user_uuid,
"Name": user.name,
"Email": user.email,
"ExternalId": user.external_id,
"Groups": groups,
"Collections": collections,

@@ -804,7 +805,7 @@ impl OrganizationApiKey {
let value = OrganizationApiKeyDb::to_db(self);
diesel::insert_into(organization_api_key::table)
.values(&value)
.on_conflict(organization_api_key::uuid)
.on_conflict((organization_api_key::uuid, organization_api_key::org_uuid))
.do_update()
.set(&value)
.execute(conn)
@@ -286,6 +286,26 @@ table! {
}
}

table! {
auth_requests (uuid) {
uuid -> Text,
user_uuid -> Text,
organization_uuid -> Nullable<Text>,
request_device_identifier -> Text,
device_type -> Integer,
request_ip -> Text,
response_device_id -> Nullable<Text>,
access_code -> Text,
public_key -> Text,
enc_key -> Text,
master_password_hash -> Text,
approved -> Nullable<Bool>,
creation_date -> Timestamp,
response_date -> Nullable<Timestamp>,
authentication_date -> Nullable<Timestamp>,
}
}

joinable!(attachments -> ciphers (cipher_uuid));
joinable!(ciphers -> organizations (organization_uuid));
joinable!(ciphers -> users (user_uuid));
@@ -312,6 +332,7 @@ joinable!(groups_users -> groups (groups_uuid));
joinable!(collections_groups -> collections (collections_uuid));
joinable!(collections_groups -> groups (groups_uuid));
joinable!(event -> users_organizations (uuid));
joinable!(auth_requests -> users (user_uuid));

allow_tables_to_appear_in_same_query!(
attachments,
@@ -335,4 +356,5 @@ allow_tables_to_appear_in_same_query!(
groups_users,
collections_groups,
event,
auth_requests,
);
@@ -286,6 +286,26 @@ table! {
}
}

table! {
auth_requests (uuid) {
uuid -> Text,
user_uuid -> Text,
organization_uuid -> Nullable<Text>,
request_device_identifier -> Text,
device_type -> Integer,
request_ip -> Text,
response_device_id -> Nullable<Text>,
access_code -> Text,
public_key -> Text,
enc_key -> Text,
master_password_hash -> Text,
approved -> Nullable<Bool>,
creation_date -> Timestamp,
response_date -> Nullable<Timestamp>,
authentication_date -> Nullable<Timestamp>,
}
}

joinable!(attachments -> ciphers (cipher_uuid));
joinable!(ciphers -> organizations (organization_uuid));
joinable!(ciphers -> users (user_uuid));
@@ -312,6 +332,7 @@ joinable!(groups_users -> groups (groups_uuid));
joinable!(collections_groups -> collections (collections_uuid));
joinable!(collections_groups -> groups (groups_uuid));
joinable!(event -> users_organizations (uuid));
joinable!(auth_requests -> users (user_uuid));

allow_tables_to_appear_in_same_query!(
attachments,
@@ -335,4 +356,5 @@ allow_tables_to_appear_in_same_query!(
groups_users,
collections_groups,
event,
auth_requests,
);
@@ -286,6 +286,26 @@ table! {
}
}

table! {
auth_requests (uuid) {
uuid -> Text,
user_uuid -> Text,
organization_uuid -> Nullable<Text>,
request_device_identifier -> Text,
device_type -> Integer,
request_ip -> Text,
response_device_id -> Nullable<Text>,
access_code -> Text,
public_key -> Text,
enc_key -> Text,
master_password_hash -> Text,
approved -> Nullable<Bool>,
creation_date -> Timestamp,
response_date -> Nullable<Timestamp>,
authentication_date -> Nullable<Timestamp>,
}
}

joinable!(attachments -> ciphers (cipher_uuid));
joinable!(ciphers -> organizations (organization_uuid));
joinable!(ciphers -> users (user_uuid));
@@ -313,6 +333,7 @@ joinable!(groups_users -> groups (groups_uuid));
joinable!(collections_groups -> collections (collections_uuid));
joinable!(collections_groups -> groups (groups_uuid));
joinable!(event -> users_organizations (uuid));
joinable!(auth_requests -> users (user_uuid));

allow_tables_to_appear_in_same_query!(
attachments,
@@ -336,4 +357,5 @@ allow_tables_to_appear_in_same_query!(
groups_users,
collections_groups,
event,
auth_requests,
);
10
src/main.rs
@@ -82,9 +82,12 @@ mod mail;
mod ratelimit;
mod util;

use crate::api::purge_auth_requests;
use crate::api::WS_ANONYMOUS_SUBSCRIPTIONS;
pub use config::CONFIG;
pub use error::{Error, MapResult};
use rocket::data::{Limits, ToByteUnit};
use std::sync::Arc;
pub use util::is_running_in_docker;

#[rocket::main]
@@ -533,6 +536,7 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error>
.register([basepath, "/admin"].concat(), api::admin_catchers())
.manage(pool)
.manage(api::start_notification_server())
.manage(Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS))
.attach(util::AppHeaders())
.attach(util::Cors())
.attach(util::BetterLogging(extra_debug))
@@ -608,6 +612,12 @@ fn schedule_jobs(pool: db::DbPool) {
}));
}

if !CONFIG.auth_request_purge_schedule().is_empty() {
sched.add(Job::new(CONFIG.auth_request_purge_schedule().parse().unwrap(), || {
runtime.spawn(purge_auth_requests(pool.clone()));
}));
}

// Cleanup the event table of records x days old.
if CONFIG.org_events_enabled()
&& !CONFIG.event_cleanup_schedule().is_empty()
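The new scheduled job above spawns purge_auth_requests from the api module whenever the configured cron expression fires. Its body is not part of this diff; a plausible sketch, assuming it only borrows a connection from the pool and delegates to the AuthRequest cleanup added earlier (the real function may differ):

// Hypothetical sketch of the api-level wrapper, not taken from this patch.
pub async fn purge_auth_requests(pool: db::DbPool) {
    debug!("Purging expired auth requests");
    if let Ok(mut conn) = pool.get().await {
        AuthRequest::purge_expired_auth_requests(&mut conn).await;
    } else {
        error!("Failed to get a DB connection while purging auth requests");
    }
}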
71
src/static/scripts/admin.js
vendored
@@ -65,8 +65,79 @@ function _post(url, successMsg, errMsg, body, reload_page = true) {
});
}

// Bootstrap Theme Selector
const getStoredTheme = () => localStorage.getItem("theme");
const setStoredTheme = theme => localStorage.setItem("theme", theme);

const getPreferredTheme = () => {
const storedTheme = getStoredTheme();
if (storedTheme) {
return storedTheme;
}

return window.matchMedia("(prefers-color-scheme: dark)").matches ? "dark" : "light";
};

const setTheme = theme => {
if (theme === "auto" && window.matchMedia("(prefers-color-scheme: dark)").matches) {
document.documentElement.setAttribute("data-bs-theme", "dark");
} else {
document.documentElement.setAttribute("data-bs-theme", theme);
}
};

setTheme(getPreferredTheme());

const showActiveTheme = (theme, focus = false) => {
const themeSwitcher = document.querySelector("#bd-theme");

if (!themeSwitcher) {
return;
}

const themeSwitcherText = document.querySelector("#bd-theme-text");
const activeThemeIcon = document.querySelector(".theme-icon-active use");
const btnToActive = document.querySelector(`[data-bs-theme-value="${theme}"]`);
const svgOfActiveBtn = btnToActive.querySelector("span use").innerText;

document.querySelectorAll("[data-bs-theme-value]").forEach(element => {
element.classList.remove("active");
element.setAttribute("aria-pressed", "false");
});

btnToActive.classList.add("active");
btnToActive.setAttribute("aria-pressed", "true");
activeThemeIcon.innerText = svgOfActiveBtn;
const themeSwitcherLabel = `${themeSwitcherText.textContent} (${btnToActive.dataset.bsThemeValue})`;
themeSwitcher.setAttribute("aria-label", themeSwitcherLabel);

if (focus) {
themeSwitcher.focus();
}
};

window.matchMedia("(prefers-color-scheme: dark)").addEventListener("change", () => {
const storedTheme = getStoredTheme();
if (storedTheme !== "light" && storedTheme !== "dark") {
setTheme(getPreferredTheme());
}
});


// onLoad events
document.addEventListener("DOMContentLoaded", (/*event*/) => {
showActiveTheme(getPreferredTheme());

document.querySelectorAll("[data-bs-theme-value]")
.forEach(toggle => {
toggle.addEventListener("click", () => {
const theme = toggle.getAttribute("data-bs-theme-value");
setStoredTheme(theme);
setTheme(theme);
showActiveTheme(theme, true);
});
});

// get current URL path and assign "active" class to the correct nav-item
const pathname = window.location.pathname;
if (pathname === "") return;
4
src/static/scripts/admin_diagnostics.js
vendored
@@ -1,6 +1,6 @@
"use strict";
/* eslint-env es2017, browser */
/* global BASE_URL:readable, BSN:readable */
/* global BASE_URL:readable, bootstrap:readable */

var dnsCheck = false;
var timeCheck = false;
@@ -135,7 +135,7 @@ function copyToClipboard(event) {
document.execCommand("copy");
tmpCopyEl.remove();

new BSN.Toast("#toastClipboardCopy").show();
new bootstrap.Toast("#toastClipboardCopy").show();
}

function checkTimeDrift(utcTimeA, utcTimeB, statusPrefix) {
14
src/static/scripts/admin_users.js
vendored
@@ -141,19 +141,20 @@ function resendUserInvite (event) {
const ORG_TYPES = {
"0": {
"name": "Owner",
"color": "orange"
"bg": "orange",
"font": "black"
},
"1": {
"name": "Admin",
"color": "blueviolet"
"bg": "blueviolet"
},
"2": {
"name": "User",
"color": "blue"
"bg": "blue"
},
"3": {
"name": "Manager",
"color": "green"
"bg": "green"
},
};

@@ -227,7 +228,10 @@ function initUserTable() {
// Color all the org buttons per type
document.querySelectorAll("button[data-vw-org-type]").forEach(function(e) {
const orgType = ORG_TYPES[e.dataset.vwOrgType];
e.style.backgroundColor = orgType.color;
e.style.backgroundColor = orgType.bg;
if (orgType.font !== undefined) {
e.style.color = orgType.font;
}
e.title = orgType.name;
});
5991
src/static/scripts/bootstrap-native.js
vendored
File diff suppressed because it is too large
6313
src/static/scripts/bootstrap.bundle.js
vendored
Normal file
File diff suppressed because it is too large
2233
src/static/scripts/bootstrap.css
vendored
File diff suppressed because it is too large
47
src/static/scripts/datatables.css
vendored
@@ -4,10 +4,10 @@
*
* To rebuild or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#bs5/dt-1.13.4
* https://datatables.net/download/#bs5/dt-1.13.6
*
* Included libraries:
* DataTables 1.13.4
* DataTables 1.13.6
*/

@charset "UTF-8";
@@ -15,6 +15,13 @@
--dt-row-selected: 13, 110, 253;
--dt-row-selected-text: 255, 255, 255;
--dt-row-selected-link: 9, 10, 11;
--dt-row-stripe: 0, 0, 0;
--dt-row-hover: 0, 0, 0;
--dt-column-ordering: 0, 0, 0;
--dt-html-background: white;
}
:root.dark {
--dt-html-background: rgb(33, 37, 41);
}

table.dataTable td.dt-control {
@@ -22,25 +29,19 @@ table.dataTable td.dt-control {
cursor: pointer;
}
table.dataTable td.dt-control:before {
height: 1em;
width: 1em;
margin-top: -9px;
display: inline-block;
color: white;
border: 0.15em solid white;
border-radius: 1em;
box-shadow: 0 0 0.2em #444;
box-sizing: content-box;
text-align: center;
text-indent: 0 !important;
font-family: "Courier New", Courier, monospace;
line-height: 1em;
content: "+";
background-color: #31b131;
color: rgba(0, 0, 0, 0.5);
content: "►";
}
table.dataTable tr.dt-hasChild td.dt-control:before {
content: "-";
background-color: #d33333;
content: "▼";
}

html.dark table.dataTable td.dt-control:before {
color: rgba(255, 255, 255, 0.5);
}
html.dark table.dataTable tr.dt-hasChild td.dt-control:before {
color: rgba(255, 255, 255, 0.5);
}

table.dataTable thead > tr > th.sorting, table.dataTable thead > tr > th.sorting_asc, table.dataTable thead > tr > th.sorting_desc, table.dataTable thead > tr > th.sorting_asc_disabled, table.dataTable thead > tr > th.sorting_desc_disabled,
@@ -303,14 +304,14 @@ table.dataTable > tbody > tr.selected a {
color: rgb(var(--dt-row-selected-link));
}
table.dataTable.table-striped > tbody > tr.odd > * {
box-shadow: inset 0 0 0 9999px rgba(0, 0, 0, 0.05);
box-shadow: inset 0 0 0 9999px rgba(var(--dt-row-stripe), 0.05);
}
table.dataTable.table-striped > tbody > tr.odd.selected > * {
box-shadow: inset 0 0 0 9999px rgba(13, 110, 253, 0.95);
box-shadow: inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.95);
}
table.dataTable.table-hover > tbody > tr:hover > * {
box-shadow: inset 0 0 0 9999px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 0 0 9999px rgba(var(--dt-row-hover), 0.075);
}
table.dataTable.table-hover > tbody > tr.selected:hover > * {
box-shadow: inset 0 0 0 9999px rgba(13, 110, 253, 0.975);
@@ -441,4 +442,10 @@ div.table-responsive > div.dataTables_wrapper > div.row > div[class^=col-]:last-
padding-right: 0;
}

:root[data-bs-theme=dark] {
--dt-row-hover: 255, 255, 255;
--dt-row-stripe: 255, 255, 255;
--dt-column-ordering: 255, 255, 255;
}
91
src/static/scripts/datatables.js
vendored
@@ -4,20 +4,20 @@
*
* To rebuild or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#bs5/dt-1.13.4
* https://datatables.net/download/#bs5/dt-1.13.6
*
* Included libraries:
* DataTables 1.13.4
* DataTables 1.13.6
*/

/*! DataTables 1.13.4
/*! DataTables 1.13.6
* ©2008-2023 SpryMedia Ltd - datatables.net/license
*/

/**
* @summary DataTables
* @description Paginate, search and order HTML tables
* @version 1.13.4
* @version 1.13.6
* @author SpryMedia Ltd
* @contact www.datatables.net
* @copyright SpryMedia Ltd.
@@ -50,7 +50,7 @@
// returns a factory function that expects the window object
var jq = require('jquery');

if (typeof window !== 'undefined') {
if (typeof window === 'undefined') {
module.exports = function (root, $) {
if ( ! root ) {
// CommonJS environments without a window global must pass a
@@ -1396,7 +1396,7 @@


var _isNumber = function ( d, decimalPoint, formatted ) {
let type = typeof d;
var type = typeof d;
var strType = type === 'string';

if ( type === 'number' || type === 'bigint') {
@@ -1530,7 +1530,9 @@


var _stripHtml = function ( d ) {
return d.replace( _re_html, '' );
return d
.replace( _re_html, '' ) // Complete tags
.replace(/<script/i, ''); // Safety for incomplete script tag
};


@@ -1904,7 +1906,10 @@
continue;
}

if ( data === null || data[ a[i] ] === undefined ) {
if (data === null || data[ a[i] ] === null) {
return null;
}
else if ( data === undefined || data[ a[i] ] === undefined ) {
return undefined;
}

@@ -2351,6 +2356,12 @@
oCol.aDataSort = [ oOptions.iDataSort ];
}
_fnMap( oCol, oOptions, "aDataSort" );

// Fall back to the aria-label attribute on the table header if no ariaTitle is
// provided.
if (! oCol.ariaTitle) {
oCol.ariaTitle = th.attr("aria-label");
}
}

/* Cache the data get and set functions for speed */
@@ -4075,11 +4086,16 @@
settings.iDraw++;
_fnProcessingDisplay( settings, true );

// Keep track of drawHold state to handle scrolling after the Ajax call
var drawHold = settings._drawHold;

_fnBuildAjax(
settings,
_fnAjaxParameters( settings ),
function(json) {
settings._drawHold = drawHold;
_fnAjaxUpdateDraw( settings, json );
settings._drawHold = false;
}
);
}
@@ -4343,7 +4359,7 @@
_fnThrottle( searchFn, searchDelay ) :
searchFn
)
.on( 'mouseup', function(e) {
.on( 'mouseup.DT', function(e) {
// Edge fix! Edge 17 does not trigger anything other than mouse events when clicking
// on the clear icon (Edge bug 17584515). This is safe in other browsers as `searchFn`
// checks the value to see if it has changed. In other browsers it won't have.
@@ -4409,7 +4425,7 @@
if ( _fnDataSource( oSettings ) != 'ssp' )
{
/* Global filter */
_fnFilter( oSettings, oInput.sSearch, iForce, fnRegex(oInput), oInput.bSmart, oInput.bCaseInsensitive, oInput.return );
_fnFilter( oSettings, oInput.sSearch, iForce, fnRegex(oInput), oInput.bSmart, oInput.bCaseInsensitive );
fnSaveFilter( oInput );

/* Now do the individual column filter */
@@ -4578,11 +4594,15 @@
*
* ^(?=.*?\bone\b)(?=.*?\btwo three\b)(?=.*?\bfour\b).*$
*/
var a = $.map( search.match( /"[^"]+"|[^ ]+/g ) || [''], function ( word ) {
var a = $.map( search.match( /["\u201C][^"\u201D]+["\u201D]|[^ ]+/g ) || [''], function ( word ) {
if ( word.charAt(0) === '"' ) {
var m = word.match( /^"(.*)"$/ );
word = m ? m[1] : word;
}
else if ( word.charAt(0) === '\u201C' ) {
var m = word.match( /^\u201C(.*)\u201D$/ );
word = m ? m[1] : word;
}

return word.replace('"', '');
} );
@@ -9386,7 +9406,8 @@
* Set the jQuery or window object to be used by DataTables
*
* @param {*} module Library / container object
* @param {string} type Library or container type `lib` or `win`.
* @param {string} [type] Library or container type `lib`, `win` or `datetime`.
* If not provided, automatic detection is attempted.
*/
DataTable.use = function (module, type) {
if (type === 'lib' || module.fn) {
@@ -9396,6 +9417,9 @@
window = module;
document = module.document;
}
else if (type === 'datetime' || module.type === 'DateTime') {
DataTable.DateTime = module;
}
}

/**
@@ -9755,7 +9779,9 @@
resolved._;
}

return resolved.replace( '%d', plural ); // nb: plural might be undefined,
return typeof resolved === 'string'
? resolved.replace( '%d', plural ) // nb: plural might be undefined,
: resolved;
} );
/**
* Version string for plug-ins to check compatibility. Allowed format is
@@ -9765,7 +9791,7 @@
* @type string
* @default Version number
*/
DataTable.version = "1.13.4";
DataTable.version = "1.13.6";

/**
* Private data store, containing all of the settings objects that are
@@ -14189,7 +14215,7 @@
*
* @type string
*/
build:"bs5/dt-1.13.4",
build:"bs5/dt-1.13.6",


/**
@@ -14830,7 +14856,7 @@
var btnDisplay, btnClass;

var attach = function( container, buttons ) {
var i, ien, node, button, tabIndex;
var i, ien, node, button;
var disabledClass = classes.sPageButtonDisabled;
var clickHandler = function ( e ) {
_fnPageChange( settings, e.data.action, true );
@@ -14845,9 +14871,10 @@
attach( inner, button );
}
else {
var disabled = false;

btnDisplay = null;
btnClass = button;
tabIndex = settings.iTabIndex;

switch ( button ) {
case 'ellipsis':
@@ -14858,8 +14885,7 @@
btnDisplay = lang.sFirst;

if ( page === 0 ) {
tabIndex = -1;
btnClass += ' ' + disabledClass;
disabled = true;
}
break;

@@ -14867,8 +14893,7 @@
btnDisplay = lang.sPrevious;

if ( page === 0 ) {
tabIndex = -1;
btnClass += ' ' + disabledClass;
disabled = true;
}
break;

@@ -14876,8 +14901,7 @@
btnDisplay = lang.sNext;

if ( pages === 0 || page === pages-1 ) {
tabIndex = -1;
btnClass += ' ' + disabledClass;
disabled = true;
}
break;

@@ -14885,8 +14909,7 @@
btnDisplay = lang.sLast;

if ( pages === 0 || page === pages-1 ) {
tabIndex = -1;
btnClass += ' ' + disabledClass;
disabled = true;
}
break;

@@ -14899,18 +14922,20 @@

if ( btnDisplay !== null ) {
var tag = settings.oInit.pagingTag || 'a';
var disabled = btnClass.indexOf(disabledClass) !== -1;

if (disabled) {
btnClass += ' ' + disabledClass;
}

node = $('<'+tag+'>', {
'class': classes.sPageButton+' '+btnClass,
'aria-controls': settings.sTableId,
'aria-disabled': disabled ? 'true' : null,
'aria-label': aria[ button ],
'aria-role': 'link',
'role': 'link',
'aria-current': btnClass === classes.sPageButtonActive ? 'page' : null,
'data-dt-idx': button,
'tabindex': tabIndex,
'tabindex': disabled ? -1 : settings.iTabIndex,
'id': idx === 0 && typeof button === 'string' ?
settings.sTableId +'_'+ button :
null
@@ -15041,7 +15066,7 @@
return -Infinity;
}

let type = typeof d;
var type = typeof d;

if (type === 'number' || type === 'bigint') {
return d;
@@ -15415,7 +15440,7 @@
var __thousands = ',';
var __decimal = '.';

if (Intl) {
if (window.Intl !== undefined) {
try {
var num = new Intl.NumberFormat().formatToParts(100000.1);

@@ -15718,7 +15743,7 @@
}
};

if (typeof window !== 'undefined') {
if (typeof window === 'undefined') {
module.exports = function (root, $) {
if ( ! root ) {
// CommonJS environments without a window global must pass a
@@ -15856,10 +15881,10 @@ DataTable.ext.renderer.pageButton.bootstrap = function ( settings, host, idx, bu
'aria-controls': settings.sTableId,
'aria-disabled': disabled ? 'true' : null,
'aria-label': aria[ button ],
'aria-role': 'link',
'role': 'link',
'aria-current': btnClass === 'active' ? 'page' : null,
'data-dt-idx': button,
'tabindex': settings.iTabIndex,
'tabindex': disabled ? -1 : settings.iTabIndex,
'class': 'page-link'
} )
.html( btnDisplay )

File diff suppressed because it is too large
@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html lang="en">
<html lang="en" data-bs-theme="auto">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1" />
@@ -10,7 +10,7 @@
<link rel="stylesheet" href="{{urlpath}}/vw_static/admin.css" />
<script src="{{urlpath}}/vw_static/admin.js"></script>
</head>
<body class="bg-light">
<body>
<nav class="navbar navbar-expand-md navbar-dark bg-dark mb-4 shadow fixed-top">
<div class="container-xl">
<a class="navbar-brand" href="{{urlpath}}/admin"><img class="vaultwarden-icon" src="{{urlpath}}/vw_static/vaultwarden-icon.png" alt="V">aultwarden Admin</a>
@@ -39,9 +39,53 @@
</li>
</ul>

<ul class="navbar-nav">
<li class="nav-item dropdown">
<button
class="btn btn-link nav-link py-0 px-0 px-md-2 dropdown-toggle d-flex align-items-center"
id="bd-theme" type="button" aria-expanded="false" data-bs-toggle="dropdown"
data-bs-display="static" aria-label="Toggle theme (auto)">
<span class="my-1 fs-4 theme-icon-active">
<use>☯</use>
</span>
<span class="d-md-none ms-2" id="bd-theme-text">Toggle theme</span>
</button>
<ul class="dropdown-menu dropdown-menu-end" aria-labelledby="bd-theme-text">
<li>
<button type="button" class="dropdown-item d-flex align-items-center"
data-bs-theme-value="light" aria-pressed="false">
<span class="me-2 fs-4 theme-icon">
<use>☀</use>
</span>
Light
</button>
</li>
<li>
<button type="button" class="dropdown-item d-flex align-items-center"
data-bs-theme-value="dark" aria-pressed="false">
<span class="me-2 fs-4 theme-icon">
<use>★</use>
</span>
Dark
</button>
</li>
<li>
<button type="button" class="dropdown-item d-flex align-items-center active"
data-bs-theme-value="auto" aria-pressed="true">
<span class="me-2 fs-4 theme-icon">
<use>☯</use>
</span>
Auto
</button>
</li>
</ul>
</li>
</ul>

{{#if logged_in}}
<a class="btn btn-sm btn-secondary" href="{{urlpath}}/admin/logout">Log Out</a>
{{/if}}

</div>
</div>
</nav>
@@ -49,6 +93,6 @@
{{> (lookup this "page_content") }}

<!-- This script needs to be at the bottom, else it will fail! -->
<script src="{{urlpath}}/vw_static/bootstrap-native.js"></script>
<script src="{{urlpath}}/vw_static/bootstrap.bundle.js"></script>
</body>
</html>
@@ -1,5 +1,5 @@
<main class="container-xl">
<div id="diagnostics-block" class="my-3 p-3 bg-white rounded shadow">
<div id="diagnostics-block" class="my-3 p-3 rounded shadow">
<h6 class="border-bottom pb-2 mb-2">Diagnostics</h6>

<h3>Versions</h3>
@@ -8,8 +8,8 @@
<dl class="row">
<dt class="col-sm-5">Server Installed
<span class="badge bg-success d-none" id="server-success" title="Latest version is installed.">Ok</span>
<span class="badge bg-warning d-none" id="server-warning" title="There seems to be an update available.">Update</span>
<span class="badge bg-info d-none" id="server-branch" title="This is a branched version.">Branched</span>
<span class="badge bg-warning text-dark d-none" id="server-warning" title="There seems to be an update available.">Update</span>
<span class="badge bg-info text-dark d-none" id="server-branch" title="This is a branched version.">Branched</span>
</dt>
<dd class="col-sm-7">
<span id="server-installed">{{page_data.current_release}}</span>
@@ -1,15 +1,15 @@
<main class="container-xl">
{{#if error}}
<div class="align-items-center p-3 mb-3 text-white-50 bg-warning rounded shadow">
<div class="align-items-center p-3 mb-3 text-opacity-50 text-dark bg-warning rounded shadow">
<div>
<h6 class="mb-0 text-white">{{error}}</h6>
<h6 class="mb-0 text-dark">{{error}}</h6>
</div>
</div>
{{/if}}

<div class="align-items-center p-3 mb-3 text-white-50 bg-danger rounded shadow">
<div class="align-items-center p-3 mb-3 text-opacity-75 text-light bg-danger rounded shadow">
<div>
<h6 class="mb-0 text-white">Authentication key needed to continue</h6>
<h6 class="mb-0 text-light">Authentication key needed to continue</h6>
<small>Please provide it below:</small>

<form class="form-inline" method="post" action="{{urlpath}}/admin">
@@ -17,7 +17,7 @@
{{#if redirect}}
<input type="hidden" id="redirect" name="redirect" value="/{{redirect}}">
{{/if}}
<button type="submit" class="btn btn-primary">Enter</button>
<button type="submit" class="btn btn-primary mt-2">Enter</button>
</form>
</div>
</div>
@@ -1,5 +1,5 @@
<main class="container-xl">
<div id="organizations-block" class="my-3 p-3 bg-white rounded shadow">
<div id="organizations-block" class="my-3 p-3 rounded shadow">
<h6 class="border-bottom pb-2 mb-3">Organizations</h6>
<div class="table-responsive-xl small">
<table id="orgs-table" class="table table-sm table-striped table-hover">
@@ -59,7 +59,7 @@
</main>

<link rel="stylesheet" href="{{urlpath}}/vw_static/datatables.css" />
<script src="{{urlpath}}/vw_static/jquery-3.6.4.slim.js"></script>
<script src="{{urlpath}}/vw_static/jquery-3.7.0.slim.js"></script>
<script src="{{urlpath}}/vw_static/datatables.js"></script>
<script src="{{urlpath}}/vw_static/admin_organizations.js"></script>
<script src="{{urlpath}}/vw_static/jdenticon.js"></script>
@@ -17,7 +17,7 @@
<form class="form needs-validation" id="config-form" novalidate>
{{#each page_data.config}}
{{#if groupdoc}}
<div class="card bg-light mb-3">
<div class="card mb-3">
<button id="b_{{group}}" type="button" class="card-header text-start btn btn-link text-decoration-none" aria-expanded="false" aria-controls="g_{{group}}" data-bs-toggle="collapse" data-bs-target="#g_{{group}}">{{groupdoc}}</button>
<div id="g_{{group}}" class="card-body collapse">
{{#each elements}}
@@ -64,7 +64,7 @@
{{/if}}
{{/each}}

<div class="card bg-light mb-3">
<div class="card mb-3">
<button id="b_readonly" type="button" class="card-header text-start btn btn-link text-decoration-none" aria-expanded="false" aria-controls="g_readonly"
data-bs-toggle="collapse" data-bs-target="#g_readonly">Read-Only Config</button>
<div id="g_readonly" class="card-body collapse">
@@ -119,7 +119,7 @@
</div>

{{#if page_data.can_backup}}
<div class="card bg-light mb-3">
<div class="card mb-3">
<button id="b_database" type="button" class="card-header text-start btn btn-link text-decoration-none" aria-expanded="false" aria-controls="g_database"
data-bs-toggle="collapse" data-bs-target="#g_database">Backup Database</button>
<div id="g_database" class="card-body collapse">
@@ -1,5 +1,5 @@
<main class="container-xl">
<div id="users-block" class="my-3 p-3 bg-white rounded shadow">
<div id="users-block" class="my-3 p-3 rounded shadow">
<h6 class="border-bottom pb-2 mb-3">Registered Users</h6>
<div class="table-responsive-xl small">
<table id="users-table" class="table table-sm table-striped table-hover">
@@ -30,7 +30,7 @@
<span class="badge bg-success me-2" title="2FA is enabled">2FA</span>
{{/if}}
{{#case _Status 1}}
<span class="badge bg-warning me-2" title="User is invited">Invited</span>
<span class="badge bg-warning text-dark me-2" title="User is invited">Invited</span>
{{/case}}
{{#if EmailVerified}}
<span class="badge bg-success me-2" title="Email has been verified">Verified</span>
@@ -140,7 +140,7 @@
</main>

<link rel="stylesheet" href="{{urlpath}}/vw_static/datatables.css" />
<script src="{{urlpath}}/vw_static/jquery-3.6.4.slim.js"></script>
<script src="{{urlpath}}/vw_static/jquery-3.7.0.slim.js"></script>
<script src="{{urlpath}}/vw_static/datatables.js"></script>
<script src="{{urlpath}}/vw_static/admin_users.js"></script>
<script src="{{urlpath}}/vw_static/jdenticon.js"></script>
@@ -80,6 +80,7 @@ impl Fairing for AppHeaders {
https://app.simplelogin.io/api/ \
https://app.anonaddy.com/api/ \
https://api.fastmail.com/ \
https://api.forwardemail.net \
;\
",
icon_service_csp = CONFIG._icon_service_csp(),