Compare commits

106 Commits:

c666497130, 2620a1ac8c, ffdcafa044, 56ffec40f4, 96c2416903, 340d42a1ca, e19420160f, 1741316f42,
4f08167d6f, fef76e2f6f, f16d56cb27, 120b286f2b, 7f437b6947, 8d6e62e18b, d0ec410b73, c546a59c38,
e5ec245626, 6ea95d1ede, 88bea44dd8, 8ee5d51bd4, c640abbcd7, 13598c098f, a622b4d2fb, 403f35b571,
3968bc8016, ff66368cb6, 3fb419e704, 832f838ddd, 18703bf195, ff8e88a5df, 72e1946ce5, ee391720aa,
e3a2dfffab, 8bf1278b1b, 00ce943ea5, b67eacdfde, 0dcea75764, 0c5532d8b5, 46e0f3c43a, 2cd17fe7af,
f44b2611e6, 82fee0ede3, 49579e4ce7, 9254cf9d9c, ff0fee3690, 0778bd4bd5, 0cd065d354, 8615736e84,
5772836be5, c380d9c379, cea7a30d82, 06cde29419, 20f5988174, b491cfe0b0, fc513413ea, 3f7e4712cd,
c2ef331df9, 5fef7983f4, 29ed82a359, 7d5186e40a, 99270612ba, c7b5b6ee07, 848d17ffb9, 47e8aa29e1,
f270f2ed65, aba5b234af, 9133e2927d, 38104ba7cf, c42bcae224, 764e51bbe9, 8e6c6a1dc4, 7a9cfc45da,
9e24b9065c, 1c2b376ca2, 746ce2afb4, 029008bad5, d3449bfa00, a9a5706764, 3ff8014add, e60bdc7efe,
cccd8262fa, 68e5d95d25, 5f458b288a, e9ee8ac2fa, 8a4dfc3bbe, 4f86517501, 7cb19ef767, 565439a914,
b8010be26b, f76b8a32ca, 39167d333a, 0d63132987, 7b5d5d1302, 0dc98bda23, f9a062cac8, 6ad4ccd901,
ee6ceaa923, 20b393d354, f707f86c8e, daea54b288, cc021a4784, e3c4609c2a, 89a68741d6, 2421d49d9a,
1db37bf3d0, d75a80bd2d
@@ -56,6 +56,11 @@
 # WEBSOCKET_ADDRESS=0.0.0.0
 # WEBSOCKET_PORT=3012

+## Controls whether users are allowed to create Bitwarden Sends.
+## This setting applies globally to all users.
+## To control this on a per-org basis instead, use the "Disable Send" org policy.
+# SENDS_ALLOWED=true
+
 ## Job scheduler settings
 ##
 ## Job schedules use a cron-like syntax (as parsed by https://crates.io/crates/cron),
@@ -101,7 +106,7 @@
 ## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
 ## this setting only prevents vaultwarden from automatically enabling it on start.
 ## Please read project wiki page about this setting first before changing the value as it can
 ## cause performance degradation or might render the service unable to start.
 # ENABLE_DB_WAL=true

 ## Database connection retries
@@ -189,20 +194,28 @@
 ## Name shown in the invitation emails that don't come from a specific organization
 # INVITATION_ORG_NAME=Vaultwarden

-## Per-organization attachment limit (KB)
-## Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more
+## Per-organization attachment storage limit (KB)
+## Max kilobytes of attachment storage allowed per organization.
+## When this limit is reached, organization members will not be allowed to upload further attachments for ciphers owned by that organization.
 # ORG_ATTACHMENT_LIMIT=
-## Per-user attachment limit (KB).
-## Limit in kilobytes for a users attachments, once the limit is exceeded it won't be possible to upload more
+## Per-user attachment storage limit (KB)
+## Max kilobytes of attachment storage allowed per user.
+## When this limit is reached, the user will not be allowed to upload further attachments.
 # USER_ATTACHMENT_LIMIT=

+## Number of days to wait before auto-deleting a trashed item.
+## If unset (the default), trashed items are not auto-deleted.
+## This setting applies globally, so make sure to inform all users of any changes to this setting.
+# TRASH_AUTO_DELETE_DAYS=
+
 ## Controls the PBBKDF password iterations to apply on the server
 ## The change only applies when the password is changed
 # PASSWORD_ITERATIONS=100000

-## Whether password hint should be sent into the error response when the client request it
-# SHOW_PASSWORD_HINT=true
+## Controls whether a password hint should be shown directly in the web page if
+## SMTP service is not configured. Not recommended for publicly-accessible instances
+## as this provides unauthenticated access to potentially sensitive data.
+# SHOW_PASSWORD_HINT=false

 ## Domain settings
 ## The domain must match the address from where you access the server
@@ -247,12 +260,13 @@
 ## You can disable this, so that only the current TOTP Code is allowed.
 ## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid.
 ## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid.
-# AUTHENTICATOR_DISABLE_TIME_DRIFT = false
+# AUTHENTICATOR_DISABLE_TIME_DRIFT=false

-## Rocket specific settings, check Rocket documentation to learn more
-# ROCKET_ENV=staging
-# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
-# ROCKET_PORT=8000
+## Rocket specific settings
+## See https://rocket.rs/v0.4/guide/configuration/ for more details.
+# ROCKET_ADDRESS=0.0.0.0
+# ROCKET_PORT=80 # Defaults to 80 in the Docker images, or 8000 otherwise.
+# ROCKET_WORKERS=10
 # ROCKET_TLS={certs="/path/to/certs.pem",key="/path/to/key.pem"}

 ## Mail specific settings, set SMTP_HOST and SMTP_FROM to enable the mail service.
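The job scheduler comments above reference the cron-like syntax parsed by the `cron` crate. A minimal Rust sketch of how such a schedule string behaves; the expression and the three-run preview are illustrative examples, not values taken from `.env.template`:

```rust
use std::str::FromStr;

use chrono::Utc;
use cron::Schedule;

fn main() {
    // Hypothetical example expression (sec min hour day-of-month month day-of-week):
    // fire at second 0 of minute 30 of every hour.
    let schedule = Schedule::from_str("0 30 * * * *").expect("invalid cron expression");

    // Preview the next three firing times in UTC, the same way a scheduler would.
    for next in schedule.upcoming(Utc).take(3) {
        println!("next run: {next}");
    }
}
```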

.github/ISSUE_TEMPLATE/config.yml (vendored): 4 changed lines

@@ -1,7 +1,7 @@
 blank_issues_enabled: false
 contact_links:
-- name: Discourse forum for bitwarden_rs
-url: https://bitwardenrs.discourse.group/
+- name: Discourse forum for vaultwarden
+url: https://vaultwarden.discourse.group/
 about: Use this forum to request features or get help with usage/configuration.
 - name: GitHub Discussions for vaultwarden
 url: https://github.com/dani-garcia/vaultwarden/discussions

.github/security-contact.gif (vendored): new binary file, 2.3 KiB

.github/workflows/build.yml (vendored): 82 changed lines

@@ -2,11 +2,9 @@ name: Build

 on:
 push:
-pull_request:
-# Ignore when there are only changes done too one of these paths
 paths-ignore:
-- "**.md"
-- "**.txt"
+- "*.md"
+- "*.txt"
 - ".dockerignore"
 - ".env.template"
 - ".gitattributes"
@@ -17,9 +15,30 @@ on:
 - "tools/**"
 - ".github/FUNDING.yml"
 - ".github/ISSUE_TEMPLATE/**"
+- ".github/security-contact.gif"
+pull_request:
+# Ignore when there are only changes done too one of these paths
+paths-ignore:
+- "*.md"
+- "*.txt"
+- ".dockerignore"
+- ".env.template"
+- ".gitattributes"
+- ".gitignore"
+- "azure-pipelines.yml"
+- "docker/**"
+- "hooks/**"
+- "tools/**"
+- ".github/FUNDING.yml"
+- ".github/ISSUE_TEMPLATE/**"
+- ".github/security-contact.gif"

 jobs:
 build:
+# Make warnings errors, this is to prevent warnings slipping through.
+# This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
+env:
+RUSTFLAGS: "-D warnings"
 strategy:
 fail-fast: false
 matrix:
@@ -32,28 +51,16 @@ jobs:
 include:
 - target-triple: x86_64-unknown-linux-gnu
 host-triple: x86_64-unknown-linux-gnu
-features: "sqlite,mysql,postgresql"
+features: [sqlite,mysql,postgresql] # Remember to update the `cargo test` to match the amount of features
 channel: nightly
 os: ubuntu-18.04
-ext:
+ext: ""
 # - target-triple: x86_64-unknown-linux-gnu
 # host-triple: x86_64-unknown-linux-gnu
 # features: "sqlite,mysql,postgresql"
 # channel: stable
 # os: ubuntu-18.04
-# ext:
-# - target-triple: x86_64-unknown-linux-musl
-# host-triple: x86_64-unknown-linux-gnu
-# features: "sqlite,postgresql"
-# channel: nightly
-# os: ubuntu-18.04
-# ext:
-# - target-triple: x86_64-unknown-linux-musl
-# host-triple: x86_64-unknown-linux-gnu
-# features: "sqlite,postgresql"
-# channel: stable
-# os: ubuntu-18.04
-# ext:
+# ext: ""

 name: Building ${{ matrix.channel }}-${{ matrix.target-triple }}
 runs-on: ${{ matrix.os }}
@@ -94,20 +101,43 @@ jobs:


 # Run cargo tests (In release mode to speed up future builds)
-- name: '`cargo test --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
+# First test all features together, afterwards test them separately.
+- name: "`cargo test --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`"
 uses: actions-rs/cargo@v1
 with:
 command: test
-args: --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}
+args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}
+# Test single features
+# 0: sqlite
+- name: "`cargo test --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }}`"
+uses: actions-rs/cargo@v1
+with:
+command: test
+args: --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }}
+if: ${{ matrix.features[0] != '' }}
+# 1: mysql
+- name: "`cargo test --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }}`"
+uses: actions-rs/cargo@v1
+with:
+command: test
+args: --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }}
+if: ${{ matrix.features[1] != '' }}
+# 2: postgresql
+- name: "`cargo test --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }}`"
+uses: actions-rs/cargo@v1
+with:
+command: test
+args: --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }}
+if: ${{ matrix.features[2] != '' }}
 # End Run cargo tests


-# Run cargo clippy (In release mode to speed up future builds)
-- name: '`cargo clippy --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
+# Run cargo clippy, and fail on warnings (In release mode to speed up future builds)
+- name: "`cargo clippy --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`"
 uses: actions-rs/cargo@v1
 with:
 command: clippy
-args: --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}
+args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} -- -D warnings
 # End Run cargo clippy


@@ -121,11 +151,11 @@ jobs:


 # Build the binary
-- name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`'
+- name: "`cargo build --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`"
 uses: actions-rs/cargo@v1
 with:
 command: build
-args: --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}
+args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}
 # End Build the binary

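The build workflow above switches `matrix.features` from a single string to a list, so `join(matrix.features, ',')` can build everything at once while `matrix.features[0]`, `[1]`, and `[2]` drive separate per-feature test runs. A per-feature pass matters because Cargo features gate code at compile time; the following is a generic Rust sketch of that gating (the module and function names are illustrative, not taken from the Vaultwarden sources):

```rust
// Code behind a Cargo feature only compiles when that feature is enabled,
// so `cargo test --features sqlite` and `cargo test --features postgresql`
// exercise different code paths (and surface different warnings when
// RUSTFLAGS="-D warnings" turns warnings into errors).
#[cfg(feature = "sqlite")]
mod sqlite_backend {
    pub fn name() -> &'static str {
        "sqlite"
    }
}

#[cfg(feature = "postgresql")]
mod postgresql_backend {
    pub fn name() -> &'static str {
        "postgresql"
    }
}

fn main() {
    #[cfg(feature = "sqlite")]
    println!("compiled with backend: {}", sqlite_backend::name());

    #[cfg(feature = "postgresql")]
    println!("compiled with backend: {}", postgresql_backend::name());
}
```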

.github/workflows/hadolint.yml (vendored): 9 changed lines

@@ -2,6 +2,9 @@ name: Hadolint

 on:
 push:
+# Ignore when there are only changes done too one of these paths
+paths:
+- "docker/**"
 pull_request:
 # Ignore when there are only changes done too one of these paths
 paths:
@@ -22,14 +25,14 @@ jobs:
 - name: Download hadolint
 shell: bash
 run: |
-sudo curl -L https://github.com/hadolint/hadolint/releases/download/v$HADOLINT_VERSION/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
+sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
 sudo chmod +x /usr/local/bin/hadolint
 env:
-HADOLINT_VERSION: 2.0.0
+HADOLINT_VERSION: 2.5.0
 # End Download hadolint

 # Test Dockerfiles
 - name: Run hadolint
 shell: bash
-run: git ls-files --exclude='docker/*/Dockerfile*' --ignored | xargs hadolint
+run: git ls-files --exclude='docker/*/Dockerfile*' --ignored --cached | xargs hadolint
 # End Test Dockerfiles

.pre-commit-config.yaml (new file): 37 lines

@@ -0,0 +1,37 @@
+---
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+rev: v4.0.1
+hooks:
+- id: check-yaml
+- id: check-json
+- id: check-toml
+- id: end-of-file-fixer
+- id: check-case-conflict
+- id: check-merge-conflict
+- id: detect-private-key
+- repo: local
+hooks:
+- id: fmt
+name: fmt
+description: Format files with cargo fmt.
+entry: cargo fmt
+language: system
+types: [rust]
+args: ["--", "--check"]
+- id: cargo-test
+name: cargo test
+description: Test the package for errors.
+entry: cargo test
+language: system
+args: ["--features", "sqlite,mysql,postgresql", "--"]
+types: [rust]
+pass_filenames: false
+- id: cargo-clippy
+name: cargo clippy
+description: Lint Rust sources
+entry: cargo clippy
+language: system
+args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
+types: [rust]
+pass_filenames: false

Cargo.lock (generated): 694 changed lines

Cargo.toml: 48 changed lines

@@ -28,17 +28,23 @@ syslog = "4.0.1"

 [dependencies]
 # Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
-rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
-rocket_contrib = "0.5.0-dev"
+rocket = { version = "=0.5.0-dev", features = ["tls"], default-features = false }
+rocket_contrib = "=0.5.0-dev"

 # HTTP client
-reqwest = { version = "0.11.3", features = ["blocking", "json", "gzip", "brotli", "socks"] }
+reqwest = { version = "0.11.4", features = ["blocking", "json", "gzip", "brotli", "socks", "cookies"] }

+# Used for custom short lived cookie jar
+cookie = "0.15.1"
+cookie_store = "0.15.0"
+bytes = "1.0.1"
+url = "2.2.2"
+
 # multipart/form-data support
-multipart = { version = "0.17.1", features = ["server"], default-features = false }
+multipart = { version = "0.18.0", features = ["server"], default-features = false }

 # WebSockets library
-ws = { version = "0.10.0", package = "parity-ws" }
+ws = { version = "0.11.0", package = "parity-ws" }

 # MessagePack library
 rmpv = "0.4.7"
@@ -47,7 +53,7 @@ rmpv = "0.4.7"
 chashmap = "2.2.2"

 # A generic serialization/deserialization framework
-serde = { version = "1.0.125", features = ["derive"] }
+serde = { version = "1.0.126", features = ["derive"] }
 serde_json = "1.0.64"

 # Logging
@@ -55,14 +61,14 @@ log = "0.4.14"
 fern = { version = "0.6.0", features = ["syslog-4"] }

 # A safe, extensible ORM and Query builder
-diesel = { version = "1.4.6", features = [ "chrono", "r2d2"] }
+diesel = { version = "1.4.7", features = [ "chrono", "r2d2"] }
 diesel_migrations = "1.4.0"

 # Bundled SQLite
-libsqlite3-sys = { version = "0.20.1", features = ["bundled"], optional = true }
+libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true }

 # Crypto-related libraries
-rand = "0.8.3"
+rand = "0.8.4"
 ring = "0.16.20"

 # UUID generation
@@ -71,7 +77,7 @@ uuid = { version = "0.8.2", features = ["v4"] }
 # Date and time libraries
 chrono = { version = "0.4.19", features = ["serde"] }
 chrono-tz = "0.5.3"
-time = "0.2.26"
+time = "0.2.27"

 # Job scheduler
 job_scheduler = "1.2.1"
@@ -87,6 +93,7 @@ jsonwebtoken = "7.2.0"

 # U2F library
 u2f = "0.2.0"
+webauthn-rs = "=0.3.0-alpha.9"

 # Yubico Library
 yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false }
@@ -95,39 +102,38 @@ yubico = { version = "0.10.0", features = ["online-tokio"], default-features = f
 dotenv = { version = "0.15.0", default-features = false }

 # Lazy initialization
-once_cell = "1.7.2"
+once_cell = "1.8.0"

 # Numerical libraries
 num-traits = "0.2.14"
 num-derive = "0.3.3"

 # Email libraries
-tracing = { version = "0.1.25", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled.
-lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
-newline-converter = "0.2.0"
+tracing = { version = "0.1.26", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled.
+lettre = { version = "0.10.0-rc.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }

 # Template library
-handlebars = { version = "3.5.4", features = ["dir_source"] }
+handlebars = { version = "4.1.0", features = ["dir_source"] }

 # For favicon extraction from main website
 html5ever = "0.25.1"
 markup5ever_rcdom = "0.1.0"
-regex = { version = "1.4.5", features = ["std", "perf"], default-features = false }
+regex = { version = "1.5.4", features = ["std", "perf"], default-features = false }
 data-url = "0.1.0"

 # Used by U2F, JWT and Postgres
-openssl = "0.10.34"
+openssl = "0.10.35"

 # URL encoding library
 percent-encoding = "2.1.0"
 # Punycode conversion
-idna = "0.2.2"
+idna = "0.2.3"

 # CLI argument parsing
-pico-args = "0.4.0"
+pico-args = "0.4.2"

 # Logging panics to logfile instead stderr only
-backtrace = "0.3.56"
+backtrace = "0.3.60"

 # Macro ident concatenation
 paste = "1.0.5"
@@ -138,7 +144,7 @@ rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429d
 rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429de1913ce7e3036575a7b4d88b6d7' }

 # For favicon extraction from main website
-data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '540ede02d0771824c0c80ff9f57fe8eff38b1291' }
+data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = 'eb7330b5296c0d43816d1346211b74182bb4ae37' }

 # The maintainer of the `job_scheduler` crate doesn't seem to have responded
 # to any issues or PRs for almost a year (as of April 2021). This hopefully
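Among the dependency bumps above, reqwest gains the "cookies" feature and the `cookie`/`cookie_store` crates arrive for a custom short lived cookie jar. As a rough illustration of what the reqwest feature itself provides (this uses reqwest's built-in cookie store, not the custom jar from the Vaultwarden code, and the URLs are placeholders):

```rust
use reqwest::blocking::Client;

fn main() -> Result<(), reqwest::Error> {
    // `cookie_store(true)` is only available when reqwest's "cookies" feature is enabled.
    let client = Client::builder().cookie_store(true).build()?;

    // Cookies set by the first response are replayed automatically on later requests.
    let login = client.get("https://example.com/login").send()?;
    println!("login status: {}", login.status());

    let profile = client.get("https://example.com/profile").send()?;
    println!("profile status: {}", profile.status());

    Ok(())
}
```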

README.md: 12 changed lines

@@ -1,8 +1,10 @@
 ### Alternative implementation of the Bitwarden server API written in Rust and compatible with [upstream Bitwarden clients](https://bitwarden.com/#download)*, perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.

+📢 Note: This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues. Please see [#1642](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.
+
 ---

 [](https://hub.docker.com/r/vaultwarden/server)
 [](https://deps.rs/repo/github/dani-garcia/vaultwarden)
 [](https://github.com/dani-garcia/vaultwarden/releases/latest)
 [](https://github.com/dani-garcia/vaultwarden/blob/master/LICENSE.txt)
@@ -35,7 +37,7 @@ Pull the docker image and mount a volume from the host for persistent storage:
 docker pull vaultwarden/server:latest
 docker run -d --name vaultwarden -v /vw-data/:/data/ -p 80:80 vaultwarden/server:latest
 ```
-This will preserve any persistent data under /bw-data/, you can adapt the path to whatever suits you.
+This will preserve any persistent data under /vw-data/, you can adapt the path to whatever suits you.

 **IMPORTANT**: Some web browsers, like Chrome, disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault from HTTPS.

@@ -73,15 +75,15 @@ Thanks for your contribution to the project!
 <table>
 <tr>
 <td align="center">
-<a href="https://github.com/ChonoN" style="width: 75px">
-<sub><b>ChonoN</b></sub>
+<a href="https://github.com/Gyarbij" style="width: 75px">
+<sub><b>Chono N</b></sub>
 </a>
 </td>
 </tr>
 <tr>
 <td align="center">
 <a href="https://github.com/themightychris">
-<sub><b>themightychris</b></sub>
+<sub><b>Chris Alfano</b></sub>
 </a>
 </td>
 </tr>

SECURITY.md (new file): 45 lines

@@ -0,0 +1,45 @@
+Vaultwarden tries to prevent security issues but there could always slip something through.
+If you believe you've found a security issue in our application, we encourage you to
+notify us. We welcome working with you to resolve the issue promptly. Thanks in advance!
+
+# Disclosure Policy
+
+- Let us know as soon as possible upon discovery of a potential security issue, and we'll make every
+effort to quickly resolve the issue.
+- Provide us a reasonable amount of time to resolve the issue before any disclosure to the public or a
+third-party. We may publicly disclose the issue before resolving it, if appropriate.
+- Make a good faith effort to avoid privacy violations, destruction of data, and interruption or
+degradation of our service. Only interact with accounts you own or with explicit permission of the
+account holder.
+
+# In-scope
+
+- Security issues in any current release of Vaultwarden. Source code is available at https://github.com/dani-garcia/vaultwarden. This includes the current `latest` release and `main / testing` release.
+
+# Exclusions
+
+The following bug classes are out-of scope:
+
+- Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
+- Bugs that are not part of Vaultwarden, like on the the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
+- Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
+- Attacks requiring physical access to a user's device
+- Issues related to software or protocols not under Vaultwarden's control
+- Vulnerabilities in outdated versions of Vaultwarden
+- Missing security best practices that do not directly lead to a vulnerability (You may still report them as a normal issue)
+- Issues that do not have any impact on the general public
+
+While researching, we'd like to ask you to refrain from:
+
+- Denial of service
+- Spamming
+- Social engineering (including phishing) of Vaultwarden developers, contributors or users
+
+Thank you for helping keep Vaultwarden and our users safe!
+
+# How to contact us
+
+- You can contact us on Matrix https://matrix.to/#/#vaultwarden:matrix.org (user: `@danig:matrix.org`)
+- You can send an  to report a security issue.
+- If you want to send an encrypted email you can use the following GPG key:<br>
+https://keyserver.ubuntu.com/pks/lookup?search=0xB9B7A108373276BF3C0406F9FC8A7D14C3CD543A&fingerprint=on&op=index

@@ -1,22 +0,0 @@
-pool:
-vmImage: 'Ubuntu-18.04'
-
-steps:
-- script: |
-ls -la
-curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain) --profile=minimal
-echo "##vso[task.prependpath]$HOME/.cargo/bin"
-displayName: 'Install Rust'
-
-- script: |
-sudo apt-get update
-sudo apt-get install -y --no-install-recommends build-essential libmariadb-dev-compat libpq-dev libssl-dev pkgconf
-displayName: 'Install build libraries.'
-
-- script: |
-rustc -Vv
-cargo -V
-displayName: Query rust and cargo versions
-
-- script : cargo test --features "sqlite,mysql,postgresql"
-displayName: 'Test project with sqlite, mysql and postgresql backends'
@@ -1,15 +1,15 @@
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

-{% set build_stage_base_image = "rust:1.51" %}
+{% set build_stage_base_image = "rust:1.53" %}
 {% if "alpine" in target_file %}
 {% if "amd64" in target_file %}
-{% set build_stage_base_image = "clux/muslrust:nightly-2021-04-14" %}
-{% set runtime_stage_base_image = "alpine:3.13" %}
+{% set build_stage_base_image = "clux/muslrust:nightly-2021-06-24" %}
+{% set runtime_stage_base_image = "alpine:3.14" %}
 {% set package_arch_target = "x86_64-unknown-linux-musl" %}
 {% elif "armv7" in target_file %}
 {% set build_stage_base_image = "messense/rust-musl-cross:armv7-musleabihf" %}
-{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.13" %}
+{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.14" %}
 {% set package_arch_target = "armv7-unknown-linux-musleabihf" %}
 {% endif %}
 {% elif "amd64" in target_file %}
@@ -44,8 +44,8 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-{% set vault_version = "2.19.0d" %}
-{% set vault_image_digest = "sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233" %}
+{% set vault_version = "2.21.1" %}
+{% set vault_image_digest = "sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5" %}
 # The web-vault digest specifies a particular web-vault build on Docker Hub.
 # Using the digest instead of the tag name provides better security,
 # as the digest of an image is immutable, whereas a tag name can later
@@ -75,7 +75,8 @@ ARG DB=sqlite,postgresql
 {% set features = "sqlite,postgresql" %}
 {% else %}
 # Alpine-based ARM (musl) only supports sqlite during compile time.
-ARG DB=sqlite
+# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed.
+ARG DB=sqlite,vendored_openssl
 {% set features = "sqlite" %}
 {% endif %}
 {% else %}
@@ -110,11 +111,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
 libpq5{{ package_arch_prefix }} \
 libpq-dev \
 libmariadb-dev{{ package_arch_prefix }} \
-libmariadb-dev-compat{{ package_arch_prefix }}
-
-RUN apt-get update \
-&& apt-get install -y \
---no-install-recommends \
+libmariadb-dev-compat{{ package_arch_prefix }} \
 gcc-{{ package_cross_compiler }} \
 && mkdir -p ~/.cargo \
 && echo '[target.{{ package_arch_target }}]' >> ~/.cargo/config \
@@ -150,16 +147,15 @@ COPY ./build.rs ./build.rs
 # We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
 # We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the {{ package_arch_prefix }} version.
 # What we can do is a force install, because nothing important is overlapping each other.
-RUN apt-get install -y --no-install-recommends libmariadb3:amd64 && \
-apt-get download libmariadb-dev-compat:amd64 && \
-dpkg --force-all -i ./libmariadb-dev-compat*.deb && \
-rm -rvf ./libmariadb-dev-compat*.deb
-
-# For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
-# The libpq5{{ package_arch_prefix }} package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
-# This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
-# Without this specific file the ld command will fail and compilation fails with it.
-RUN ln -sfnr /usr/lib/{{ package_cross_compiler }}/libpq.so.5 /usr/lib/{{ package_cross_compiler }}/libpq.so
+RUN apt-get install -y --no-install-recommends libmariadb3:amd64 \
+&& apt-get download libmariadb-dev-compat:amd64 \
+&& dpkg --force-all -i ./libmariadb-dev-compat*.deb \
+&& rm -rvf ./libmariadb-dev-compat*.deb \
+# For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
+# The libpq5{{ package_arch_prefix }} package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
+# This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
+# Without this specific file the ld command will fail and compilation fails with it.
+&& ln -sfnr /usr/lib/{{ package_cross_compiler }}/libpq.so.5 /usr/lib/{{ package_cross_compiler }}/libpq.so

 ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_compiler }}-gcc"
 ENV CROSS_COMPILE="1"
@@ -174,8 +170,8 @@ RUN rustup target add {{ package_arch_target }}
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release{{ package_arch_target_param }}
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release{{ package_arch_target_param }} \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -189,6 +185,7 @@ RUN touch src/main.rs
 RUN cargo build --features ${DB} --release{{ package_arch_target_param }}
 {% if "alpine" in target_file %}
 {% if "armv7" in target_file %}
+# hadolint ignore=DL3059
 RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden
 {% endif %}
 {% endif %}
@@ -206,12 +203,14 @@ ENV SSL_CERT_DIR=/etc/ssl/certs
 {% endif %}

 {% if "amd64" not in target_file %}
+# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 {% endif %}
-# Install needed libraries
+# Create data folder and Install needed libraries
+RUN mkdir /data \
 {% if "alpine" in runtime_stage_base_image %}
-RUN apk add --no-cache \
+&& apk add --no-cache \
 openssl \
 curl \
 dumb-init \
@@ -223,7 +222,7 @@ RUN apk add --no-cache \
 {% endif %}
 ca-certificates
 {% else %}
-RUN apt-get update && apt-get install -y \
+&& apt-get update && apt-get install -y \
 --no-install-recommends \
 openssl \
 ca-certificates \
@@ -234,12 +233,11 @@ RUN apt-get update && apt-get install -y \
 && rm -rf /var/lib/apt/lists/*
 {% endif %}

-RUN mkdir /data
 {% if "amd64" not in target_file %}
+# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 {% endif %}

 VOLUME /data
 EXPOSE 80
 EXPOSE 3012
@@ -14,18 +14,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2.19.0d
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
-# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
+# $ docker pull vaultwarden/web-vault:v2.21.1
+# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
+# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
-# [vaultwarden/web-vault:v2.19.0d]
+# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
+# [vaultwarden/web-vault:v2.21.1]
 #
-FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
+FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.51 as build
+FROM rust:1.53 as build

 # Debian-based builds support multidb
 ARG DB=sqlite,mysql,postgresql
@@ -56,8 +56,8 @@ COPY ./build.rs ./build.rs
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -79,8 +79,10 @@ ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10

-# Install needed libraries
-RUN apt-get update && apt-get install -y \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apt-get update && apt-get install -y \
 --no-install-recommends \
 openssl \
 ca-certificates \
@@ -90,7 +92,7 @@ RUN apt-get update && apt-get install -y \
 libpq5 \
 && rm -rf /var/lib/apt/lists/*

-RUN mkdir /data
 VOLUME /data
 EXPOSE 80
 EXPOSE 3012
@@ -14,18 +14,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2.19.0d
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
-# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
+# $ docker pull vaultwarden/web-vault:v2.21.1
+# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
+# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
-# [vaultwarden/web-vault:v2.19.0d]
+# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
+# [vaultwarden/web-vault:v2.21.1]
 #
-FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
+FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault

 ########################## BUILD IMAGE ##########################
-FROM clux/muslrust:nightly-2021-04-14 as build
+FROM clux/muslrust:nightly-2021-06-24 as build

 # Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time.
 ARG DB=sqlite,postgresql
@@ -53,8 +53,8 @@ RUN rustup target add x86_64-unknown-linux-musl
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -70,22 +70,24 @@ RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.13
+FROM alpine:3.14

 ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 ENV SSL_CERT_DIR=/etc/ssl/certs

-# Install needed libraries
-RUN apk add --no-cache \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apk add --no-cache \
 openssl \
 curl \
 dumb-init \
 postgresql-libs \
 ca-certificates

-RUN mkdir /data
 VOLUME /data
 EXPOSE 80
 EXPOSE 3012
@@ -14,18 +14,18 @@
|
|||||||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||||
# click the tag name to view the digest of the image it currently points to.
|
# click the tag name to view the digest of the image it currently points to.
|
||||||
# - From the command line:
|
# - From the command line:
|
||||||
# $ docker pull vaultwarden/web-vault:v2.19.0d
|
# $ docker pull vaultwarden/web-vault:v2.21.1
|
||||||
# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
|
# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
|
||||||
# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
|
# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
|
||||||
#
|
#
|
||||||
# - Conversely, to get the tag name from the digest:
|
# - Conversely, to get the tag name from the digest:
|
||||||
# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
|
# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
|
||||||
# [vaultwarden/web-vault:v2.19.0d]
|
# [vaultwarden/web-vault:v2.21.1]
|
||||||
#
|
#
|
||||||
FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
|
FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
FROM rust:1.51 as build
|
FROM rust:1.53 as build
|
||||||
|
|
||||||
# Debian-based builds support multidb
|
# Debian-based builds support multidb
|
||||||
ARG DB=sqlite,mysql,postgresql
|
ARG DB=sqlite,mysql,postgresql
|
||||||
@@ -49,11 +49,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
|||||||
libpq5:arm64 \
|
libpq5:arm64 \
|
||||||
libpq-dev \
|
libpq-dev \
|
||||||
libmariadb-dev:arm64 \
|
libmariadb-dev:arm64 \
|
||||||
libmariadb-dev-compat:arm64
|
libmariadb-dev-compat:arm64 \
|
||||||
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get install -y \
|
|
||||||
--no-install-recommends \
|
|
||||||
gcc-aarch64-linux-gnu \
|
gcc-aarch64-linux-gnu \
|
||||||
&& mkdir -p ~/.cargo \
|
&& mkdir -p ~/.cargo \
|
||||||
&& echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
|
 && echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
@@ -77,16 +73,15 @@ COPY ./build.rs ./build.rs
 # We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
 # We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :arm64 version.
 # What we can do is a force install, because nothing important is overlapping each other.
-RUN apt-get install -y --no-install-recommends libmariadb3:amd64 && \
-apt-get download libmariadb-dev-compat:amd64 && \
-dpkg --force-all -i ./libmariadb-dev-compat*.deb && \
-rm -rvf ./libmariadb-dev-compat*.deb
-
+RUN apt-get install -y --no-install-recommends libmariadb3:amd64 \
+&& apt-get download libmariadb-dev-compat:amd64 \
+&& dpkg --force-all -i ./libmariadb-dev-compat*.deb \
+&& rm -rvf ./libmariadb-dev-compat*.deb \
 # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
 # The libpq5:arm64 package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
 # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
 # Without this specific file the ld command will fail and compilation fails with it.
-RUN ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so
+&& ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so

 ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
 ENV CROSS_COMPILE="1"
@@ -97,8 +92,8 @@ RUN rustup target add aarch64-unknown-linux-gnu
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -120,10 +115,12 @@ ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10

+# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

-# Install needed libraries
-RUN apt-get update && apt-get install -y \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apt-get update && apt-get install -y \
 --no-install-recommends \
 openssl \
 ca-certificates \
@@ -133,8 +130,7 @@ RUN apt-get update && apt-get install -y \
 libpq5 \
 && rm -rf /var/lib/apt/lists/*

-RUN mkdir /data
-
+# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -14,18 +14,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2.19.0d
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
-# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
+# $ docker pull vaultwarden/web-vault:v2.21.1
+# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
+# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
-# [vaultwarden/web-vault:v2.19.0d]
+# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
+# [vaultwarden/web-vault:v2.21.1]
 #
-FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
+FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.51 as build
+FROM rust:1.53 as build

 # Debian-based builds support multidb
 ARG DB=sqlite,mysql,postgresql
@@ -49,11 +49,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
 libpq5:armel \
 libpq-dev \
 libmariadb-dev:armel \
-libmariadb-dev-compat:armel
-
-RUN apt-get update \
-&& apt-get install -y \
---no-install-recommends \
+libmariadb-dev-compat:armel \
 gcc-arm-linux-gnueabi \
 && mkdir -p ~/.cargo \
 && echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
@@ -77,16 +73,15 @@ COPY ./build.rs ./build.rs
 # We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
 # We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armel version.
 # What we can do is a force install, because nothing important is overlapping each other.
-RUN apt-get install -y --no-install-recommends libmariadb3:amd64 && \
-apt-get download libmariadb-dev-compat:amd64 && \
-dpkg --force-all -i ./libmariadb-dev-compat*.deb && \
-rm -rvf ./libmariadb-dev-compat*.deb
-
+RUN apt-get install -y --no-install-recommends libmariadb3:amd64 \
+&& apt-get download libmariadb-dev-compat:amd64 \
+&& dpkg --force-all -i ./libmariadb-dev-compat*.deb \
+&& rm -rvf ./libmariadb-dev-compat*.deb \
 # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
 # The libpq5:armel package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
 # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
 # Without this specific file the ld command will fail and compilation fails with it.
-RUN ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so
+&& ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so

 ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
 ENV CROSS_COMPILE="1"
@@ -97,8 +92,8 @@ RUN rustup target add arm-unknown-linux-gnueabi
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -120,10 +115,12 @@ ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10

+# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

-# Install needed libraries
-RUN apt-get update && apt-get install -y \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apt-get update && apt-get install -y \
 --no-install-recommends \
 openssl \
 ca-certificates \
@@ -133,8 +130,7 @@ RUN apt-get update && apt-get install -y \
 libpq5 \
 && rm -rf /var/lib/apt/lists/*

-RUN mkdir /data
-
+# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -14,18 +14,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2.19.0d
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
-# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
+# $ docker pull vaultwarden/web-vault:v2.21.1
+# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
+# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
-# [vaultwarden/web-vault:v2.19.0d]
+# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
+# [vaultwarden/web-vault:v2.21.1]
 #
-FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
+FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.51 as build
+FROM rust:1.53 as build

 # Debian-based builds support multidb
 ARG DB=sqlite,mysql,postgresql
@@ -49,11 +49,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
 libpq5:armhf \
 libpq-dev \
 libmariadb-dev:armhf \
-libmariadb-dev-compat:armhf
-
-RUN apt-get update \
-&& apt-get install -y \
---no-install-recommends \
+libmariadb-dev-compat:armhf \
 gcc-arm-linux-gnueabihf \
 && mkdir -p ~/.cargo \
 && echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
@@ -77,16 +73,15 @@ COPY ./build.rs ./build.rs
 # We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
 # We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armhf version.
 # What we can do is a force install, because nothing important is overlapping each other.
-RUN apt-get install -y --no-install-recommends libmariadb3:amd64 && \
-apt-get download libmariadb-dev-compat:amd64 && \
-dpkg --force-all -i ./libmariadb-dev-compat*.deb && \
-rm -rvf ./libmariadb-dev-compat*.deb
-
+RUN apt-get install -y --no-install-recommends libmariadb3:amd64 \
+&& apt-get download libmariadb-dev-compat:amd64 \
+&& dpkg --force-all -i ./libmariadb-dev-compat*.deb \
+&& rm -rvf ./libmariadb-dev-compat*.deb \
 # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
 # The libpq5:armhf package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
 # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
 # Without this specific file the ld command will fail and compilation fails with it.
-RUN ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so
+&& ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so

 ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
 ENV CROSS_COMPILE="1"
@@ -97,8 +92,8 @@ RUN rustup target add armv7-unknown-linux-gnueabihf
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -120,10 +115,12 @@ ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10

+# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

-# Install needed libraries
-RUN apt-get update && apt-get install -y \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apt-get update && apt-get install -y \
 --no-install-recommends \
 openssl \
 ca-certificates \
@@ -133,8 +130,7 @@ RUN apt-get update && apt-get install -y \
 libpq5 \
 && rm -rf /var/lib/apt/lists/*

-RUN mkdir /data
-
+# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -14,21 +14,22 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2.19.0d
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d
-# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233]
+# $ docker pull vaultwarden/web-vault:v2.21.1
+# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.21.1
+# [vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233
-# [vaultwarden/web-vault:v2.19.0d]
+# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5
+# [vaultwarden/web-vault:v2.21.1]
 #
-FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault
+FROM vaultwarden/web-vault@sha256:29a4fa7bf3790fff9d908b02ac5a154913491f4bf30c95b87b06d8cf1c5516b5 as vault

 ########################## BUILD IMAGE ##########################
 FROM messense/rust-musl-cross:armv7-musleabihf as build

 # Alpine-based ARM (musl) only supports sqlite during compile time.
-ARG DB=sqlite
+# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed.
+ARG DB=sqlite,vendored_openssl

 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color
@@ -54,8 +55,8 @@ RUN rustup target add armv7-unknown-linux-musleabihf
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
-RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
-RUN find . -not -path "./target*" -delete
+RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf \
+&& find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
@@ -67,29 +68,31 @@ RUN touch src/main.rs
 # Builds again, this time it'll just be
 # your actual source files being built
 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
+# hadolint ignore=DL3059
 RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/armv7hf-alpine:3.13
+FROM balenalib/armv7hf-alpine:3.14

 ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 ENV SSL_CERT_DIR=/etc/ssl/certs

+# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

-# Install needed libraries
-RUN apk add --no-cache \
+# Create data folder and Install needed libraries
+RUN mkdir /data \
+&& apk add --no-cache \
 openssl \
 curl \
 dumb-init \
 ca-certificates

-RUN mkdir /data
-
+# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
migrations/mysql/2021-04-30-233251_add_reprompt/up.sql (new file)
@@ -0,0 +1,2 @@
+ALTER TABLE ciphers
+ADD COLUMN reprompt INTEGER;

migrations/mysql/2021-05-11-205202_add_hide_email/up.sql (new file)
@@ -0,0 +1,2 @@
+ALTER TABLE sends
+ADD COLUMN hide_email BOOLEAN;

@@ -0,0 +1,5 @@
+ALTER TABLE organizations
+ADD COLUMN private_key TEXT;
+
+ALTER TABLE organizations
+ADD COLUMN public_key TEXT;

@@ -0,0 +1,2 @@
+ALTER TABLE ciphers
+ADD COLUMN reprompt INTEGER;

@@ -0,0 +1,2 @@
+ALTER TABLE sends
+ADD COLUMN hide_email BOOLEAN;

@@ -0,0 +1,5 @@
+ALTER TABLE organizations
+ADD COLUMN private_key TEXT;
+
+ALTER TABLE organizations
+ADD COLUMN public_key TEXT;

migrations/sqlite/2021-04-30-233251_add_reprompt/up.sql (new file)
@@ -0,0 +1,2 @@
+ALTER TABLE ciphers
+ADD COLUMN reprompt INTEGER;

@@ -0,0 +1,2 @@
+ALTER TABLE sends
+ADD COLUMN hide_email BOOLEAN;

@@ -0,0 +1,5 @@
+ALTER TABLE organizations
+ADD COLUMN private_key TEXT;
+
+ALTER TABLE organizations
+ADD COLUMN public_key TEXT;
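These migrations add nullable columns, which surface in the Rust models as `Option` fields (the cipher change shows up later in this diff as `Reprompt: Option<i32>` and `cipher.reprompt = data.Reprompt;`). A toy, self-contained sketch of that mapping only — the struct and field names below are illustrative and are not the project's actual Diesel models:

```rust
// Toy illustration: a nullable SQL column corresponds to an Option field, so
// rows written before the migration simply carry None for the new column.
#[derive(Debug, Default)]
struct CipherRow {
    reprompt: Option<i32>, // NULL until a client sets a reprompt policy
}

#[derive(Debug, Default)]
struct OrganizationRow {
    private_key: Option<String>, // NULL for orgs created before the migration
    public_key: Option<String>,
}

fn main() {
    let legacy = CipherRow::default();
    let updated = CipherRow { reprompt: Some(1) };
    println!("legacy: {:?}, updated: {:?}", legacy, updated);
    println!("{:?}", OrganizationRow::default());
}
```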
resources/vaultwarden-icon-white.svg (new file, 8.7 KiB)
resources/vaultwarden-icon.svg (new file, 8.4 KiB)
resources/vaultwarden-logo-white.svg (new file, 17 KiB)
resources/vaultwarden-logo.svg (new file, 12 KiB)
@@ -1 +1 @@
-nightly-2021-04-14
+nightly-2021-06-24
src/api/admin.rs
@@ -4,7 +4,7 @@ use serde_json::Value;
 use std::{env, time::Duration};

 use rocket::{
-http::{Cookie, Cookies, SameSite},
+http::{Cookie, Cookies, SameSite, Status},
 request::{self, FlashMessage, Form, FromRequest, Outcome, Request},
 response::{content::Html, Flash, Redirect},
 Route,
@@ -12,7 +12,7 @@ use rocket::{
 use rocket_contrib::json::Json;

 use crate::{
-api::{ApiResult, EmptyResult, NumberOrString},
+api::{ApiResult, EmptyResult, JsonResult, NumberOrString},
 auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp},
 config::ConfigBuilder,
 db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
@@ -30,6 +30,7 @@ pub fn routes() -> Vec<Route> {
 routes![
 admin_login,
 get_users_json,
+get_user_json,
 post_admin_login,
 admin_page,
 invite_user,
@@ -195,9 +196,7 @@ fn _validate_token(token: &str) -> bool {
 struct AdminTemplateData {
 page_content: String,
 version: Option<&'static str>,
-users: Option<Vec<Value>>,
-organizations: Option<Vec<Value>>,
-diagnostics: Option<Value>,
+page_data: Option<Value>,
 config: Value,
 can_backup: bool,
 logged_in: bool,
@@ -213,51 +212,19 @@ impl AdminTemplateData {
 can_backup: *CAN_BACKUP,
 logged_in: true,
 urlpath: CONFIG.domain_path(),
-users: None,
-organizations: None,
-diagnostics: None,
+page_data: None,
 }
 }

-fn users(users: Vec<Value>) -> Self {
+fn with_data(page_content: &str, page_data: Value) -> Self {
 Self {
-page_content: String::from("admin/users"),
+page_content: String::from(page_content),
 version: VERSION,
-users: Some(users),
+page_data: Some(page_data),
 config: CONFIG.prepare_json(),
 can_backup: *CAN_BACKUP,
 logged_in: true,
 urlpath: CONFIG.domain_path(),
-organizations: None,
-diagnostics: None,
-}
-}
-
-fn organizations(organizations: Vec<Value>) -> Self {
-Self {
-page_content: String::from("admin/organizations"),
-version: VERSION,
-organizations: Some(organizations),
-config: CONFIG.prepare_json(),
-can_backup: *CAN_BACKUP,
-logged_in: true,
-urlpath: CONFIG.domain_path(),
-users: None,
-diagnostics: None,
-}
-}
-
-fn diagnostics(diagnostics: Value) -> Self {
-Self {
-page_content: String::from("admin/diagnostics"),
-version: VERSION,
-organizations: None,
-config: CONFIG.prepare_json(),
-can_backup: *CAN_BACKUP,
-logged_in: true,
-urlpath: CONFIG.domain_path(),
-users: None,
-diagnostics: Some(diagnostics),
 }
 }

@@ -278,23 +245,39 @@ struct InviteData {
 email: String,
 }

+fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> {
+if let Some(user) = User::find_by_uuid(uuid, conn) {
+Ok(user)
+} else {
+err_code!("User doesn't exist", Status::NotFound.code);
+}
+}
+
 #[post("/invite", data = "<data>")]
-fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> EmptyResult {
+fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
 let data: InviteData = data.into_inner();
 let email = data.email.clone();
 if User::find_by_mail(&data.email, &conn).is_some() {
-err!("User already exists")
+err_code!("User already exists", Status::Conflict.code)
 }

 let mut user = User::new(email);
-user.save(&conn)?;

-if CONFIG.mail_enabled() {
-mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None)
-} else {
-let invitation = Invitation::new(data.email);
-invitation.save(&conn)
-}
+// TODO: After try_blocks is stabilized, this can be made more readable
+// See: https://github.com/rust-lang/rust/issues/31436
+(|| {
+if CONFIG.mail_enabled() {
+mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None)?;
+} else {
+let invitation = Invitation::new(data.email);
+invitation.save(&conn)?;
+}
+
+user.save(&conn)
+})()
+.map_err(|e| e.with_code(Status::InternalServerError.code))?;
+
+Ok(Json(user.to_json(&conn)))
 }

 #[post("/test/smtp", data = "<data>")]
@@ -343,19 +326,26 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
 })
 .collect();

-let text = AdminTemplateData::users(users_json).render()?;
+let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?;
 Ok(Html(text))
 }

+#[get("/users/<uuid>")]
+fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult {
+let user = get_user_or_404(&uuid, &conn)?;
+
+Ok(Json(user.to_json(&conn)))
+}
+
 #[post("/users/<uuid>/delete")]
 fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-let user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;
+let user = get_user_or_404(&uuid, &conn)?;
 user.delete(&conn)
 }

 #[post("/users/<uuid>/deauth")]
 fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;
+let mut user = get_user_or_404(&uuid, &conn)?;
 Device::delete_all_by_user(&user.uuid, &conn)?;
 user.reset_security_stamp();

@@ -364,7 +354,7 @@ fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {

 #[post("/users/<uuid>/disable")]
 fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;
+let mut user = get_user_or_404(&uuid, &conn)?;
 Device::delete_all_by_user(&user.uuid, &conn)?;
 user.reset_security_stamp();
 user.enabled = false;
@@ -374,7 +364,7 @@ fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {

 #[post("/users/<uuid>/enable")]
 fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;
+let mut user = get_user_or_404(&uuid, &conn)?;
 user.enabled = true;

 user.save(&conn)
@@ -382,7 +372,7 @@ fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {

 #[post("/users/<uuid>/remove-2fa")]
 fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;
+let mut user = get_user_or_404(&uuid, &conn)?;
 TwoFactor::delete_all_by_user(&user.uuid, &conn)?;
 user.totp_recover = None;
 user.save(&conn)
@@ -442,7 +432,7 @@ fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<St
 })
 .collect();

-let text = AdminTemplateData::organizations(organizations_json).render()?;
+let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?;
 Ok(Html(text))
 }

@@ -568,11 +558,12 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
 "db_type": *DB_TYPE,
 "db_version": get_sql_server_version(&conn),
 "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
+"overrides": &CONFIG.get_overrides().join(", "),
 "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
 "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
 });

-let text = AdminTemplateData::diagnostics(diagnostics_json).render()?;
+let text = AdminTemplateData::with_data("admin/diagnostics", diagnostics_json).render()?;
 Ok(Html(text))
 }
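The admin.rs change above folds the three page-specific constructors (`users`, `organizations`, `diagnostics`) into a single `with_data` constructor that takes the template name plus an arbitrary JSON payload. A minimal standalone sketch of that pattern, with the field set reduced for illustration (only the `serde_json` crate is assumed; this is not the project's full struct):

```rust
use serde_json::{json, Value};

// Reduced stand-in for the template context: one template name, one payload.
struct AdminTemplateData {
    page_content: String,
    page_data: Option<Value>,
}

impl AdminTemplateData {
    // The single constructor that replaces the three per-page constructors.
    fn with_data(page_content: &str, page_data: Value) -> Self {
        Self {
            page_content: String::from(page_content),
            page_data: Some(page_data),
        }
    }
}

fn main() {
    // The former constructors become three call sites of the same helper.
    let users = AdminTemplateData::with_data("admin/users", json!([{ "Email": "a@example.com" }]));
    let orgs = AdminTemplateData::with_data("admin/organizations", json!([]));
    let diag = AdminTemplateData::with_data("admin/diagnostics", json!({ "db_type": "sqlite" }));
    for page in [&users, &orgs, &diag] {
        println!("{} -> {:?}", page.page_content, page.page_data);
    }
}
```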
@@ -231,7 +231,10 @@ fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn: DbCon
 err!("Invalid password")
 }

-user.set_password(&data.NewMasterPasswordHash, Some("post_rotatekey"));
+user.set_password(
+&data.NewMasterPasswordHash,
+Some(vec![String::from("post_rotatekey"), String::from("get_contacts")]),
+);
 user.akey = data.Key;
 user.save(&conn)
 }
@@ -320,7 +323,9 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
 err!("The cipher is not owned by the user")
 }

-update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?
+// Prevent triggering cipher updates via WebSockets by settings UpdateType::None
+// The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues.
+update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None)?
 }

 // Update user data
@@ -329,7 +334,6 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
 user.akey = data.Key;
 user.private_key = Some(data.PrivateKey);
 user.reset_security_stamp();
-user.reset_stamp_exception();

 user.save(&conn)
 }
@@ -576,24 +580,45 @@ struct PasswordHintData {

 #[post("/accounts/password-hint", data = "<data>")]
 fn password_hint(data: JsonUpcase<PasswordHintData>, conn: DbConn) -> EmptyResult {
-let data: PasswordHintData = data.into_inner().data;
-let hint = match User::find_by_mail(&data.Email, &conn) {
-Some(user) => user.password_hint,
-None => return Ok(()),
-};
-
-if CONFIG.mail_enabled() {
-mail::send_password_hint(&data.Email, hint)?;
-} else if CONFIG.show_password_hint() {
-if let Some(hint) = hint {
-err!(format!("Your password hint is: {}", &hint));
-} else {
-err!("Sorry, you have no password hint...");
-}
+if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() {
+err!("This server is not configured to provide password hints.");
 }

-Ok(())
+const NO_HINT: &str = "Sorry, you have no password hint...";
+
+let data: PasswordHintData = data.into_inner().data;
+let email = &data.Email;
+
+match User::find_by_mail(email, &conn) {
+None => {
+// To prevent user enumeration, act as if the user exists.
+if CONFIG.mail_enabled() {
+// There is still a timing side channel here in that the code
+// paths that send mail take noticeably longer than ones that
+// don't. Add a randomized sleep to mitigate this somewhat.
+use rand::{thread_rng, Rng};
+let mut rng = thread_rng();
+let base = 1000;
+let delta: i32 = 100;
+let sleep_ms = (base + rng.gen_range(-delta..=delta)) as u64;
+std::thread::sleep(std::time::Duration::from_millis(sleep_ms));
+Ok(())
+} else {
+err!(NO_HINT);
+}
+}
+Some(user) => {
+let hint: Option<String> = user.password_hint;
+if CONFIG.mail_enabled() {
+mail::send_password_hint(email, hint)?;
+Ok(())
+} else if let Some(hint) = hint {
+err!(format!("Your password hint is: {}", hint));
+} else {
+err!(NO_HINT);
+}
+}
+}
 }

 #[derive(Deserialize)]
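The reworked `password_hint` handler above now responds for unknown addresses as well, and pads that branch with a randomized sleep so the response time does not reveal whether the mail-sending path ran. A self-contained sketch of just that delay, using the same 1000 ms base and ±100 ms jitter as the diff (requires the `rand` crate; function name is mine, not the project's):

```rust
use rand::{thread_rng, Rng};
use std::{thread, time::Duration};

// Sleep for roughly one second, jittered, so the "unknown email" branch takes
// about as long as the branch that actually sends a hint email.
fn randomized_delay() {
    let mut rng = thread_rng();
    let base: i32 = 1000; // milliseconds
    let delta: i32 = 100; // +/- jitter
    let sleep_ms = (base + rng.gen_range(-delta..=delta)) as u64; // 900..=1100 ms
    thread::sleep(Duration::from_millis(sleep_ms));
}

fn main() {
    // Simulate the "account not found" path: do no real work, but take a
    // comparable amount of time before answering.
    randomized_delay();
    println!("responded without revealing whether the account exists");
}
```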
@@ -1,12 +1,11 @@
 use std::collections::{HashMap, HashSet};
-use std::path::Path;
+use std::path::{Path, PathBuf};

 use chrono::{NaiveDateTime, Utc};
 use rocket::{http::ContentType, request::Form, Data, Route};
 use rocket_contrib::json::Json;
 use serde_json::Value;

-use data_encoding::HEXLOWER;
 use multipart::server::{save::SavedData, Multipart, SaveResult};

 use crate::{
@@ -39,8 +38,11 @@ pub fn routes() -> Vec<Route> {
 post_ciphers_admin,
 post_ciphers_create,
 post_ciphers_import,
-post_attachment,
-post_attachment_admin,
+get_attachment,
+post_attachment_v2,
+post_attachment_v2_data,
+post_attachment, // legacy
+post_attachment_admin, // legacy
 post_attachment_share,
 delete_attachment_post,
 delete_attachment_post_admin,
@@ -199,6 +201,7 @@ pub struct CipherData {
 Identity: Option<Value>,

 Favorite: Option<bool>,
+Reprompt: Option<i32>,

 PasswordHistory: Option<Value>,

@@ -265,7 +268,13 @@ fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn
 /// Called when creating a new user-owned cipher.
 #[post("/ciphers", data = "<data>")]
 fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
-let data: CipherData = data.into_inner().data;
+let mut data: CipherData = data.into_inner().data;
+
+// The web/browser clients set this field to null as expected, but the
+// mobile clients seem to set the invalid value `0001-01-01T00:00:00`,
+// which results in a warning message being logged. This field isn't
+// needed when creating a new cipher, so just ignore it unconditionally.
+data.LastKnownRevisionDate = None;

 let mut cipher = Cipher::new(data.Type, data.Name.clone());
 update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate)?;
@@ -319,12 +328,12 @@ pub fn update_cipher_from_data(
 }

 if let Some(org_id) = data.OrganizationId {
-match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
+match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn) {
 None => err!("You don't have permission to add item to organization"),
 Some(org_user) => {
 if shared_to_collection
 || org_user.has_full_access()
-|| cipher.is_write_accessible_to_user(&headers.user.uuid, &conn)
+|| cipher.is_write_accessible_to_user(&headers.user.uuid, conn)
 {
 cipher.organization_uuid = Some(org_id);
 // After some discussion in PR #1329 re-added the user_uuid = None again.
@@ -356,7 +365,7 @@ pub fn update_cipher_from_data(
 // Modify attachments name and keys when rotating
 if let Some(attachments) = data.Attachments2 {
 for (id, attachment) in attachments {
-let mut saved_att = match Attachment::find_by_id(&id, &conn) {
+let mut saved_att = match Attachment::find_by_id(&id, conn) {
 Some(att) => att,
 None => err!("Attachment doesn't exist"),
 };
@@ -371,7 +380,7 @@ pub fn update_cipher_from_data(
 saved_att.akey = Some(attachment.Key);
 saved_att.file_name = attachment.FileName;

-saved_att.save(&conn)?;
+saved_att.save(conn)?;
 }
 }

@@ -415,13 +424,14 @@ pub fn update_cipher_from_data(
 cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string());
 cipher.data = type_data.to_string();
 cipher.password_history = data.PasswordHistory.map(|f| f.to_string());
+cipher.reprompt = data.Reprompt;

-cipher.save(&conn)?;
-cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn)?;
-cipher.set_favorite(data.Favorite, &headers.user.uuid, &conn)?;
+cipher.save(conn)?;
+cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn)?;
+cipher.set_favorite(data.Favorite, &headers.user.uuid, conn)?;

 if ut != UpdateType::None {
-nt.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn));
+nt.send_cipher_update(ut, cipher, &cipher.update_users_revision(conn));
 }

 Ok(())
@@ -591,7 +601,7 @@ fn post_collections_admin(
 cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect();

 for collection in posted_collections.symmetric_difference(&current_collections) {
-match Collection::find_by_uuid(&collection, &conn) {
+match Collection::find_by_uuid(collection, &conn) {
 None => err!("Invalid collection ID provided"),
 Some(collection) => {
 if collection.is_writable_by_user(&headers.user.uuid, &conn) {
@@ -705,9 +715,9 @@ fn share_cipher_by_uuid(
 conn: &DbConn,
 nt: &Notify,
 ) -> JsonResult {
-let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+let mut cipher = match Cipher::find_by_uuid(uuid, conn) {
 Some(cipher) => {
-if cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+if cipher.is_write_accessible_to_user(&headers.user.uuid, conn) {
 cipher
 } else {
 err!("Cipher is not write accessible")
@@ -724,11 +734,11 @@ fn share_cipher_by_uuid(
 None => {}
 Some(organization_uuid) => {
 for uuid in &data.CollectionIds {
-match Collection::find_by_uuid_and_org(uuid, &organization_uuid, &conn) {
+match Collection::find_by_uuid_and_org(uuid, &organization_uuid, conn) {
 None => err!("Invalid collection ID provided"),
 Some(collection) => {
-if collection.is_writable_by_user(&headers.user.uuid, &conn) {
-CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?;
+if collection.is_writable_by_user(&headers.user.uuid, conn) {
+CollectionCipher::save(&cipher.uuid, &collection.uuid, conn)?;
 shared_to_collection = true;
 } else {
 err!("No rights to modify the collection")
@@ -742,45 +752,126 @@ fn share_cipher_by_uuid(
 update_cipher_from_data(
 &mut cipher,
 data.Cipher,
-&headers,
+headers,
 shared_to_collection,
-&conn,
-&nt,
+conn,
+nt,
 UpdateType::CipherUpdate,
 )?;

-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn)))
 }

-#[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
-fn post_attachment(
+/// v2 API for downloading an attachment. This just redirects the client to
+/// the actual location of an attachment.
+///
+/// Upstream added this v2 API to support direct download of attachments from
+/// their object storage service. For self-hosted instances, it basically just
+/// redirects to the same location as before the v2 API.
+#[get("/ciphers/<uuid>/attachment/<attachment_id>")]
+fn get_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+match Attachment::find_by_id(&attachment_id, &conn) {
+Some(attachment) if uuid == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))),
+Some(_) => err!("Attachment doesn't belong to cipher"),
+None => err!("Attachment doesn't exist"),
+}
+}
+
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct AttachmentRequestData {
+Key: String,
+FileName: String,
+FileSize: i32,
+// We check org owner/admin status via is_write_accessible_to_user(),
+// so we can just ignore this field.
+//
+// AdminRequest: bool,
+}
+
+enum FileUploadType {
+Direct = 0,
+// Azure = 1, // only used upstream
+}
+
+/// v2 API for creating an attachment associated with a cipher.
+/// This redirects the client to the API it should use to upload the attachment.
+/// For upstream's cloud-hosted service, it's an Azure object storage API.
+/// For self-hosted instances, it's another API on the local instance.
+#[post("/ciphers/<uuid>/attachment/v2", data = "<data>")]
+fn post_attachment_v2(
 uuid: String,
-data: Data,
-content_type: &ContentType,
+data: JsonUpcase<AttachmentRequestData>,
 headers: Headers,
 conn: DbConn,
-nt: Notify,
 ) -> JsonResult {
 let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+Some(cipher) => cipher,
+None => err!("Cipher doesn't exist"),
+};
+
+if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+err!("Cipher is not write accessible")
+}
+
+let attachment_id = crypto::generate_attachment_id();
+let data: AttachmentRequestData = data.into_inner().data;
+let attachment =
+Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, data.FileSize, Some(data.Key));
+attachment.save(&conn).expect("Error saving attachment");
+
+let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id);
+
+Ok(Json(json!({ // AttachmentUploadDataResponseModel
+"Object": "attachment-fileUpload",
+"AttachmentId": attachment_id,
+"Url": url,
+"FileUploadType": FileUploadType::Direct as i32,
+"CipherResponse": cipher.to_json(&headers.host, &headers.user.uuid, &conn),
+"CipherMiniResponse": null,
+})))
+}
+
+/// Saves the data content of an attachment to a file. This is common code
+/// shared between the v2 and legacy attachment APIs.
+///
+/// When used with the legacy API, this function is responsible for creating
+/// the attachment database record, so `attachment` is None.
+///
+/// When used with the v2 API, post_attachment_v2() has already created the
+/// database record, which is passed in as `attachment`.
+fn save_attachment(
+mut attachment: Option<Attachment>,
+cipher_uuid: String,
+data: Data,
+content_type: &ContentType,
+headers: &Headers,
+conn: &DbConn,
+nt: Notify,
+) -> Result<Cipher, crate::error::Error> {
+let cipher = match Cipher::find_by_uuid(&cipher_uuid, conn) {
 Some(cipher) => cipher,
 None => err_discard!("Cipher doesn't exist", data),
 };

-if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) {
 err_discard!("Cipher is not write accessible", data)
 }

-let mut params = content_type.params();
-let boundary_pair = params.next().expect("No boundary provided");
-let boundary = boundary_pair.1;
+// In the v2 API, the attachment record has already been created,
+// so the size limit needs to be adjusted to account for that.
+let size_adjust = match &attachment {
+None => 0, // Legacy API
+Some(a) => a.file_size as i64, // v2 API
+};

 let size_limit = if let Some(ref user_uuid) = cipher.user_uuid {
 match CONFIG.user_attachment_limit() {
 Some(0) => err_discard!("Attachments are disabled", data),
 Some(limit_kb) => {
-let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, &conn);
+let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, conn) + size_adjust;
 if left <= 0 {
-err_discard!("Attachment size limit reached! Delete some files to open space", data)
+err_discard!("Attachment storage limit reached! Delete some attachments to free up space", data)
 }
 Some(left as u64)
 }
@@ -790,9 +881,9 @@ fn post_attachment(
 match CONFIG.org_attachment_limit() {
 Some(0) => err_discard!("Attachments are disabled", data),
 Some(limit_kb) => {
-let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, &conn);
+let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, conn) + size_adjust;
 if left <= 0 {
-err_discard!("Attachment size limit reached! Delete some files to open space", data)
+err_discard!("Attachment storage limit reached! Delete some attachments to free up space", data)
 }
 Some(left as u64)
 }
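The `size_adjust` term above exists because, in the v2 flow, `post_attachment_v2` has already inserted an attachment row with the client-declared size before any file bytes arrive, so that declared size is already counted by `Attachment::size_by_user` / `size_by_org`. A small worked example of the arithmetic only — the limit and sizes are made-up numbers, not values from the project:

```rust
// left = (limit_kb * 1024) - already_used + size_adjust, as in the diff above.
fn remaining_quota(limit_kb: i64, already_used: i64, size_adjust: i64) -> i64 {
    (limit_kb * 1024) - already_used + size_adjust
}

fn main() {
    let limit_kb: i64 = 1024; // hypothetical 1 MiB per-user limit
    let used: i64 = 900 * 1024; // bytes on record, including the pending v2 row
    let pending_v2_size: i64 = 200 * 1024; // size declared via post_attachment_v2

    // Legacy API: the upload has no database row yet, so no adjustment is needed.
    assert_eq!(remaining_quota(limit_kb, used - pending_v2_size, 0), 331_776);
    // v2 API: the pending row already counts toward `used`, so add it back.
    assert_eq!(remaining_quota(limit_kb, used, pending_v2_size), 331_776);
    println!("both paths leave the same headroom for the actual upload");
}
```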
@@ -802,7 +893,12 @@ fn post_attachment(
         err_discard!("Cipher is neither owned by a user nor an organization", data);
     };
 
-    let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher.uuid);
+    let mut params = content_type.params();
+    let boundary_pair = params.next().expect("No boundary provided");
+    let boundary = boundary_pair.1;
+
+    let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher_uuid);
+    let mut path = PathBuf::new();
 
     let mut attachment_key = None;
     let mut error = None;
@@ -818,35 +914,81 @@ fn post_attachment(
                 }
             }
             "data" => {
-                // This is provided by the client, don't trust it
-                let name = field.headers.filename.expect("No filename provided");
+                // In the legacy API, this is the encrypted filename
+                // provided by the client, stored to the database as-is.
+                // In the v2 API, this value doesn't matter, as it was
+                // already provided and stored via an earlier API call.
+                let encrypted_filename = field.headers.filename;
 
-                let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
-                let path = base_path.join(&file_name);
+                // This random ID is used as the name of the file on disk.
+                // In the legacy API, we need to generate this value here.
+                // In the v2 API, we use the value from post_attachment_v2().
+                let file_id = match &attachment {
+                    Some(attachment) => attachment.id.clone(), // v2 API
+                    None => crypto::generate_attachment_id(), // Legacy API
+                };
+                path = base_path.join(&file_id);
 
                 let size =
                     match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
                         SaveResult::Full(SavedData::File(_, size)) => size as i32,
                         SaveResult::Full(other) => {
-                            std::fs::remove_file(path).ok();
                             error = Some(format!("Attachment is not a file: {:?}", other));
                             return;
                         }
                         SaveResult::Partial(_, reason) => {
-                            std::fs::remove_file(path).ok();
-                            error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason));
+                            error = Some(format!("Attachment storage limit exceeded with this file: {:?}", reason));
                            return;
                         }
                         SaveResult::Error(e) => {
-                            std::fs::remove_file(path).ok();
                             error = Some(format!("Error: {:?}", e));
                             return;
                         }
                     };
 
-                let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
-                attachment.akey = attachment_key.clone();
-                attachment.save(&conn).expect("Error saving attachment");
+                if let Some(attachment) = &mut attachment {
+                    // v2 API
+
+                    // Check the actual size against the size initially provided by
+                    // the client. Upstream allows +/- 1 MiB deviation from this
+                    // size, but it's not clear when or why this is needed.
+                    const LEEWAY: i32 = 1024 * 1024; // 1 MiB
+                    let min_size = attachment.file_size - LEEWAY;
+                    let max_size = attachment.file_size + LEEWAY;
+
+                    if min_size <= size && size <= max_size {
+                        if size != attachment.file_size {
+                            // Update the attachment with the actual file size.
+                            attachment.file_size = size;
+                            attachment.save(conn).expect("Error updating attachment");
+                        }
+                    } else {
+                        attachment.delete(conn).ok();
+
+                        let err_msg = "Attachment size mismatch".to_string();
+                        error!("{} (expected within [{}, {}], got {})", err_msg, min_size, max_size, size);
+                        error = Some(err_msg);
+                    }
+                } else {
+                    // Legacy API
+
+                    if encrypted_filename.is_none() {
+                        error = Some("No filename provided".to_string());
+                        return;
+                    }
+                    if attachment_key.is_none() {
+                        error = Some("No attachment key provided".to_string());
+                        return;
+                    }
+                    let attachment = Attachment::new(
+                        file_id,
+                        cipher_uuid.clone(),
+                        encrypted_filename.unwrap(),
+                        size,
+                        attachment_key.clone(),
+                    );
+                    attachment.save(conn).expect("Error saving attachment");
+                }
             }
             _ => error!("Invalid multipart name"),
         }
@@ -854,10 +996,55 @@ fn post_attachment(
     .expect("Error processing multipart data");
 
     if let Some(ref e) = error {
+        std::fs::remove_file(path).ok();
         err!(e);
     }
 
-    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn));
 
+    Ok(cipher)
+}
+
+/// v2 API for uploading the actual data content of an attachment.
+/// This route needs a rank specified so that Rocket prioritizes the
+/// /ciphers/<uuid>/attachment/v2 route, which would otherwise conflict
+/// with this one.
+#[post("/ciphers/<uuid>/attachment/<attachment_id>", format = "multipart/form-data", data = "<data>", rank = 1)]
+fn post_attachment_v2_data(
+    uuid: String,
+    attachment_id: String,
+    data: Data,
+    content_type: &ContentType,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
+    let attachment = match Attachment::find_by_id(&attachment_id, &conn) {
+        Some(attachment) if uuid == attachment.cipher_uuid => Some(attachment),
+        Some(_) => err!("Attachment doesn't belong to cipher"),
+        None => err!("Attachment doesn't exist"),
+    };
+
+    save_attachment(attachment, uuid, data, content_type, &headers, &conn, nt)?;
+
+    Ok(())
+}
+
+/// Legacy API for creating an attachment associated with a cipher.
+#[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
+fn post_attachment(
+    uuid: String,
+    data: Data,
+    content_type: &ContentType,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> JsonResult {
+    // Setting this as None signifies to save_attachment() that it should create
+    // the attachment database record as well as saving the data to disk.
+    let attachment = None;
+
+    let cipher = save_attachment(attachment, uuid, data, content_type, &headers, &conn, nt)?;
+
     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
 }
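
The doc comment on `post_attachment_v2_data` notes that Rocket would otherwise treat it as conflicting with the `/ciphers/<uuid>/attachment/v2` metadata route, since the literal `v2` segment also matches the dynamic `<attachment_id>` segment. A hedged sketch of that disambiguation in Rocket 0.4-style syntax (the handler bodies here are placeholders, not Vaultwarden's code):

```rust
// Sketch only: shows how an explicit `rank` lets two overlapping POST routes
// coexist. A request to /ciphers/<uuid>/attachment/v2 is matched by both
// paths, so the catch-all route is given an explicitly larger rank and is
// therefore only tried if the literal /v2 route does not handle the request.
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;

#[post("/ciphers/<uuid>/attachment/v2")]
fn post_attachment_v2(uuid: String) -> String {
    format!("create attachment metadata for cipher {}", uuid)
}

// Without `rank = 1`, Rocket reports a collision with the route above.
#[post("/ciphers/<uuid>/attachment/<attachment_id>", rank = 1)]
fn post_attachment_v2_data(uuid: String, attachment_id: String) -> String {
    format!("upload data for attachment {} of cipher {}", attachment_id, uuid)
}

fn main() {
    rocket::ignite()
        .mount("/", routes![post_attachment_v2, post_attachment_v2_data])
        .launch();
}
```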
@@ -1122,22 +1309,22 @@ fn delete_all(
 }
 
 fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_delete: bool, nt: &Notify) -> EmptyResult {
-    let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+    let mut cipher = match Cipher::find_by_uuid(uuid, conn) {
         Some(cipher) => cipher,
         None => err!("Cipher doesn't exist"),
     };
 
-    if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+    if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) {
         err!("Cipher can't be deleted by user")
     }
 
     if soft_delete {
         cipher.deleted_at = Some(Utc::now().naive_utc());
-        cipher.save(&conn)?;
-        nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+        cipher.save(conn)?;
+        nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn));
     } else {
-        cipher.delete(&conn)?;
-        nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
+        cipher.delete(conn)?;
+        nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(conn));
     }
 
     Ok(())
@@ -1170,20 +1357,20 @@ fn _delete_multiple_ciphers(
 }
 
 fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> JsonResult {
-    let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+    let mut cipher = match Cipher::find_by_uuid(uuid, conn) {
         Some(cipher) => cipher,
         None => err!("Cipher doesn't exist"),
     };
 
-    if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+    if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) {
         err!("Cipher can't be restored by user")
     }
 
     cipher.deleted_at = None;
-    cipher.save(&conn)?;
+    cipher.save(conn)?;
 
-    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
+    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn));
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn)))
 }
 
 fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: &Headers, conn: &DbConn, nt: &Notify) -> JsonResult {
@@ -1219,7 +1406,7 @@ fn _delete_cipher_attachment_by_id(
     conn: &DbConn,
     nt: &Notify,
 ) -> EmptyResult {
-    let attachment = match Attachment::find_by_id(&attachment_id, &conn) {
+    let attachment = match Attachment::find_by_id(attachment_id, conn) {
         Some(attachment) => attachment,
         None => err!("Attachment doesn't exist"),
     };
@@ -1228,17 +1415,17 @@ fn _delete_cipher_attachment_by_id(
         err!("Attachment from other cipher")
     }
 
-    let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+    let cipher = match Cipher::find_by_uuid(uuid, conn) {
         Some(cipher) => cipher,
         None => err!("Cipher doesn't exist"),
     };
 
-    if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+    if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) {
         err!("Cipher cannot be deleted by user")
     }
 
     // Delete attachment
-    attachment.delete(&conn)?;
-    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+    attachment.delete(conn)?;
+    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn));
     Ok(())
 }

src/api/core/emergency_access.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
+use rocket::Route;
+use rocket_contrib::json::Json;
+
+use crate::{api::JsonResult, auth::Headers, db::DbConn};
+
+pub fn routes() -> Vec<Route> {
+    routes![get_contacts,]
+}
+
+/// This endpoint is expected to return at least something.
+/// If we return an error message that will trigger error toasts for the user.
+/// To prevent this we just return an empty json result with no Data.
+/// When this feature is going to be implemented it also needs to return this empty Data
+/// instead of throwing an error/4XX unless it really is an error.
+#[get("/emergency-access/trusted")]
+fn get_contacts(_headers: Headers, _conn: DbConn) -> JsonResult {
+    debug!("Emergency access is not supported.");
+
+    Ok(Json(json!({
+        "Data": [],
+        "Object": "list",
+        "ContinuationToken": null
+    })))
+}

@@ -1,5 +1,6 @@
 mod accounts;
 mod ciphers;
+mod emergency_access;
 mod folders;
 mod organizations;
 mod sends;
@@ -15,6 +16,7 @@ pub fn routes() -> Vec<Route> {
     let mut routes = Vec::new();
     routes.append(&mut accounts::routes());
     routes.append(&mut ciphers::routes());
+    routes.append(&mut emergency_access::routes());
     routes.append(&mut folders::routes());
     routes.append(&mut organizations::routes());
     routes.append(&mut two_factor::routes());
@@ -27,7 +29,6 @@ pub fn routes() -> Vec<Route> {
 //
 // Move this somewhere else
 //
-use rocket::response::Response;
 use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;
@@ -41,7 +42,7 @@ use crate::{
 };
 
 #[put("/devices/identifier/<uuid>/clear-token")]
-fn clear_device_token<'a>(uuid: String) -> Response<'a> {
+fn clear_device_token(uuid: String) -> &'static str {
     // This endpoint doesn't have auth header
 
     let _ = uuid;
@@ -50,7 +51,7 @@ fn clear_device_token<'a>(uuid: String) -> Response<'a> {
     // This only clears push token
     // https://github.com/bitwarden/core/blob/master/src/Api/Controllers/DevicesController.cs#L109
     // https://github.com/bitwarden/core/blob/master/src/Core/Services/Implementations/DeviceService.cs#L37
-    Response::new()
+    ""
 }
 
 #[put("/devices/identifier/<uuid>/token", data = "<data>")]
@@ -51,6 +51,7 @@ pub fn routes() -> Vec<Route> {
         get_plans,
         get_plans_tax_rates,
         import,
+        post_org_keys,
     ]
 }
 
@@ -61,6 +62,7 @@ struct OrgData {
     CollectionName: String,
     Key: String,
     Name: String,
+    Keys: Option<OrgKeyData>,
     #[serde(rename = "PlanType")]
     _PlanType: NumberOrString, // Ignored, always use the same plan
 }
@@ -78,6 +80,13 @@ struct NewCollectionData {
     Name: String,
 }
 
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct OrgKeyData {
+    EncryptedPrivateKey: String,
+    PublicKey: String,
+}
+
 #[post("/organizations", data = "<data>")]
 fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn) -> JsonResult {
     if !CONFIG.is_org_creation_allowed(&headers.user.email) {
@@ -85,8 +94,14 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
     }
 
     let data: OrgData = data.into_inner().data;
+    let (private_key, public_key) = if data.Keys.is_some() {
+        let keys: OrgKeyData = data.Keys.unwrap();
+        (Some(keys.EncryptedPrivateKey), Some(keys.PublicKey))
+    } else {
+        (None, None)
+    };
 
-    let org = Organization::new(data.Name, data.BillingEmail);
+    let org = Organization::new(data.Name, data.BillingEmail, private_key, public_key);
     let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone());
     let collection = Collection::new(org.uuid.clone(), data.CollectionName);
 
@@ -397,7 +412,7 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: ManagerHeader
         .map(|col_user| {
             UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn)
                 .unwrap()
-                .to_json_user_access_restrictions(&col_user)
+                .to_json_user_access_restrictions(col_user)
         })
         .collect();
 
@@ -468,6 +483,32 @@ fn get_org_users(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) ->
     }))
 }
 
+#[post("/organizations/<org_id>/keys", data = "<data>")]
+fn post_org_keys(org_id: String, data: JsonUpcase<OrgKeyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+    let data: OrgKeyData = data.into_inner().data;
+
+    let mut org = match Organization::find_by_uuid(&org_id, &conn) {
+        Some(organization) => {
+            if organization.private_key.is_some() && organization.public_key.is_some() {
+                err!("Organization Keys already exist")
+            }
+            organization
+        }
+        None => err!("Can't find organization details"),
+    };
+
+    org.private_key = Some(data.EncryptedPrivateKey);
+    org.public_key = Some(data.PublicKey);
+
+    org.save(&conn)?;
+
+    Ok(Json(json!({
+        "Object": "organizationKeys",
+        "PublicKey": org.public_key,
+        "PrivateKey": org.private_key,
+    })))
+}
+
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct CollectionData {
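
Per the hunks above, a new organization now gets its key pair either from the optional `Keys` field at creation time or, for organizations created before this change, from a one-time call to `POST /organizations/<org_id>/keys`, which refuses to overwrite existing keys. A minimal sketch of the JSON shapes involved, matching the `OrgKeyData` struct and the `json!` response in the diff (the key values are placeholders, not real key material):

```rust
// Sketch of the request/response bodies for POST /organizations/<org_id>/keys.
use serde_json::json;

fn main() {
    // Request body, matching the OrgKeyData struct above.
    let request_body = json!({
        "EncryptedPrivateKey": "2.placeholder-encrypted-private-key",
        "PublicKey": "placeholder-public-key",
    });

    // Response body, matching the json! block in post_org_keys.
    let response_body = json!({
        "Object": "organizationKeys",
        "PublicKey": "placeholder-public-key",
        "PrivateKey": "2.placeholder-encrypted-private-key",
    });

    println!("request:  {}", request_body);
    println!("response: {}", response_body);
}
```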
@@ -504,13 +545,13 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
     } else {
         UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
     };
-    let user = match User::find_by_mail(&email, &conn) {
+    let user = match User::find_by_mail(email, &conn) {
         None => {
             if !CONFIG.invitations_allowed() {
                 err!(format!("User does not exist: {}", email))
             }
 
-            if !CONFIG.is_email_domain_allowed(&email) {
+            if !CONFIG.is_email_domain_allowed(email) {
                 err!("Email domain not eligible for invitations")
             }
 
@@ -560,7 +601,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
     };
 
     mail::send_invite(
-        &email,
+        email,
         &user.uuid,
         Some(org_id.clone()),
         Some(new_user.uuid),
@@ -630,7 +671,7 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
     // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead
     let data: AcceptData = data.into_inner().data;
     let token = &data.Token;
-    let claims = decode_invite(&token)?;
+    let claims = decode_invite(token)?;
 
     match User::find_by_mail(&claims.email, &conn) {
         Some(_) => {
@@ -646,6 +687,19 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
                 err!("User already accepted the invitation")
             }
 
+            let user_twofactor_disabled = TwoFactor::find_by_user(&user_org.user_uuid, &conn).is_empty();
+
+            let policy = OrgPolicyType::TwoFactorAuthentication as i32;
+            let org_twofactor_policy_enabled =
+                match OrgPolicy::find_by_org_and_type(&user_org.org_uuid, policy, &conn) {
+                    Some(p) => p.enabled,
+                    None => false,
+                };
+
+            if org_twofactor_policy_enabled && user_twofactor_disabled {
+                err!("You cannot join this organization until you enable two-step login on your user account.")
+            }
+
             user_org.status = UserOrgStatus::Accepted as i32;
             user_org.save(&conn)?;
         }
@@ -656,7 +710,7 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
     if CONFIG.mail_enabled() {
         let mut org_name = CONFIG.invitation_org_name();
         if let Some(org_id) = &claims.org_id {
-            org_name = match Organization::find_by_uuid(&org_id, &conn) {
+            org_name = match Organization::find_by_uuid(org_id, &conn) {
                 Some(org) => org.name,
                 None => err!("Organization not found."),
             };
@@ -998,6 +1052,24 @@ fn put_policy(
         None => err!("Invalid policy type"),
     };
 
+    if pol_type_enum == OrgPolicyType::TwoFactorAuthentication && data.enabled {
+        let org_list = UserOrganization::find_by_org(&org_id, &conn);
+
+        for user_org in org_list.into_iter() {
+            let user_twofactor_disabled = TwoFactor::find_by_user(&user_org.user_uuid, &conn).is_empty();
+
+            if user_twofactor_disabled && user_org.atype < UserOrgType::Admin {
+                if CONFIG.mail_enabled() {
+                    let org = Organization::find_by_uuid(&user_org.org_uuid, &conn).unwrap();
+                    let user = User::find_by_uuid(&user_org.user_uuid, &conn).unwrap();
+
+                    mail::send_2fa_removed_from_org(&user.email, &org.name)?;
+                }
+                user_org.delete(&conn)?;
+            }
+        }
+    }
+
     let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) {
         Some(p) => p,
         None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()),
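
The `put_policy` hunk above enforces the TwoFactorAuthentication policy at the moment it is switched on: members with no two-step login configured and a role below Admin are removed from the organization, with a notification mail when mail is enabled. A compact, dependency-free sketch of that membership rule (the enum below is an illustrative stand-in, not Vaultwarden's `UserOrgType` model, whose discriminants and ordering are defined in `db::models`):

```rust
// Sketch of the check applied per member when the 2FA policy is enabled.
#[allow(dead_code)]
#[derive(PartialEq, PartialOrd)]
enum MemberRole {
    // Ordered from least to most privileged for this sketch only.
    User,
    Manager,
    Admin,
    Owner,
}

/// A member is removed only if they have no 2FA configured *and* they are
/// below Admin; admins and owners are kept so they can fix the organization.
fn should_be_removed(role: MemberRole, has_any_twofactor: bool) -> bool {
    !has_any_twofactor && role < MemberRole::Admin
}

fn main() {
    assert!(should_be_removed(MemberRole::User, false));
    assert!(!should_be_removed(MemberRole::Admin, false));
    assert!(!should_be_removed(MemberRole::User, true));
}
```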
@@ -2,7 +2,7 @@ use std::{io::Read, path::Path};
 
 use chrono::{DateTime, Duration, Utc};
 use multipart::server::{save::SavedData, Multipart, SaveResult};
-use rocket::{http::ContentType, Data};
+use rocket::{http::ContentType, response::NamedFile, Data};
 use rocket_contrib::json::Json;
 use serde_json::Value;
 
@@ -10,13 +10,23 @@ use crate::{
     api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
     auth::{Headers, Host},
     db::{models::*, DbConn, DbPool},
+    util::SafeString,
     CONFIG,
 };
 
 const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
 
 pub fn routes() -> Vec<rocket::Route> {
-    routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password]
+    routes![
+        post_send,
+        post_send_file,
+        post_access,
+        post_access_file,
+        put_send,
+        delete_send,
+        put_remove_password,
+        download_send
+    ]
 }
 
 pub fn purge_sends(pool: DbPool) {
@@ -38,6 +48,7 @@ pub struct SendData {
     pub ExpirationDate: Option<DateTime<Utc>>,
     pub DeletionDate: DateTime<Utc>,
     pub Disabled: bool,
+    pub HideEmail: Option<bool>,
 
     // Data field
     pub Name: String,
@@ -51,15 +62,36 @@ pub struct SendData {
 /// modify existing ones, but is allowed to delete them.
 ///
 /// Ref: https://bitwarden.com/help/article/policies/#disable-send
+///
+/// There is also a Vaultwarden-specific `sends_allowed` config setting that
+/// controls this policy globally.
 fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult {
     let user_uuid = &headers.user.uuid;
     let policy_type = OrgPolicyType::DisableSend;
-    if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
+    if !CONFIG.sends_allowed() || OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
         err!("Due to an Enterprise Policy, you are only able to delete an existing Send.")
     }
     Ok(())
 }
 
+/// Enforces the `DisableHideEmail` option of the `Send Options` policy.
+/// A non-owner/admin user belonging to an org with this option enabled isn't
+/// allowed to hide their email address from the recipient of a Bitwarden Send,
+/// but is allowed to remove this option from an existing Send.
+///
+/// Ref: https://bitwarden.com/help/article/policies/#send-options
+fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &DbConn) -> EmptyResult {
+    let user_uuid = &headers.user.uuid;
+    let hide_email = data.HideEmail.unwrap_or(false);
+    if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn) {
+        err!(
+            "Due to an Enterprise Policy, you are not allowed to hide your email address \
+            from recipients when creating or editing a Send."
+        )
+    }
+    Ok(())
+}
+
 fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
     let data_val = if data.Type == SendType::Text as i32 {
         data.Text
@@ -88,6 +120,7 @@ fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
     send.max_access_count = data.MaxAccessCount;
     send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
     send.disabled = data.Disabled;
+    send.hide_email = data.HideEmail;
     send.atype = data.Type;
 
     send.set_password(data.Password.as_deref());
@@ -100,6 +133,7 @@ fn post_send(data: JsonUpcase<SendData>, headers: Headers, conn: DbConn, nt: Not
     enforce_disable_send_policy(&headers, &conn)?;
 
     let data: SendData = data.into_inner().data;
+    enforce_disable_hide_email_policy(&data, &headers, &conn)?;
 
     if data.Type == SendType::File as i32 {
         err!("File sends should use /api/sends/file")
@@ -130,25 +164,26 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
     let mut buf = String::new();
     model_entry.data.read_to_string(&mut buf)?;
     let data = serde_json::from_str::<crate::util::UpCase<SendData>>(&buf)?;
+    enforce_disable_hide_email_policy(&data.data, &headers, &conn)?;
 
-    // Get the file length and add an extra 10% to avoid issues
-    const SIZE_110_MB: u64 = 115_343_360;
+    // Get the file length and add an extra 5% to avoid issues
+    const SIZE_525_MB: u64 = 550_502_400;
 
     let size_limit = match CONFIG.user_attachment_limit() {
         Some(0) => err!("File uploads are disabled"),
         Some(limit_kb) => {
             let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &conn);
             if left <= 0 {
-                err!("Attachment size limit reached! Delete some files to open space")
+                err!("Attachment storage limit reached! Delete some attachments to free up space")
             }
-            std::cmp::Ord::max(left as u64, SIZE_110_MB)
+            std::cmp::Ord::max(left as u64, SIZE_525_MB)
         }
-        None => SIZE_110_MB,
+        None => SIZE_525_MB,
     };
 
     // Create the Send
     let mut send = create_send(data.data, headers.user.uuid.clone())?;
-    let file_id: String = data_encoding::HEXLOWER.encode(&crate::crypto::get_random(vec![0; 32]));
+    let file_id = crate::crypto::generate_send_id();
 
     if send.atype != SendType::File as i32 {
         err!("Send content is not a file");
@@ -171,7 +206,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn
         }
         SaveResult::Partial(_, reason) => {
             std::fs::remove_file(&file_path).ok();
-            err!(format!("Attachment size limit exceeded with this file: {:?}", reason));
+            err!(format!("Attachment storage limit exceeded with this file: {:?}", reason));
         }
         SaveResult::Error(e) => {
             std::fs::remove_file(&file_path).ok();
@@ -243,7 +278,7 @@ fn post_access(access_id: String, data: JsonUpcase<SendAccessData>, conn: DbConn
 
     send.save(&conn)?;
 
-    Ok(Json(send.to_json_access()))
+    Ok(Json(send.to_json_access(&conn)))
 }
 
 #[post("/sends/<send_id>/access/file/<file_id>", data = "<data>")]
@@ -291,18 +326,31 @@ fn post_access_file(
 
     send.save(&conn)?;
 
+    let token_claims = crate::auth::generate_send_claims(&send_id, &file_id);
+    let token = crate::auth::encode_jwt(&token_claims);
     Ok(Json(json!({
         "Object": "send-fileDownload",
         "Id": file_id,
-        "Url": format!("{}/sends/{}/{}", &host.host, send_id, file_id)
+        "Url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
     })))
 }
 
+#[get("/sends/<send_id>/<file_id>?<t>")]
+fn download_send(send_id: SafeString, file_id: SafeString, t: String) -> Option<NamedFile> {
+    if let Ok(claims) = crate::auth::decode_send(&t) {
+        if claims.sub == format!("{}/{}", send_id, file_id) {
+            return NamedFile::open(Path::new(&CONFIG.sends_folder()).join(send_id).join(file_id)).ok();
+        }
+    }
+    None
+}
+
 #[put("/sends/<id>", data = "<data>")]
 fn put_send(id: String, data: JsonUpcase<SendData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
     enforce_disable_send_policy(&headers, &conn)?;
 
     let data: SendData = data.into_inner().data;
+    enforce_disable_hide_email_policy(&data, &headers, &conn)?;
 
     let mut send = match Send::find_by_uuid(&id, &conn) {
         Some(s) => s,
@@ -340,6 +388,7 @@ fn put_send(id: String, data: JsonUpcase<SendData>, headers: Headers, conn: DbCo
     send.notes = data.Notes;
     send.max_access_count = data.MaxAccessCount;
    send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
+    send.hide_email = data.HideEmail;
     send.disabled = data.Disabled;
 
     // Only change the value if it's present
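
The `post_access_file`/`download_send` pair above replaces the previous unauthenticated download URL with one that carries a short JWT in the `?t=` query parameter; `download_send` only serves the file when the token's `sub` claim matches `"<send_id>/<file_id>"`. A small sketch of that check with an illustrative claims type (Vaultwarden's real `generate_send_claims`/`decode_send` helpers live in `src/auth.rs` and are not shown in this diff):

```rust
// Sketch of the check download_send performs on the ?t= token.
// SendFileClaims and is_download_allowed are stand-ins for illustration.
struct SendFileClaims {
    // Subject is expected to be "<send_id>/<file_id>".
    sub: String,
}

fn is_download_allowed(claims: &SendFileClaims, send_id: &str, file_id: &str) -> bool {
    // The token issued by post_access_file is bound to exactly one file of
    // one Send, so a token for another Send (or another file) is rejected.
    claims.sub == format!("{}/{}", send_id, file_id)
}

fn main() {
    let claims = SendFileClaims { sub: "abc123/file42".to_string() };
    assert!(is_download_allowed(&claims, "abc123", "file42"));
    assert!(!is_download_allowed(&claims, "abc123", "file99"));
    println!("download URL shape: /api/sends/abc123/file42?t=<jwt>");
}
```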
@@ -114,7 +114,7 @@ pub fn validate_totp_code_str(
         _ => err!("TOTP code is not a number"),
     };
 
-    validate_totp_code(user_uuid, totp_code, secret, ip, &conn)
+    validate_totp_code(user_uuid, totp_code, secret, ip, conn)
 }
 
 pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &ClientIp, conn: &DbConn) -> EmptyResult {
@@ -125,7 +125,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
         Err(_) => err!("Invalid TOTP secret"),
     };
 
-    let mut twofactor = match TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Authenticator as i32, &conn) {
+    let mut twofactor = match TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Authenticator as i32, conn) {
         Some(tf) => tf,
         _ => TwoFactor::new(user_uuid.to_string(), TwoFactorType::Authenticator, secret.to_string()),
     };
@@ -156,7 +156,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
         // Save the last used time step so only totp time steps higher then this one are allowed.
         // This will also save a newly created twofactor if the code is correct.
         twofactor.last_used = time_step as i32;
-        twofactor.save(&conn)?;
+        twofactor.save(conn)?;
         return Ok(());
     } else if generated == totp_code && time_step <= twofactor.last_used as i64 {
         warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
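
The authenticator hunk above persists `twofactor.last_used` and rejects any code whose time step is not strictly newer than the last accepted one, which is what stops a captured TOTP code from being replayed within the allowed window. A minimal sketch of that acceptance rule (a simplification of the diff: the real code also checks a small window of neighbouring steps and stores `last_used` as `i32`):

```rust
// Sketch of the replay check around `twofactor.last_used` above.
// `time_step` would normally be unix_time / 30 (the RFC 6238 default step).
struct TotpState {
    last_used: i64, // last accepted time step, persisted in the database
}

/// Accept a code only if its time step is strictly newer than the last one
/// that was accepted; on success, remember the new step.
fn accept_time_step(state: &mut TotpState, time_step: i64) -> bool {
    if time_step > state.last_used {
        state.last_used = time_step;
        true
    } else {
        // Same or older step: this (or a neighbouring) code was already used.
        false
    }
}

fn main() {
    let mut state = TotpState { last_used: 0 };
    assert!(accept_time_step(&mut state, 54_321_000)); // first use is accepted
    assert!(!accept_time_step(&mut state, 54_321_000)); // replay is rejected
    assert!(accept_time_step(&mut state, 54_321_001)); // next step is fine
}
```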
@@ -226,7 +226,7 @@ fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus {
     let type_ = TwoFactorType::Duo as i32;
 
     // If the user doesn't have an entry, disabled
-    let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, &conn) {
+    let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, conn) {
         Some(t) => t,
         None => return DuoStatus::Disabled(DuoData::global().is_some()),
     };
@@ -247,8 +247,8 @@ fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus {
 
 // let (ik, sk, ak, host) = get_duo_keys();
 fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, String, String, String)> {
-    let data = User::find_by_mail(email, &conn)
-        .and_then(|u| get_user_duo_data(&u.uuid, &conn).data())
+    let data = User::find_by_mail(email, conn)
+        .and_then(|u| get_user_duo_data(&u.uuid, conn).data())
         .or_else(DuoData::global)
         .map_res("Can't fetch Duo keys")?;
 
@@ -343,7 +343,7 @@ fn parse_duo_values(key: &str, val: &str, ikey: &str, prefix: &str, time: i64) -
         err!("Invalid ikey")
     }
 
-    let expire = match expire.parse() {
+    let expire: i64 = match expire.parse() {
         Ok(e) => e,
         Err(_) => err!("Invalid expire time"),
     };
@@ -56,14 +56,14 @@ fn send_email_login(data: JsonUpcase<SendEmailLoginData>, conn: DbConn) -> Empty
 /// Generate the token, save the data for later verification and send email to user
 pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
     let type_ = TwoFactorType::Email as i32;
-    let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, &conn).map_res("Two factor not found")?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, conn).map_res("Two factor not found")?;
 
     let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
 
     let mut twofactor_data = EmailTokenData::from_json(&twofactor.data)?;
     twofactor_data.set_token(generated_token);
     twofactor.data = twofactor_data.to_json();
-    twofactor.save(&conn)?;
+    twofactor.save(conn)?;
 
     mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;
 
@@ -181,8 +181,8 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
 
 /// Validate the email code when used as TwoFactor token mechanism
 pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
-    let mut email_data = EmailTokenData::from_json(&data)?;
-    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)
+    let mut email_data = EmailTokenData::from_json(data)?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn)
         .map_res("Two factor not found")?;
     let issued_token = match &email_data.last_token {
         Some(t) => t,
@@ -195,14 +195,14 @@ pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &
             email_data.reset_token();
         }
         twofactor.data = email_data.to_json();
-        twofactor.save(&conn)?;
+        twofactor.save(conn)?;
 
         err!("Token is invalid")
     }
 
     email_data.reset_token();
     twofactor.data = email_data.to_json();
-    twofactor.save(&conn)?;
+    twofactor.save(conn)?;
 
     let date = NaiveDateTime::from_timestamp(email_data.token_sent, 0);
     let max_time = CONFIG.email_expiration_time() as i64;
@@ -255,7 +255,7 @@ impl EmailTokenData {
     }
 
     pub fn from_json(string: &str) -> Result<EmailTokenData, Error> {
-        let res: Result<EmailTokenData, crate::serde_json::Error> = serde_json::from_str(&string);
+        let res: Result<EmailTokenData, crate::serde_json::Error> = serde_json::from_str(string);
         match res {
             Ok(x) => Ok(x),
             Err(_) => err!("Could not decode EmailTokenData from string"),
@@ -292,7 +292,7 @@ mod tests {
     fn test_obscure_email_long() {
         let email = "bytes@example.ext";
 
-        let result = obscure_email(&email);
+        let result = obscure_email(email);
 
         // Only first two characters should be visible.
         assert_eq!(result, "by***@example.ext");
@@ -302,7 +302,7 @@ mod tests {
     fn test_obscure_email_short() {
         let email = "byt@example.ext";
 
-        let result = obscure_email(&email);
+        let result = obscure_email(email);
 
         // If it's smaller than 3 characters it should only show asterisks.
         assert_eq!(result, "***@example.ext");
@@ -7,16 +7,15 @@ use crate::{
     api::{JsonResult, JsonUpcase, NumberOrString, PasswordData},
     auth::Headers,
     crypto,
-    db::{
-        models::{TwoFactor, User},
-        DbConn,
-    },
+    db::{models::*, DbConn},
+    mail, CONFIG,
 };
 
 pub mod authenticator;
 pub mod duo;
 pub mod email;
 pub mod u2f;
+pub mod webauthn;
 pub mod yubikey;
 
 pub fn routes() -> Vec<Route> {
@@ -26,6 +25,7 @@ pub fn routes() -> Vec<Route> {
     routes.append(&mut duo::routes());
     routes.append(&mut email::routes());
     routes.append(&mut u2f::routes());
+    routes.append(&mut webauthn::routes());
     routes.append(&mut yubikey::routes());
 
     routes
@@ -128,6 +128,23 @@ fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, c
         twofactor.delete(&conn)?;
     }
 
+    let twofactor_disabled = TwoFactor::find_by_user(&user.uuid, &conn).is_empty();
+
+    if twofactor_disabled {
+        let policy_type = OrgPolicyType::TwoFactorAuthentication;
+        let org_list = UserOrganization::find_by_user_and_policy(&user.uuid, policy_type, &conn);
+
+        for user_org in org_list.into_iter() {
+            if user_org.atype < UserOrgType::Admin {
+                if CONFIG.mail_enabled() {
+                    let org = Organization::find_by_uuid(&user_org.org_uuid, &conn).unwrap();
+                    mail::send_2fa_removed_from_org(&user.email, &org.name)?;
+                }
+                user_org.delete(&conn)?;
+            }
+        }
+    }
+
     Ok(Json(json!({
         "Enabled": false,
         "Type": type_,
@@ -94,13 +94,14 @@ struct RegistrationDef {
 }
 
 #[derive(Serialize, Deserialize)]
-struct U2FRegistration {
-    id: i32,
-    name: String,
+pub struct U2FRegistration {
+    pub id: i32,
+    pub name: String,
     #[serde(with = "RegistrationDef")]
-    reg: Registration,
-    counter: u32,
+    pub reg: Registration,
+    pub counter: u32,
     compromised: bool,
+    pub migrated: Option<bool>,
 }
 
 impl U2FRegistration {
@@ -168,6 +169,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
         reg: registration,
         compromised: false,
         counter: 0,
+        migrated: None,
     };
 
     let mut regs = get_u2f_registrations(&user.uuid, &conn)?.1;
@@ -246,7 +248,7 @@ fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -
 }
 
 fn save_u2f_registrations(user_uuid: &str, regs: &[U2FRegistration], conn: &DbConn) -> EmptyResult {
-    TwoFactor::new(user_uuid.into(), TwoFactorType::U2f, serde_json::to_string(regs)?).save(&conn)
+    TwoFactor::new(user_uuid.into(), TwoFactorType::U2f, serde_json::to_string(regs)?).save(conn)
 }
 
 fn get_u2f_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec<U2FRegistration>), Error> {
@@ -273,10 +275,11 @@ fn get_u2f_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec<U2
         reg: old_regs.remove(0),
         compromised: false,
         counter: 0,
+        migrated: None,
     }];
 
     // Save new format
-    save_u2f_registrations(user_uuid, &new_regs, &conn)?;
+    save_u2f_registrations(user_uuid, &new_regs, conn)?;
 
     new_regs
 }
@@ -308,12 +311,12 @@ pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRe
 
 pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult {
     let challenge_type = TwoFactorType::U2fLoginChallenge as i32;
-    let tf_challenge = TwoFactor::find_by_user_and_type(user_uuid, challenge_type, &conn);
+    let tf_challenge = TwoFactor::find_by_user_and_type(user_uuid, challenge_type, conn);
 
     let challenge = match tf_challenge {
         Some(tf_challenge) => {
             let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
-            tf_challenge.delete(&conn)?;
+            tf_challenge.delete(conn)?;
             challenge
         }
         None => err!("Can't recover login challenge"),
@@ -329,13 +332,13 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Emp
     match response {
         Ok(new_counter) => {
             reg.counter = new_counter;
-            save_u2f_registrations(user_uuid, &registrations, &conn)?;
+            save_u2f_registrations(user_uuid, &registrations, conn)?;
 
             return Ok(());
         }
         Err(u2f::u2ferror::U2fError::CounterTooLow) => {
             reg.compromised = true;
-            save_u2f_registrations(user_uuid, &registrations, &conn)?;
+            save_u2f_registrations(user_uuid, &registrations, conn)?;
 
             err!("This device might be compromised!");
         }
386
src/api/core/two_factor/webauthn.rs
Normal file
@@ -0,0 +1,386 @@
|
|||||||
|
use rocket::Route;
|
||||||
|
use rocket_contrib::json::Json;
|
||||||
|
use serde_json::Value;
|
||||||
|
use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState, RegistrationState, Webauthn};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
api::{
|
||||||
|
core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData,
|
||||||
|
},
|
||||||
|
auth::Headers,
|
||||||
|
db::{
|
||||||
|
models::{TwoFactor, TwoFactorType},
|
||||||
|
DbConn,
|
||||||
|
},
|
||||||
|
error::Error,
|
||||||
|
CONFIG,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub fn routes() -> Vec<Route> {
|
||||||
|
routes![get_webauthn, generate_webauthn_challenge, activate_webauthn, activate_webauthn_put, delete_webauthn,]
|
||||||
|
}
|
||||||
|
|
||||||
|
struct WebauthnConfig {
|
||||||
|
url: String,
|
||||||
|
rpid: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WebauthnConfig {
|
||||||
|
fn load() -> Webauthn<Self> {
|
||||||
|
let domain = CONFIG.domain();
|
||||||
|
Webauthn::new(Self {
|
||||||
|
rpid: reqwest::Url::parse(&domain)
|
||||||
|
.map(|u| u.domain().map(str::to_owned))
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
.unwrap_or_default(),
|
||||||
|
url: domain,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl webauthn_rs::WebauthnConfig for WebauthnConfig {
|
||||||
|
fn get_relying_party_name(&self) -> &str {
|
||||||
|
&self.url
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_origin(&self) -> &str {
|
||||||
|
&self.url
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_relying_party_id(&self) -> &str {
|
||||||
|
&self.rpid
|
||||||
|
}
|
||||||
|
|
||||||
|
/// We have WebAuthn configured to discourage user verification
|
||||||
|
/// if we leave this enabled, it will cause verification issues when a keys send UV=1.
|
||||||
|
/// Upstream (the library they use) ignores this when set to discouraged, so we should too.
|
||||||
|
fn get_require_uv_consistency(&self) -> bool {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
pub struct WebauthnRegistration {
|
||||||
|
pub id: i32,
|
||||||
|
pub name: String,
|
||||||
|
pub migrated: bool,
|
||||||
|
|
||||||
|
pub credential: Credential,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WebauthnRegistration {
|
||||||
|
fn to_json(&self) -> Value {
|
||||||
|
json!({
|
||||||
|
"Id": self.id,
|
||||||
|
"Name": self.name,
|
||||||
|
"migrated": self.migrated,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/two-factor/get-webauthn", data = "<data>")]
|
||||||
|
fn get_webauthn(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||||
|
if !CONFIG.domain_set() {
|
||||||
|
err!("`DOMAIN` environment variable is not set. Webauthn disabled")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
|
||||||
|
err!("Invalid password");
|
||||||
|
}
|
||||||
|
|
||||||
|
let (enabled, registrations) = get_webauthn_registrations(&headers.user.uuid, &conn)?;
|
||||||
|
let registrations_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
|
||||||
|
|
||||||
|
Ok(Json(json!({
|
||||||
|
"Enabled": enabled,
|
||||||
|
"Keys": registrations_json,
|
||||||
|
"Object": "twoFactorWebAuthn"
|
||||||
|
})))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/two-factor/get-webauthn-challenge", data = "<data>")]
|
||||||
|
fn generate_webauthn_challenge(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||||
|
if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
|
||||||
|
err!("Invalid password");
|
||||||
|
}
|
||||||
|
|
||||||
|
let registrations = get_webauthn_registrations(&headers.user.uuid, &conn)?
|
||||||
|
.1
|
||||||
|
.into_iter()
|
||||||
|
.map(|r| r.credential.cred_id) // We return the credentialIds to the clients to avoid double registering
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let (challenge, state) = WebauthnConfig::load().generate_challenge_register_options(
|
||||||
|
headers.user.uuid.as_bytes().to_vec(),
|
||||||
|
headers.user.email,
|
||||||
|
headers.user.name,
|
||||||
|
Some(registrations),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
let type_ = TwoFactorType::WebauthnRegisterChallenge;
|
||||||
|
TwoFactor::new(headers.user.uuid, type_, serde_json::to_string(&state)?).save(&conn)?;
|
||||||
|
|
||||||
|
let mut challenge_value = serde_json::to_value(challenge.public_key)?;
|
||||||
|
challenge_value["status"] = "ok".into();
|
||||||
|
challenge_value["errorMessage"] = "".into();
|
||||||
|
Ok(Json(challenge_value))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
struct EnableWebauthnData {
|
||||||
|
Id: NumberOrString, // 1..5
|
||||||
|
Name: String,
|
||||||
|
MasterPasswordHash: String,
|
||||||
|
DeviceResponse: RegisterPublicKeyCredentialCopy,
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is copied from RegisterPublicKeyCredential to change the Response objects casing
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
struct RegisterPublicKeyCredentialCopy {
|
||||||
|
pub Id: String,
|
||||||
|
pub RawId: Base64UrlSafeData,
|
||||||
|
pub Response: AuthenticatorAttestationResponseRawCopy,
|
||||||
|
pub Type: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is copied from AuthenticatorAttestationResponseRaw to change clientDataJSON to clientDataJson
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub struct AuthenticatorAttestationResponseRawCopy {
|
||||||
|
pub AttestationObject: Base64UrlSafeData,
|
||||||
|
pub ClientDataJson: Base64UrlSafeData,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<RegisterPublicKeyCredentialCopy> for RegisterPublicKeyCredential {
|
||||||
|
fn from(r: RegisterPublicKeyCredentialCopy) -> Self {
|
||||||
|
Self {
|
||||||
|
id: r.Id,
|
||||||
|
raw_id: r.RawId,
|
||||||
|
response: AuthenticatorAttestationResponseRaw {
|
||||||
|
attestation_object: r.Response.AttestationObject,
|
||||||
|
client_data_json: r.Response.ClientDataJson,
|
||||||
|
},
|
||||||
|
type_: r.Type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}

// This is copied from PublicKeyCredential to change the Response objects casing
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
pub struct PublicKeyCredentialCopy {
    pub Id: String,
    pub RawId: Base64UrlSafeData,
    pub Response: AuthenticatorAssertionResponseRawCopy,
    pub Extensions: Option<AuthenticationExtensionsClientOutputsCopy>,
    pub Type: String,
}

// This is copied from AuthenticatorAssertionResponseRaw to change clientDataJSON to clientDataJson
#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
pub struct AuthenticatorAssertionResponseRawCopy {
    pub AuthenticatorData: Base64UrlSafeData,
    pub ClientDataJson: Base64UrlSafeData,
    pub Signature: Base64UrlSafeData,
    pub UserHandle: Option<Base64UrlSafeData>,
}

#[derive(Debug, Deserialize)]
#[allow(non_snake_case)]
pub struct AuthenticationExtensionsClientOutputsCopy {
    #[serde(default)]
    pub Appid: bool,
}

impl From<PublicKeyCredentialCopy> for PublicKeyCredential {
    fn from(r: PublicKeyCredentialCopy) -> Self {
        Self {
            id: r.Id,
            raw_id: r.RawId,
            response: AuthenticatorAssertionResponseRaw {
                authenticator_data: r.Response.AuthenticatorData,
                client_data_json: r.Response.ClientDataJson,
                signature: r.Response.Signature,
                user_handle: r.Response.UserHandle,
            },
            extensions: r.Extensions.map(|e| AuthenticationExtensionsClientOutputs {
                appid: e.Appid,
            }),
            type_: r.Type,
        }
    }
}

#[post("/two-factor/webauthn", data = "<data>")]
fn activate_webauthn(data: JsonUpcase<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableWebauthnData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    // Retrieve and delete the saved challenge state
    let type_ = TwoFactorType::WebauthnRegisterChallenge as i32;
    let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) {
        Some(tf) => {
            let state: RegistrationState = serde_json::from_str(&tf.data)?;
            tf.delete(&conn)?;
            state
        }
        None => err!("Can't recover challenge"),
    };

    // Verify the credentials with the saved state
    let (credential, _data) =
        WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?;

    let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &conn)?.1;
    // TODO: Check for repeated ID's
    registrations.push(WebauthnRegistration {
        id: data.Id.into_i32()?,
        name: data.Name,
        migrated: false,

        credential,
    });

    // Save the registrations and return them
    TwoFactor::new(user.uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(&registrations)?).save(&conn)?;
    _generate_recover_code(&mut user, &conn);

    let keys_json: Vec<Value> = registrations.iter().map(WebauthnRegistration::to_json).collect();
    Ok(Json(json!({
        "Enabled": true,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}
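For reference, a hedged sketch of the request body `activate_webauthn` accepts, derived from the `EnableWebauthnData` and `RegisterPublicKeyCredentialCopy` fields above; all concrete values are placeholders:

// Sketch only: rough shape of the payload, built with serde_json for clarity.
fn main() {
    let body = serde_json::json!({
        "Id": 1,                                 // key slot, 1..5 (number or string)
        "Name": "My security key",
        "MasterPasswordHash": "<client-side KDF hash>",
        "DeviceResponse": {                      // RegisterPublicKeyCredentialCopy
            "Id": "<credential id>",
            "RawId": "<base64url raw id>",
            "Type": "public-key",
            "Response": {
                "AttestationObject": "<base64url attestation object>",
                "ClientDataJson": "<base64url client data>"
            }
        }
    });
    println!("{}", body);
}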

#[put("/two-factor/webauthn", data = "<data>")]
fn activate_webauthn_put(data: JsonUpcase<EnableWebauthnData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_webauthn(data, headers, conn)
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct DeleteU2FData {
    Id: NumberOrString,
    MasterPasswordHash: String,
}

#[delete("/two-factor/webauthn", data = "<data>")]
fn delete_webauthn(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    let id = data.data.Id.into_i32()?;
    if !headers.user.check_valid_password(&data.data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &conn) {
        Some(tf) => tf,
        None => err!("Webauthn data not found!"),
    };

    let mut data: Vec<WebauthnRegistration> = serde_json::from_str(&tf.data)?;

    let item_pos = match data.iter().position(|r| r.id == id) {
        Some(p) => p,
        None => err!("Webauthn entry not found"),
    };

    let removed_item = data.remove(item_pos);
    tf.data = serde_json::to_string(&data)?;
    tf.save(&conn)?;
    drop(tf);

    // If entry is migrated from u2f, delete the u2f entry as well
    if let Some(mut u2f) = TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2f as i32, &conn) {
        use crate::api::core::two_factor::u2f::U2FRegistration;
        let mut data: Vec<U2FRegistration> = match serde_json::from_str(&u2f.data) {
            Ok(d) => d,
            Err(_) => err!("Error parsing U2F data"),
        };

        data.retain(|r| r.reg.key_handle != removed_item.credential.cred_id);
        let new_data_str = serde_json::to_string(&data)?;

        u2f.data = new_data_str;
        u2f.save(&conn)?;
    }

    let keys_json: Vec<Value> = data.iter().map(WebauthnRegistration::to_json).collect();

    Ok(Json(json!({
        "Enabled": true,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}

pub fn get_webauthn_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec<WebauthnRegistration>), Error> {
    let type_ = TwoFactorType::Webauthn as i32;
    match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) {
        Some(tf) => Ok((tf.enabled, serde_json::from_str(&tf.data)?)),
        None => Ok((false, Vec::new())), // If no data, return empty list
    }
}

pub fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResult {
    // Load saved credentials
    let creds: Vec<Credential> =
        get_webauthn_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.credential).collect();

    if creds.is_empty() {
        err!("No Webauthn devices registered")
    }

    // Generate a challenge based on the credentials
    let ext = RequestAuthenticationExtensions::builder().appid(format!("{}/app-id.json", &CONFIG.domain())).build();
    let (response, state) = WebauthnConfig::load().generate_challenge_authenticate_options(creds, Some(ext))?;

    // Save the challenge state for later validation
    TwoFactor::new(user_uuid.into(), TwoFactorType::WebauthnLoginChallenge, serde_json::to_string(&state)?)
        .save(conn)?;

    // Return challenge to the clients
    Ok(Json(serde_json::to_value(response.public_key)?))
}

pub fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult {
    let type_ = TwoFactorType::WebauthnLoginChallenge as i32;
    let state = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) {
        Some(tf) => {
            let state: AuthenticationState = serde_json::from_str(&tf.data)?;
            tf.delete(conn)?;
            state
        }
        None => err!("Can't recover login challenge"),
    };

    let rsp: crate::util::UpCase<PublicKeyCredentialCopy> = serde_json::from_str(response)?;
    let rsp: PublicKeyCredential = rsp.data.into();

    let mut registrations = get_webauthn_registrations(user_uuid, conn)?.1;

    // If the credential we received is migrated from U2F, enable the U2F compatibility
    //let use_u2f = registrations.iter().any(|r| r.migrated && r.credential.cred_id == rsp.raw_id.0);
    let (cred_id, auth_data) = WebauthnConfig::load().authenticate_credential(&rsp, &state)?;

    for reg in &mut registrations {
        if &reg.credential.cred_id == cred_id {
            reg.credential.counter = auth_data.counter;

            TwoFactor::new(user_uuid.to_string(), TwoFactorType::Webauthn, serde_json::to_string(&registrations)?)
                .save(conn)?;
            return Ok(());
        }
    }

    err!("Credential not present")
}
181
src/api/icons.rs
@@ -3,14 +3,14 @@ use std::{
|
|||||||
fs::{create_dir_all, remove_file, symlink_metadata, File},
|
fs::{create_dir_all, remove_file, symlink_metadata, File},
|
||||||
io::prelude::*,
|
io::prelude::*,
|
||||||
net::{IpAddr, ToSocketAddrs},
|
net::{IpAddr, ToSocketAddrs},
|
||||||
sync::RwLock,
|
sync::{Arc, RwLock},
|
||||||
time::{Duration, SystemTime},
|
time::{Duration, SystemTime},
|
||||||
};
|
};
|
||||||
|
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use reqwest::{blocking::Client, blocking::Response, header, Url};
|
use reqwest::{blocking::Client, blocking::Response, header};
|
||||||
use rocket::{http::ContentType, http::Cookie, response::Content, Route};
|
use rocket::{http::ContentType, response::Content, Route};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
error::Error,
|
error::Error,
|
||||||
@@ -25,19 +25,17 @@ pub fn routes() -> Vec<Route> {
|
|||||||
static CLIENT: Lazy<Client> = Lazy::new(|| {
|
static CLIENT: Lazy<Client> = Lazy::new(|| {
|
||||||
// Generate the default headers
|
// Generate the default headers
|
||||||
let mut default_headers = header::HeaderMap::new();
|
let mut default_headers = header::HeaderMap::new();
|
||||||
default_headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15"));
|
default_headers
|
||||||
default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8"));
|
.insert(header::USER_AGENT, header::HeaderValue::from_static("Links (2.22; Linux X86_64; GNU C; text)"));
|
||||||
|
default_headers
|
||||||
|
.insert(header::ACCEPT, header::HeaderValue::from_static("text/html, text/*;q=0.5, image/*, */*;q=0.1"));
|
||||||
|
default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en,*;q=0.1"));
|
||||||
default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache"));
|
default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache"));
|
||||||
default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache"));
|
default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache"));
|
||||||
default_headers.insert(
|
|
||||||
header::ACCEPT,
|
|
||||||
header::HeaderValue::from_static(
|
|
||||||
"text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8",
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
// Reuse the client between requests
|
// Reuse the client between requests
|
||||||
get_reqwest_client_builder()
|
get_reqwest_client_builder()
|
||||||
|
.cookie_provider(Arc::new(Jar::default()))
|
||||||
.timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
|
.timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
|
||||||
.default_headers(default_headers)
|
.default_headers(default_headers)
|
||||||
.build()
|
.build()
|
||||||
@@ -80,7 +78,7 @@ fn is_valid_domain(domain: &str) -> bool {
|
|||||||
const ALLOWED_CHARS: &str = "_-.";
|
const ALLOWED_CHARS: &str = "_-.";
|
||||||
|
|
||||||
// If parsing the domain fails using Url, it will not work with reqwest.
|
// If parsing the domain fails using Url, it will not work with reqwest.
|
||||||
if let Err(parse_error) = Url::parse(format!("https://{}", domain).as_str()) {
|
if let Err(parse_error) = url::Url::parse(format!("https://{}", domain).as_str()) {
|
||||||
debug!("Domain parse error: '{}' - {:?}", domain, parse_error);
|
debug!("Domain parse error: '{}' - {:?}", domain, parse_error);
|
||||||
return false;
|
return false;
|
||||||
} else if domain.is_empty()
|
} else if domain.is_empty()
|
||||||
@@ -251,7 +249,7 @@ fn is_domain_blacklisted(domain: &str) -> bool {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Use the pre-generated Regex stored in a Lazy HashMap.
|
// Use the pre-generated Regex stored in a Lazy HashMap.
|
||||||
if regex.is_match(&domain) {
|
if regex.is_match(domain) {
|
||||||
warn!("Blacklisted domain: {:#?} matched {:#?}", domain, blacklist);
|
warn!("Blacklisted domain: {:#?} matched {:#?}", domain, blacklist);
|
||||||
is_blacklisted = true;
|
is_blacklisted = true;
|
||||||
}
|
}
|
||||||
@@ -282,7 +280,7 @@ fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get the icon, or None in case of error
|
// Get the icon, or None in case of error
|
||||||
match download_icon(&domain) {
|
match download_icon(domain) {
|
||||||
Ok((icon, icon_type)) => {
|
Ok((icon, icon_type)) => {
|
||||||
save_icon(&path, &icon);
|
save_icon(&path, &icon);
|
||||||
Some((icon, icon_type.unwrap_or("x-icon").to_string()))
|
Some((icon, icon_type.unwrap_or("x-icon").to_string()))
|
||||||
@@ -354,13 +352,57 @@ struct Icon {
|
|||||||
impl Icon {
|
impl Icon {
|
||||||
const fn new(priority: u8, href: String) -> Self {
|
const fn new(priority: u8, href: String) -> Self {
|
||||||
Self {
|
Self {
|
||||||
href,
|
|
||||||
priority,
|
priority,
|
||||||
|
href,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &Url) {
|
/// Iterates over the HTML document to find <base href="http://domain.tld">
|
||||||
|
/// When found it will stop the iteration, and the found base href will be written back through the `base_href` argument.
|
||||||
|
///
|
||||||
|
/// # Arguments
|
||||||
|
/// * `node` - A Parsed HTML document via html5ever::parse_document()
|
||||||
|
/// * `base_href` - a mutable url::Url which will be overwritten when a base href tag has been found.
|
||||||
|
///
|
||||||
|
fn get_base_href(node: &std::rc::Rc<markup5ever_rcdom::Node>, base_href: &mut url::Url) -> bool {
|
||||||
|
if let markup5ever_rcdom::NodeData::Element {
|
||||||
|
name,
|
||||||
|
attrs,
|
||||||
|
..
|
||||||
|
} = &node.data
|
||||||
|
{
|
||||||
|
if name.local.as_ref() == "base" {
|
||||||
|
let attrs = attrs.borrow();
|
||||||
|
for attr in attrs.iter() {
|
||||||
|
let attr_name = attr.name.local.as_ref();
|
||||||
|
let attr_value = attr.value.as_ref();
|
||||||
|
|
||||||
|
if attr_name == "href" {
|
||||||
|
debug!("Found base href: {}", attr_value);
|
||||||
|
*base_href = match base_href.join(attr_value) {
|
||||||
|
Ok(href) => href,
|
||||||
|
_ => base_href.clone(),
|
||||||
|
};
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Might want to limit the recursion depth?
|
||||||
|
for child in node.children.borrow().iter() {
|
||||||
|
// Check if we got a true back and stop the iter.
|
||||||
|
// This means we found a <base> tag and can stop processing the html.
|
||||||
|
if get_base_href(child, base_href) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
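`get_base_href` only rewrites the URL that later `url.join(...)` calls resolve against. A standalone sketch of how `url::Url::join` behaves for absolute and relative base hrefs (the example URLs are made up):

// Sketch only: resolving a <base href> with url::Url::join.
fn main() {
    let page = url::Url::parse("https://example.com/login/index.html").unwrap();

    // An absolute base href replaces origin and path entirely.
    assert_eq!(
        page.join("https://cdn.example.org/assets/").unwrap().as_str(),
        "https://cdn.example.org/assets/"
    );

    // A relative base href is resolved against the page URL.
    assert_eq!(page.join("/static/").unwrap().as_str(), "https://example.com/static/");

    // An unparsable href falls back to the unchanged base, as the code above
    // does with `base_href.clone()`.
}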
|
||||||
|
|
||||||
|
fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &url::Url) {
|
||||||
if let markup5ever_rcdom::NodeData::Element {
|
if let markup5ever_rcdom::NodeData::Element {
|
||||||
name,
|
name,
|
||||||
attrs,
|
attrs,
|
||||||
@@ -389,7 +431,7 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
|
|||||||
|
|
||||||
if has_rel {
|
if has_rel {
|
||||||
if let Some(inner_href) = href {
|
if let Some(inner_href) = href {
|
||||||
if let Ok(full_href) = url.join(&inner_href).map(|h| h.into_string()) {
|
if let Ok(full_href) = url.join(inner_href).map(String::from) {
|
||||||
let priority = get_icon_priority(&full_href, sizes);
|
let priority = get_icon_priority(&full_href, sizes);
|
||||||
icons.push(Icon::new(priority, full_href));
|
icons.push(Icon::new(priority, full_href));
|
||||||
}
|
}
|
||||||
@@ -406,12 +448,11 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve
|
|||||||
|
|
||||||
struct IconUrlResult {
|
struct IconUrlResult {
|
||||||
iconlist: Vec<Icon>,
|
iconlist: Vec<Icon>,
|
||||||
cookies: String,
|
|
||||||
referer: String,
|
referer: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns a Result/Tuple which holds a Vector IconList and a string which holds the cookies from the last response.
|
/// Returns an IconUrlResult which holds a Vector of Icons and a String which holds the referer.
|
||||||
/// There will always be a result with a string which will contain https://example.com/favicon.ico and an empty string for the cookies.
|
/// There will always be two items within the iconlist which hold http(s)://domain.tld/favicon.ico.
|
||||||
/// This does not mean that the location exists, but it is the default location browsers use.
|
/// This does not mean that the location exists, but it is the default location browsers use.
|
||||||
///
|
///
|
||||||
/// # Argument
|
/// # Argument
|
||||||
@@ -419,8 +460,8 @@ struct IconUrlResult {
|
|||||||
///
|
///
|
||||||
/// # Example
|
/// # Example
|
||||||
/// ```
|
/// ```
|
||||||
/// let (mut iconlist, cookie_str) = get_icon_url("github.com")?;
|
/// let icon_result = get_icon_url("github.com")?;
|
||||||
/// let (mut iconlist, cookie_str) = get_icon_url("gitlab.com")?;
|
/// let icon_result = get_icon_url("vaultwarden.discourse.group")?;
|
||||||
/// ```
|
/// ```
|
||||||
fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
|
fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
|
||||||
// Default URL with secure and insecure schemes
|
// Default URL with secure and insecure schemes
|
||||||
@@ -468,49 +509,30 @@ fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
|
|||||||
|
|
||||||
// Create the iconlist
|
// Create the iconlist
|
||||||
let mut iconlist: Vec<Icon> = Vec::new();
|
let mut iconlist: Vec<Icon> = Vec::new();
|
||||||
|
let mut referer = String::from("");
|
||||||
// Create the cookie_str to fill it all the cookies from the response
|
|
||||||
// These cookies can be used to request/download the favicon image.
|
|
||||||
// Some sites have extra security in place with for example XSRF Tokens.
|
|
||||||
let mut cookie_str = "".to_string();
|
|
||||||
let mut referer = "".to_string();
|
|
||||||
|
|
||||||
if let Ok(content) = resp {
|
if let Ok(content) = resp {
|
||||||
// Extract the URL from the response in case redirects occurred (like @ gitlab.com)
|
// Extract the URL from the response in case redirects occurred (like @ gitlab.com)
|
||||||
let url = content.url().clone();
|
let url = content.url().clone();
|
||||||
|
|
||||||
// Get all the cookies and pass it on to the next function.
|
|
||||||
// Needed for XSRF Cookies for example (like @ mijn.ing.nl)
|
|
||||||
let raw_cookies = content.headers().get_all("set-cookie");
|
|
||||||
cookie_str = raw_cookies
|
|
||||||
.iter()
|
|
||||||
.filter_map(|raw_cookie| raw_cookie.to_str().ok())
|
|
||||||
.map(|cookie_str| {
|
|
||||||
if let Ok(cookie) = Cookie::parse(cookie_str) {
|
|
||||||
format!("{}={}; ", cookie.name(), cookie.value())
|
|
||||||
} else {
|
|
||||||
String::new()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect::<String>();
|
|
||||||
|
|
||||||
// Set the referer to be used on the final request, some sites check this.
|
// Set the referer to be used on the final request, some sites check this.
|
||||||
// Mostly used to prevent direct linking and other security reasons.
|
// Mostly used to prevent direct linking and other security reasons.
|
||||||
referer = url.as_str().to_string();
|
referer = url.as_str().to_string();
|
||||||
|
|
||||||
// Add the default favicon.ico to the list with the domain the content responded from.
|
// Add the default favicon.ico to the list with the domain the content responded from.
|
||||||
iconlist.push(Icon::new(35, url.join("/favicon.ico").unwrap().into_string()));
|
iconlist.push(Icon::new(35, String::from(url.join("/favicon.ico").unwrap())));
|
||||||
|
|
||||||
// 512KB should be more than enough for the HTML, though as we only really need
|
// 384KB should be more than enough for the HTML, though as we only really need the HTML header.
|
||||||
// the HTML header, it could potentially be reduced even further
|
let mut limited_reader = content.take(384 * 1024);
|
||||||
let mut limited_reader = content.take(512 * 1024);
|
|
||||||
|
|
||||||
use html5ever::tendril::TendrilSink;
|
use html5ever::tendril::TendrilSink;
|
||||||
let dom = html5ever::parse_document(markup5ever_rcdom::RcDom::default(), Default::default())
|
let dom = html5ever::parse_document(markup5ever_rcdom::RcDom::default(), Default::default())
|
||||||
.from_utf8()
|
.from_utf8()
|
||||||
.read_from(&mut limited_reader)?;
|
.read_from(&mut limited_reader)?;
|
||||||
|
|
||||||
get_favicons_node(&dom.document, &mut iconlist, &url);
|
let mut base_url: url::Url = url;
|
||||||
|
get_base_href(&dom.document, &mut base_url);
|
||||||
|
get_favicons_node(&dom.document, &mut iconlist, &base_url);
|
||||||
} else {
|
} else {
|
||||||
// Add the default favicon.ico to the list with just the given domain
|
// Add the default favicon.ico to the list with just the given domain
|
||||||
iconlist.push(Icon::new(35, format!("{}/favicon.ico", ssldomain)));
|
iconlist.push(Icon::new(35, format!("{}/favicon.ico", ssldomain)));
|
||||||
@@ -523,24 +545,20 @@ fn get_icon_url(domain: &str) -> Result<IconUrlResult, Error> {
|
|||||||
// There always is an icon in the list, so no need to check if it exists, and just return the first one
|
// There always is an icon in the list, so no need to check if it exists, and just return the first one
|
||||||
Ok(IconUrlResult {
|
Ok(IconUrlResult {
|
||||||
iconlist,
|
iconlist,
|
||||||
cookies: cookie_str,
|
|
||||||
referer,
|
referer,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_page(url: &str) -> Result<Response, Error> {
|
fn get_page(url: &str) -> Result<Response, Error> {
|
||||||
get_page_with_cookies(url, "", "")
|
get_page_with_referer(url, "")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_page_with_cookies(url: &str, cookie_str: &str, referer: &str) -> Result<Response, Error> {
|
fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Error> {
|
||||||
if is_domain_blacklisted(Url::parse(url).unwrap().host_str().unwrap_or_default()) {
|
if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()) {
|
||||||
err!("Favicon rel linked to a blacklisted domain!");
|
err!("Favicon rel linked to a blacklisted domain!");
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut client = CLIENT.get(url);
|
let mut client = CLIENT.get(url);
|
||||||
if !cookie_str.is_empty() {
|
|
||||||
client = client.header("Cookie", cookie_str)
|
|
||||||
}
|
|
||||||
if !referer.is_empty() {
|
if !referer.is_empty() {
|
||||||
client = client.header("Referer", referer)
|
client = client.header("Referer", referer)
|
||||||
}
|
}
|
||||||
@@ -573,7 +591,7 @@ fn get_icon_priority(href: &str, sizes: Option<&str>) -> u8 {
|
|||||||
1
|
1
|
||||||
} else if width == 64 {
|
} else if width == 64 {
|
||||||
2
|
2
|
||||||
} else if (24..=128).contains(&width) {
|
} else if (24..=192).contains(&width) {
|
||||||
3
|
3
|
||||||
} else if width == 16 {
|
} else if width == 16 {
|
||||||
4
|
4
|
||||||
@@ -632,7 +650,7 @@ fn download_icon(domain: &str) -> Result<(Vec<u8>, Option<&str>), Error> {
|
|||||||
err!("Domain is blacklisted", domain)
|
err!("Domain is blacklisted", domain)
|
||||||
}
|
}
|
||||||
|
|
||||||
let icon_result = get_icon_url(&domain)?;
|
let icon_result = get_icon_url(domain)?;
|
||||||
|
|
||||||
let mut buffer = Vec::new();
|
let mut buffer = Vec::new();
|
||||||
let mut icon_type: Option<&str> = None;
|
let mut icon_type: Option<&str> = None;
|
||||||
@@ -661,7 +679,7 @@ fn download_icon(domain: &str) -> Result<(Vec<u8>, Option<&str>), Error> {
|
|||||||
_ => warn!("Extracted icon from data:image uri is invalid"),
|
_ => warn!("Extracted icon from data:image uri is invalid"),
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
match get_page_with_cookies(&icon.href, &icon_result.cookies, &icon_result.referer) {
|
match get_page_with_referer(&icon.href, &icon_result.referer) {
|
||||||
Ok(mut res) => {
|
Ok(mut res) => {
|
||||||
res.copy_to(&mut buffer)?;
|
res.copy_to(&mut buffer)?;
|
||||||
// Check if the icon type is allowed, else try an icon from the list.
|
// Check if the icon type is allowed, else try an icon from the list.
|
||||||
@@ -706,7 +724,54 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> {
|
|||||||
[0, 0, 1, 0, ..] => Some("x-icon"),
|
[0, 0, 1, 0, ..] => Some("x-icon"),
|
||||||
[82, 73, 70, 70, ..] => Some("webp"),
|
[82, 73, 70, 70, ..] => Some("webp"),
|
||||||
[255, 216, 255, ..] => Some("jpeg"),
|
[255, 216, 255, ..] => Some("jpeg"),
|
||||||
|
[71, 73, 70, 56, ..] => Some("gif"),
|
||||||
[66, 77, ..] => Some("bmp"),
|
[66, 77, ..] => Some("bmp"),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
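`get_icon_type` sniffs the first bytes of the downloaded body rather than trusting the Content-Type header, and this change adds the GIF signature (`GIF8`, bytes 71 73 70 56). A standalone sketch of the same signature matching (the helper name below is illustrative):

// Sketch only: byte-signature sniffing, mirroring get_icon_type above.
fn sniff(bytes: &[u8]) -> Option<&'static str> {
    match bytes {
        [0, 0, 1, 0, ..] => Some("x-icon"),
        [82, 73, 70, 70, ..] => Some("webp"), // "RIFF"
        [255, 216, 255, ..] => Some("jpeg"),
        [71, 73, 70, 56, ..] => Some("gif"),  // "GIF8"
        [66, 77, ..] => Some("bmp"),          // "BM"
        _ => None,
    }
}

fn main() {
    assert_eq!(sniff(b"GIF89a..."), Some("gif"));
    assert_eq!(sniff(b"BM......"), Some("bmp"));
    assert_eq!(sniff(b"<html>"), None);
}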
|
||||||
|
|
||||||
|
/// This is an implementation of the default Cookie Jar from Reqwest and reqwest_cookie_store built by pfernie.
|
||||||
|
/// The default cookie jar used by Reqwest keeps all the cookies based upon the Max-Age or Expires which could be a long time.
|
||||||
|
/// That could be used for tracking, to prevent this we force the lifespan of the cookies to always be max two minutes.
|
||||||
|
/// A Cookie Jar is needed because some sites force a redirect with cookies to verify if a request uses cookies or not.
|
||||||
|
use cookie_store::CookieStore;
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct Jar(RwLock<CookieStore>);
|
||||||
|
|
||||||
|
impl reqwest::cookie::CookieStore for Jar {
|
||||||
|
fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &header::HeaderValue>, url: &url::Url) {
|
||||||
|
use cookie::{Cookie as RawCookie, ParseError as RawCookieParseError};
|
||||||
|
use time::Duration;
|
||||||
|
|
||||||
|
let mut cookie_store = self.0.write().unwrap();
|
||||||
|
let cookies = cookie_headers.filter_map(|val| {
|
||||||
|
std::str::from_utf8(val.as_bytes())
|
||||||
|
.map_err(RawCookieParseError::from)
|
||||||
|
.and_then(RawCookie::parse)
|
||||||
|
.map(|mut c| {
|
||||||
|
c.set_expires(None);
|
||||||
|
c.set_max_age(Some(Duration::minutes(2)));
|
||||||
|
c.into_owned()
|
||||||
|
})
|
||||||
|
.ok()
|
||||||
|
});
|
||||||
|
cookie_store.store_response_cookies(cookies, url);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn cookies(&self, url: &url::Url) -> Option<header::HeaderValue> {
|
||||||
|
use bytes::Bytes;
|
||||||
|
|
||||||
|
let cookie_store = self.0.read().unwrap();
|
||||||
|
let s = cookie_store
|
||||||
|
.get_request_values(url)
|
||||||
|
.map(|(name, value)| format!("{}={}", name, value))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join("; ");
|
||||||
|
|
||||||
|
if s.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
header::HeaderValue::from_maybe_shared(Bytes::from(s)).ok()
|
||||||
|
}
|
||||||
|
}
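The `Jar` above lets the icon client follow cookie-gated redirects while clamping every stored cookie to a two-minute lifetime; it is plugged into the shared client via `.cookie_provider(Arc::new(Jar::default()))` earlier in this file. A sketch of just the clamping step, assuming the same `cookie` and `time` crate versions this code builds against:

// Sketch only: clamp an incoming Set-Cookie value to a short lifetime,
// mirroring what Jar::set_cookies does above.
use cookie::Cookie;
use time::Duration;

fn clamp(set_cookie_header: &str) -> Option<Cookie<'static>> {
    Cookie::parse(set_cookie_header).ok().map(|mut c| {
        c.set_expires(None);                        // drop any Expires date
        c.set_max_age(Some(Duration::minutes(2)));  // hard upper bound on lifetime
        c.into_owned()
    })
}

fn main() {
    let c = clamp("session=abc123; Max-Age=31536000; Path=/").unwrap();
    assert_eq!(c.max_age(), Some(Duration::minutes(2)));
}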
|
||||||
|
@@ -134,7 +134,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
|
|||||||
|
|
||||||
let (mut device, new_device) = get_device(&data, &conn, &user);
|
let (mut device, new_device) = get_device(&data, &conn, &user);
|
||||||
|
|
||||||
let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, &ip, &conn)?;
|
let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, ip, &conn)?;
|
||||||
|
|
||||||
if CONFIG.mail_enabled() && new_device {
|
if CONFIG.mail_enabled() && new_device {
|
||||||
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) {
|
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) {
|
||||||
@@ -185,7 +185,7 @@ fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool)
|
|||||||
|
|
||||||
let mut new_device = false;
|
let mut new_device = false;
|
||||||
// Find device or create new
|
// Find device or create new
|
||||||
let device = match Device::find_by_uuid(&device_id, &conn) {
|
let device = match Device::find_by_uuid(&device_id, conn) {
|
||||||
Some(device) => {
|
Some(device) => {
|
||||||
// Check if owned device, and recreate if not
|
// Check if owned device, and recreate if not
|
||||||
if device.user_uuid != user.uuid {
|
if device.user_uuid != user.uuid {
|
||||||
@@ -240,6 +240,7 @@ fn twofactor_auth(
|
|||||||
_tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?
|
_tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?
|
||||||
}
|
}
|
||||||
Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
|
Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
|
||||||
|
Some(TwoFactorType::Webauthn) => _tf::webauthn::validate_webauthn_login(user_uuid, twofactor_code, conn)?,
|
||||||
Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
|
Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
|
||||||
Some(TwoFactorType::Duo) => {
|
Some(TwoFactorType::Duo) => {
|
||||||
_tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?
|
_tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?
|
||||||
@@ -309,8 +310,13 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Some(TwoFactorType::Webauthn) if CONFIG.domain_set() => {
|
||||||
|
let request = two_factor::webauthn::generate_webauthn_login(user_uuid, conn)?;
|
||||||
|
result["TwoFactorProviders2"][provider.to_string()] = request.0;
|
||||||
|
}
|
||||||
|
|
||||||
Some(TwoFactorType::Duo) => {
|
Some(TwoFactorType::Duo) => {
|
||||||
let email = match User::find_by_uuid(user_uuid, &conn) {
|
let email = match User::find_by_uuid(user_uuid, conn) {
|
||||||
Some(u) => u.email,
|
Some(u) => u.email,
|
||||||
None => err!("User does not exist"),
|
None => err!("User does not exist"),
|
||||||
};
|
};
|
||||||
@@ -324,7 +330,7 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
|
|||||||
}
|
}
|
||||||
|
|
||||||
Some(tf_type @ TwoFactorType::YubiKey) => {
|
Some(tf_type @ TwoFactorType::YubiKey) => {
|
||||||
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, &conn) {
|
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn) {
|
||||||
Some(tf) => tf,
|
Some(tf) => tf,
|
||||||
None => err!("No YubiKey devices registered"),
|
None => err!("No YubiKey devices registered"),
|
||||||
};
|
};
|
||||||
@@ -339,14 +345,14 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
|
|||||||
Some(tf_type @ TwoFactorType::Email) => {
|
Some(tf_type @ TwoFactorType::Email) => {
|
||||||
use crate::api::core::two_factor as _tf;
|
use crate::api::core::two_factor as _tf;
|
||||||
|
|
||||||
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, &conn) {
|
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn) {
|
||||||
Some(tf) => tf,
|
Some(tf) => tf,
|
||||||
None => err!("No twofactor email registered"),
|
None => err!("No twofactor email registered"),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Send email immediately if email is the only 2FA option
|
// Send email immediately if email is the only 2FA option
|
||||||
if providers.len() == 1 {
|
if providers.len() == 1 {
|
||||||
_tf::email::send_token(&user_uuid, &conn)?
|
_tf::email::send_token(user_uuid, conn)?
|
||||||
}
|
}
|
||||||
|
|
||||||
let email_data = EmailTokenData::from_json(&twofactor.data)?;
|
let email_data = EmailTokenData::from_json(&twofactor.data)?;
|
||||||
|
@@ -51,10 +51,11 @@ impl NumberOrString {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn into_i32(self) -> ApiResult<i32> {
|
#[allow(clippy::wrong_self_convention)]
|
||||||
|
fn into_i32(&self) -> ApiResult<i32> {
|
||||||
use std::num::ParseIntError as PIE;
|
use std::num::ParseIntError as PIE;
|
||||||
match self {
|
match self {
|
||||||
NumberOrString::Number(n) => Ok(n),
|
NumberOrString::Number(n) => Ok(*n),
|
||||||
NumberOrString::String(s) => {
|
NumberOrString::String(s) => {
|
||||||
s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string()))
|
s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string()))
|
||||||
}
|
}
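`into_i32` now borrows `self`, so callers such as `delete_webauthn` can keep using the value afterwards. For context, the enum accepts either a JSON number or a numeric string; a standalone sketch of that behaviour with illustrative names (not the repository's actual definition):

// Sketch only: accept 5 or "5" in incoming JSON and normalize to i32.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum NumOrStr {
    Num(i32),
    Str(String),
}

impl NumOrStr {
    fn as_i32(&self) -> Result<i32, std::num::ParseIntError> {
        match self {
            NumOrStr::Num(n) => Ok(*n),
            NumOrStr::Str(s) => s.parse(),
        }
    }
}

fn main() {
    let a: NumOrStr = serde_json::from_str("5").unwrap();
    let b: NumOrStr = serde_json::from_str("\"5\"").unwrap();
    assert_eq!(a.as_i32().unwrap(), 5);
    assert_eq!(b.as_i32().unwrap(), 5);
}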
|
||||||
|
@@ -332,7 +332,7 @@ impl WebSocketUsers {
|
|||||||
);
|
);
|
||||||
|
|
||||||
for uuid in user_uuids {
|
for uuid in user_uuids {
|
||||||
self.send_update(&uuid, &data).ok();
|
self.send_update(uuid, &data).ok();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -408,12 +408,10 @@ pub fn start_notification_server() -> WebSocketUsers {
|
|||||||
|
|
||||||
if CONFIG.websocket_enabled() {
|
if CONFIG.websocket_enabled() {
|
||||||
thread::spawn(move || {
|
thread::spawn(move || {
|
||||||
let settings = ws::Settings {
|
let mut settings = ws::Settings::default();
|
||||||
max_connections: 500,
|
settings.max_connections = 500;
|
||||||
queue_size: 2,
|
settings.queue_size = 2;
|
||||||
panic_on_internal: false,
|
settings.panic_on_internal = false;
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
ws::Builder::new()
|
ws::Builder::new()
|
||||||
.with_settings(settings)
|
.with_settings(settings)
|
||||||
|
@@ -4,13 +4,17 @@ use rocket::{http::ContentType, response::content::Content, response::NamedFile,
|
|||||||
use rocket_contrib::json::Json;
|
use rocket_contrib::json::Json;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
use crate::{error::Error, util::Cached, CONFIG};
|
use crate::{
|
||||||
|
error::Error,
|
||||||
|
util::{Cached, SafeString},
|
||||||
|
CONFIG,
|
||||||
|
};
|
||||||
|
|
||||||
pub fn routes() -> Vec<Route> {
|
pub fn routes() -> Vec<Route> {
|
||||||
// If adding more routes here, consider also adding them to
|
// If adding more routes here, consider also adding them to
|
||||||
// crate::utils::LOGGED_ROUTES to make sure they appear in the log
|
// crate::utils::LOGGED_ROUTES to make sure they appear in the log
|
||||||
if CONFIG.web_vault_enabled() {
|
if CONFIG.web_vault_enabled() {
|
||||||
routes![web_index, app_id, web_files, attachments, sends, alive, static_files]
|
routes![web_index, app_id, web_files, attachments, alive, static_files]
|
||||||
} else {
|
} else {
|
||||||
routes![attachments, alive, static_files]
|
routes![attachments, alive, static_files]
|
||||||
}
|
}
|
||||||
@@ -55,14 +59,9 @@ fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
|
|||||||
Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok())
|
Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/attachments/<uuid>/<file..>")]
|
#[get("/attachments/<uuid>/<file_id>")]
|
||||||
fn attachments(uuid: String, file: PathBuf) -> Option<NamedFile> {
|
fn attachments(uuid: SafeString, file_id: SafeString) -> Option<NamedFile> {
|
||||||
NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file)).ok()
|
NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file_id)).ok()
|
||||||
}
|
|
||||||
|
|
||||||
#[get("/sends/<send_id>/<file_id>")]
|
|
||||||
fn sends(send_id: String, file_id: String) -> Option<NamedFile> {
|
|
||||||
NamedFile::open(Path::new(&CONFIG.sends_folder()).join(send_id).join(file_id)).ok()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[get("/alive")]
|
#[get("/alive")]
|
||||||
@@ -78,9 +77,11 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
|
|||||||
match filename.as_ref() {
|
match filename.as_ref() {
|
||||||
"mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),
|
"mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),
|
||||||
"logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))),
|
"logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))),
|
||||||
"shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))),
|
|
||||||
"error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))),
|
"error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))),
|
||||||
"hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
|
"hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
|
||||||
|
"vaultwarden-icon.png" => {
|
||||||
|
Ok(Content(ContentType::PNG, include_bytes!("../static/images/vaultwarden-icon.png")))
|
||||||
|
}
|
||||||
|
|
||||||
"bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
|
"bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
|
||||||
"bootstrap-native.js" => {
|
"bootstrap-native.js" => {
|
||||||
@@ -89,8 +90,8 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
|
|||||||
"identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
|
"identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
|
||||||
"datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
|
"datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
|
||||||
"datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
|
"datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
|
||||||
"jquery-3.5.1.slim.js" => {
|
"jquery-3.6.0.slim.js" => {
|
||||||
Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js")))
|
Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.6.0.slim.js")))
|
||||||
}
|
}
|
||||||
_ => err!(format!("Static file not found: {}", filename)),
|
_ => err!(format!("Static file not found: {}", filename)),
|
||||||
}
|
}
|
||||||
|
104
src/auth.rs
@@ -19,22 +19,34 @@ const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
|
|||||||
|
|
||||||
pub static DEFAULT_VALIDITY: Lazy<Duration> = Lazy::new(|| Duration::hours(2));
|
pub static DEFAULT_VALIDITY: Lazy<Duration> = Lazy::new(|| Duration::hours(2));
|
||||||
static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
|
static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
|
||||||
|
|
||||||
pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
|
pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
|
||||||
static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
|
static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
|
||||||
static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
|
static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
|
||||||
static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
|
static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
|
||||||
static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
|
static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
|
||||||
static PRIVATE_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.private_rsa_key()) {
|
static JWT_SEND_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|send", CONFIG.domain_origin()));
|
||||||
Ok(key) => key,
|
|
||||||
Err(e) => panic!("Error loading private RSA Key.\n Error: {}", e),
|
static PRIVATE_RSA_KEY_VEC: Lazy<Vec<u8>> = Lazy::new(|| {
|
||||||
|
read_file(&CONFIG.private_rsa_key()).unwrap_or_else(|e| panic!("Error loading private RSA Key.\n{}", e))
|
||||||
});
|
});
|
||||||
static PUBLIC_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.public_rsa_key()) {
|
static PRIVATE_RSA_KEY: Lazy<EncodingKey> = Lazy::new(|| {
|
||||||
Ok(key) => key,
|
EncodingKey::from_rsa_pem(&PRIVATE_RSA_KEY_VEC).unwrap_or_else(|e| panic!("Error decoding private RSA Key.\n{}", e))
|
||||||
Err(e) => panic!("Error loading public RSA Key.\n Error: {}", e),
|
});
|
||||||
|
static PUBLIC_RSA_KEY_VEC: Lazy<Vec<u8>> = Lazy::new(|| {
|
||||||
|
read_file(&CONFIG.public_rsa_key()).unwrap_or_else(|e| panic!("Error loading public RSA Key.\n{}", e))
|
||||||
|
});
|
||||||
|
static PUBLIC_RSA_KEY: Lazy<DecodingKey> = Lazy::new(|| {
|
||||||
|
DecodingKey::from_rsa_pem(&PUBLIC_RSA_KEY_VEC).unwrap_or_else(|e| panic!("Error decoding public RSA Key.\n{}", e))
|
||||||
});
|
});
|
||||||
|
|
||||||
|
pub fn load_keys() {
|
||||||
|
Lazy::force(&PRIVATE_RSA_KEY);
|
||||||
|
Lazy::force(&PUBLIC_RSA_KEY);
|
||||||
|
}
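`load_keys` simply forces the two `Lazy` statics, so a missing or malformed RSA key pair panics at startup rather than on the first login request. A standalone sketch of the same eager-initialization pattern (the file path is a placeholder):

// Sketch only: forcing a Lazy static at startup surfaces init errors early.
use once_cell::sync::Lazy;

static EXPENSIVE: Lazy<Vec<u8>> = Lazy::new(|| {
    std::fs::read("data/private.pem").unwrap_or_else(|e| panic!("Error loading key: {}", e))
});

fn main() {
    // Without this call, the panic would only happen on the first use of EXPENSIVE.
    Lazy::force(&EXPENSIVE);
    println!("key loaded: {} bytes", EXPENSIVE.len());
}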
|
||||||
|
|
||||||
pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
|
pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
|
||||||
match jsonwebtoken::encode(&JWT_HEADER, claims, &EncodingKey::from_rsa_der(&PRIVATE_RSA_KEY)) {
|
match jsonwebtoken::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) {
|
||||||
Ok(token) => token,
|
Ok(token) => token,
|
||||||
Err(e) => panic!("Error encoding jwt {}", e),
|
Err(e) => panic!("Error encoding jwt {}", e),
|
||||||
}
|
}
|
||||||
@@ -52,10 +64,7 @@ fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Err
|
|||||||
};
|
};
|
||||||
|
|
||||||
let token = token.replace(char::is_whitespace, "");
|
let token = token.replace(char::is_whitespace, "");
|
||||||
|
jsonwebtoken::decode(&token, &PUBLIC_RSA_KEY, &validation).map(|d| d.claims).map_res("Error decoding JWT")
|
||||||
jsonwebtoken::decode(&token, &DecodingKey::from_rsa_der(&PUBLIC_RSA_KEY), &validation)
|
|
||||||
.map(|d| d.claims)
|
|
||||||
.map_res("Error decoding JWT")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn decode_login(token: &str) -> Result<LoginJwtClaims, Error> {
|
pub fn decode_login(token: &str) -> Result<LoginJwtClaims, Error> {
|
||||||
@@ -66,18 +75,22 @@ pub fn decode_invite(token: &str) -> Result<InviteJwtClaims, Error> {
|
|||||||
decode_jwt(token, JWT_INVITE_ISSUER.to_string())
|
decode_jwt(token, JWT_INVITE_ISSUER.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn decode_delete(token: &str) -> Result<DeleteJwtClaims, Error> {
|
pub fn decode_delete(token: &str) -> Result<BasicJwtClaims, Error> {
|
||||||
decode_jwt(token, JWT_DELETE_ISSUER.to_string())
|
decode_jwt(token, JWT_DELETE_ISSUER.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn decode_verify_email(token: &str) -> Result<VerifyEmailJwtClaims, Error> {
|
pub fn decode_verify_email(token: &str) -> Result<BasicJwtClaims, Error> {
|
||||||
decode_jwt(token, JWT_VERIFYEMAIL_ISSUER.to_string())
|
decode_jwt(token, JWT_VERIFYEMAIL_ISSUER.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn decode_admin(token: &str) -> Result<AdminJwtClaims, Error> {
|
pub fn decode_admin(token: &str) -> Result<BasicJwtClaims, Error> {
|
||||||
decode_jwt(token, JWT_ADMIN_ISSUER.to_string())
|
decode_jwt(token, JWT_ADMIN_ISSUER.to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn decode_send(token: &str) -> Result<BasicJwtClaims, Error> {
|
||||||
|
decode_jwt(token, JWT_SEND_ISSUER.to_string())
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct LoginJwtClaims {
|
pub struct LoginJwtClaims {
|
||||||
// Not before
|
// Not before
|
||||||
@@ -147,7 +160,7 @@ pub fn generate_invite_claims(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
pub struct DeleteJwtClaims {
|
pub struct BasicJwtClaims {
|
||||||
// Not before
|
// Not before
|
||||||
pub nbf: i64,
|
pub nbf: i64,
|
||||||
// Expiration time
|
// Expiration time
|
||||||
@@ -158,9 +171,9 @@ pub struct DeleteJwtClaims {
|
|||||||
pub sub: String,
|
pub sub: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn generate_delete_claims(uuid: String) -> DeleteJwtClaims {
|
pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims {
|
||||||
let time_now = Utc::now().naive_utc();
|
let time_now = Utc::now().naive_utc();
|
||||||
DeleteJwtClaims {
|
BasicJwtClaims {
|
||||||
nbf: time_now.timestamp(),
|
nbf: time_now.timestamp(),
|
||||||
exp: (time_now + Duration::days(5)).timestamp(),
|
exp: (time_now + Duration::days(5)).timestamp(),
|
||||||
iss: JWT_DELETE_ISSUER.to_string(),
|
iss: JWT_DELETE_ISSUER.to_string(),
|
||||||
@@ -168,21 +181,9 @@ pub fn generate_delete_claims(uuid: String) -> DeleteJwtClaims {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims {
|
||||||
pub struct VerifyEmailJwtClaims {
|
|
||||||
// Not before
|
|
||||||
pub nbf: i64,
|
|
||||||
// Expiration time
|
|
||||||
pub exp: i64,
|
|
||||||
// Issuer
|
|
||||||
pub iss: String,
|
|
||||||
// Subject
|
|
||||||
pub sub: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn generate_verify_email_claims(uuid: String) -> DeleteJwtClaims {
|
|
||||||
let time_now = Utc::now().naive_utc();
|
let time_now = Utc::now().naive_utc();
|
||||||
DeleteJwtClaims {
|
BasicJwtClaims {
|
||||||
nbf: time_now.timestamp(),
|
nbf: time_now.timestamp(),
|
||||||
exp: (time_now + Duration::days(5)).timestamp(),
|
exp: (time_now + Duration::days(5)).timestamp(),
|
||||||
iss: JWT_VERIFYEMAIL_ISSUER.to_string(),
|
iss: JWT_VERIFYEMAIL_ISSUER.to_string(),
|
||||||
@@ -190,21 +191,9 @@ pub fn generate_verify_email_claims(uuid: String) -> DeleteJwtClaims {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
pub fn generate_admin_claims() -> BasicJwtClaims {
|
||||||
pub struct AdminJwtClaims {
|
|
||||||
// Not before
|
|
||||||
pub nbf: i64,
|
|
||||||
// Expiration time
|
|
||||||
pub exp: i64,
|
|
||||||
// Issuer
|
|
||||||
pub iss: String,
|
|
||||||
// Subject
|
|
||||||
pub sub: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn generate_admin_claims() -> AdminJwtClaims {
|
|
||||||
let time_now = Utc::now().naive_utc();
|
let time_now = Utc::now().naive_utc();
|
||||||
AdminJwtClaims {
|
BasicJwtClaims {
|
||||||
nbf: time_now.timestamp(),
|
nbf: time_now.timestamp(),
|
||||||
exp: (time_now + Duration::minutes(20)).timestamp(),
|
exp: (time_now + Duration::minutes(20)).timestamp(),
|
||||||
iss: JWT_ADMIN_ISSUER.to_string(),
|
iss: JWT_ADMIN_ISSUER.to_string(),
|
||||||
@@ -212,6 +201,16 @@ pub fn generate_admin_claims() -> AdminJwtClaims {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn generate_send_claims(send_id: &str, file_id: &str) -> BasicJwtClaims {
|
||||||
|
let time_now = Utc::now().naive_utc();
|
||||||
|
BasicJwtClaims {
|
||||||
|
nbf: time_now.timestamp(),
|
||||||
|
exp: (time_now + Duration::minutes(2)).timestamp(),
|
||||||
|
iss: JWT_SEND_ISSUER.to_string(),
|
||||||
|
sub: format!("{}/{}", send_id, file_id),
|
||||||
|
}
|
||||||
|
}
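The new `|send` issuer plus `generate_send_claims` produce a two-minute token whose subject is `send_id/file_id`, and `decode_send` verifies it. A hedged sketch of the round trip using the helpers defined above (the wrapper function and crate paths are illustrative, not part of the diff):

// Sketch only: issue and verify a short-lived Send download token.
fn send_access_token_roundtrip(send_id: &str, file_id: &str) -> Result<(), crate::error::Error> {
    // Issue: subject is "<send_id>/<file_id>", expiry is two minutes out.
    let claims = crate::auth::generate_send_claims(send_id, file_id);
    let token = crate::auth::encode_jwt(&claims);

    // Verify: decode_send checks signature, expiry and the send issuer.
    let decoded = crate::auth::decode_send(&token)?;
    assert_eq!(decoded.sub, format!("{}/{}", send_id, file_id));
    Ok(())
}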
|
||||||
|
|
||||||
//
|
//
|
||||||
// Bearer token authentication
|
// Bearer token authentication
|
||||||
//
|
//
|
||||||
@@ -326,8 +325,19 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
|
|||||||
_ => err_handler!("Error getting current route for stamp exception"),
|
_ => err_handler!("Error getting current route for stamp exception"),
|
||||||
};
|
};
|
||||||
|
|
||||||
// Check if both match, if not this route is not allowed with the current security stamp.
|
// Check if the stamp exception has expired first.
|
||||||
if stamp_exception.route != current_route {
|
// Then, check if the current route matches any of the allowed routes.
|
||||||
|
// After that check the stamp in exception matches the one in the claims.
|
||||||
|
if Utc::now().naive_utc().timestamp() > stamp_exception.expire {
|
||||||
|
// If the stamp exception has been expired remove it from the database.
|
||||||
|
// This prevents checking this stamp exception for new requests.
|
||||||
|
let mut user = user;
|
||||||
|
user.reset_stamp_exception();
|
||||||
|
if let Err(e) = user.save(&conn) {
|
||||||
|
error!("Error updating user: {:#?}", e);
|
||||||
|
}
|
||||||
|
err_handler!("Stamp exception is expired")
|
||||||
|
} else if !stamp_exception.routes.contains(¤t_route.to_string()) {
|
||||||
err_handler!("Invalid security stamp: Current route and exception route do not match")
|
err_handler!("Invalid security stamp: Current route and exception route do not match")
|
||||||
} else if stamp_exception.security_stamp != claims.sstamp {
|
} else if stamp_exception.security_stamp != claims.sstamp {
|
||||||
err_handler!("Invalid security stamp for matched stamp exception")
|
err_handler!("Invalid security stamp for matched stamp exception")
|
||||||
|
@@ -57,6 +57,8 @@ macro_rules! make_config {
|
|||||||
|
|
||||||
_env: ConfigBuilder,
|
_env: ConfigBuilder,
|
||||||
_usr: ConfigBuilder,
|
_usr: ConfigBuilder,
|
||||||
|
|
||||||
|
_overrides: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
|
||||||
@@ -79,20 +81,16 @@ macro_rules! make_config {
|
|||||||
dotenv::Error::Io(ioerr) => match ioerr.kind() {
|
dotenv::Error::Io(ioerr) => match ioerr.kind() {
|
||||||
std::io::ErrorKind::NotFound => {
|
std::io::ErrorKind::NotFound => {
|
||||||
println!("[INFO] No .env file found.\n");
|
println!("[INFO] No .env file found.\n");
|
||||||
()
|
|
||||||
},
|
},
|
||||||
std::io::ErrorKind::PermissionDenied => {
|
std::io::ErrorKind::PermissionDenied => {
|
||||||
println!("[WARNING] Permission Denied while trying to read the .env file!\n");
|
println!("[WARNING] Permission Denied while trying to read the .env file!\n");
|
||||||
()
|
|
||||||
},
|
},
|
||||||
_ => {
|
_ => {
|
||||||
println!("[WARNING] Reading the .env file failed:\n{:?}\n", ioerr);
|
println!("[WARNING] Reading the .env file failed:\n{:?}\n", ioerr);
|
||||||
()
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
_ => {
|
_ => {
|
||||||
println!("[WARNING] Reading the .env file failed:\n{:?}\n", e);
|
println!("[WARNING] Reading the .env file failed:\n{:?}\n", e);
|
||||||
()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -113,8 +111,7 @@ macro_rules! make_config {
|
|||||||
|
|
||||||
/// Merges the values of both builders into a new builder.
|
/// Merges the values of both builders into a new builder.
|
||||||
/// If both have the same element, `other` wins.
|
/// If both have the same element, `other` wins.
|
||||||
fn merge(&self, other: &Self, show_overrides: bool) -> Self {
|
fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<String>) -> Self {
|
||||||
let mut overrides = Vec::new();
|
|
||||||
let mut builder = self.clone();
|
let mut builder = self.clone();
|
||||||
$($(
|
$($(
|
||||||
if let v @Some(_) = &other.$name {
|
if let v @Some(_) = &other.$name {
|
||||||
@@ -176,9 +173,9 @@ macro_rules! make_config {
|
|||||||
)+)+
|
)+)+
|
||||||
|
|
||||||
pub fn prepare_json(&self) -> serde_json::Value {
|
pub fn prepare_json(&self) -> serde_json::Value {
|
||||||
let (def, cfg) = {
|
let (def, cfg, overriden) = {
|
||||||
let inner = &self.inner.read().unwrap();
|
let inner = &self.inner.read().unwrap();
|
||||||
(inner._env.build(), inner.config.clone())
|
(inner._env.build(), inner.config.clone(), inner._overrides.clone())
|
||||||
};
|
};
|
||||||
|
|
||||||
fn _get_form_type(rust_type: &str) -> &'static str {
|
fn _get_form_type(rust_type: &str) -> &'static str {
|
||||||
@@ -210,6 +207,7 @@ macro_rules! make_config {
|
|||||||
"default": def.$name,
|
"default": def.$name,
|
||||||
"type": _get_form_type(stringify!($ty)),
|
"type": _get_form_type(stringify!($ty)),
|
||||||
"doc": _get_doc(concat!($($doc),+)),
|
"doc": _get_doc(concat!($($doc),+)),
|
||||||
|
"overridden": overriden.contains(&stringify!($name).to_uppercase()),
|
||||||
}, )+
|
}, )+
|
||||||
]}, )+ ])
|
]}, )+ ])
|
||||||
}
|
}
|
||||||
@@ -224,6 +222,15 @@ macro_rules! make_config {
|
|||||||
stringify!($name): make_config!{ @supportstr $name, cfg.$name, $ty, $none_action },
|
stringify!($name): make_config!{ @supportstr $name, cfg.$name, $ty, $none_action },
|
||||||
)+)+ })
|
)+)+ })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_overrides(&self) -> Vec<String> {
|
||||||
|
let overrides = {
|
||||||
|
let inner = &self.inner.read().unwrap();
|
||||||
|
inner._overrides.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
overrides
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -342,12 +349,16 @@ make_config! {
|
|||||||
/// Enable web vault
|
/// Enable web vault
|
||||||
web_vault_enabled: bool, false, def, true;
|
web_vault_enabled: bool, false, def, true;
|
||||||
|
|
||||||
|
/// Allow Sends |> Controls whether users are allowed to create Bitwarden Sends.
|
||||||
|
/// This setting applies globally to all users. To control this on a per-org basis instead, use the "Disable Send" org policy.
|
||||||
|
sends_allowed: bool, true, def, true;
|
||||||
|
|
||||||
/// HIBP Api Key |> HaveIBeenPwned API Key, request it here: https://haveibeenpwned.com/API/Key
|
/// HIBP Api Key |> HaveIBeenPwned API Key, request it here: https://haveibeenpwned.com/API/Key
|
||||||
hibp_api_key: Pass, true, option;
|
hibp_api_key: Pass, true, option;
|
||||||
|
|
||||||
/// Per-user attachment limit (KB) |> Limit in kilobytes for a users attachments, once the limit is exceeded it won't be possible to upload more
|
/// Per-user attachment storage limit (KB) |> Max kilobytes of attachment storage allowed per user. When this limit is reached, the user will not be allowed to upload further attachments.
|
||||||
user_attachment_limit: i64, true, option;
|
user_attachment_limit: i64, true, option;
|
||||||
/// Per-organization attachment limit (KB) |> Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more
|
/// Per-organization attachment storage limit (KB) |> Max kilobytes of attachment storage allowed per org. When this limit is reached, org members will not be allowed to upload further attachments for ciphers owned by that org.
|
||||||
org_attachment_limit: i64, true, option;
|
org_attachment_limit: i64, true, option;
|
||||||
|
|
||||||
/// Trash auto-delete days |> Number of days to wait before auto-deleting a trashed item.
|
/// Trash auto-delete days |> Number of days to wait before auto-deleting a trashed item.
|
||||||
@@ -377,9 +388,10 @@ make_config! {
|
|||||||
/// Password iterations |> Number of server-side passwords hashing iterations.
|
/// Password iterations |> Number of server-side passwords hashing iterations.
|
||||||
/// The changes only apply when a user changes their password. Not recommended to lower the value
|
/// The changes only apply when a user changes their password. Not recommended to lower the value
|
||||||
password_iterations: i32, true, def, 100_000;
|
password_iterations: i32, true, def, 100_000;
|
||||||
/// Show password hints |> Controls if the password hint should be shown directly in the web page.
|
/// Show password hint |> Controls whether a password hint should be shown directly in the web page
|
||||||
/// Otherwise, if email is disabled, there is no way to see the password hint
|
/// if SMTP service is not configured. Not recommended for publicly-accessible instances as this
|
||||||
show_password_hint: bool, true, def, true;
|
/// provides unauthenticated access to potentially sensitive data.
|
||||||
|
show_password_hint: bool, true, def, false;
|
||||||
|
|
||||||
/// Admin page token |> The token used to authenticate in this very same page. Changing it here won't deauthorize the current session
|
/// Admin page token |> The token used to authenticate in this very same page. Changing it here won't deauthorize the current session
|
||||||
admin_token: Pass, true, option;
|
admin_token: Pass, true, option;
|
||||||
@@ -501,7 +513,7 @@ make_config! {
|
|||||||
/// Server name sent during HELO |> By default this value should be is on the machine's hostname, but might need to be changed in case it trips some anti-spam filters
|
/// Server name sent during HELO |> By default this value should be is on the machine's hostname, but might need to be changed in case it trips some anti-spam filters
|
||||||
helo_name: String, true, option;
|
helo_name: String, true, option;
|
||||||
/// Enable SMTP debugging (Know the risks!) |> DANGEROUS: Enabling this will output very detailed SMTP messages. This could contain sensitive information like passwords and usernames! Only enable this during troubleshooting!
|
/// Enable SMTP debugging (Know the risks!) |> DANGEROUS: Enabling this will output very detailed SMTP messages. This could contain sensitive information like passwords and usernames! Only enable this during troubleshooting!
|
||||||
smtp_debug: bool, true, def, false;
|
smtp_debug: bool, false, def, false;
|
||||||
/// Accept Invalid Certs (Know the risks!) |> DANGEROUS: Allow invalid certificates. This option introduces significant vulnerabilities to man-in-the-middle attacks!
|
/// Accept Invalid Certs (Know the risks!) |> DANGEROUS: Allow invalid certificates. This option introduces significant vulnerabilities to man-in-the-middle attacks!
|
||||||
smtp_accept_invalid_certs: bool, true, def, false;
|
smtp_accept_invalid_certs: bool, true, def, false;
|
||||||
/// Accept Invalid Hostnames (Know the risks!) |> DANGEROUS: Allow invalid hostnames. This option introduces significant vulnerabilities to man-in-the-middle attacks!
|
/// Accept Invalid Hostnames (Know the risks!) |> DANGEROUS: Allow invalid hostnames. This option introduces significant vulnerabilities to man-in-the-middle attacks!
|
||||||
@@ -595,7 +607,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
|
|||||||
|
|
||||||
// Check if the icon blacklist regex is valid
|
// Check if the icon blacklist regex is valid
|
||||||
if let Some(ref r) = cfg.icon_blacklist_regex {
|
if let Some(ref r) = cfg.icon_blacklist_regex {
|
||||||
let validate_regex = Regex::new(&r);
|
let validate_regex = Regex::new(r);
|
||||||
match validate_regex {
|
match validate_regex {
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
Err(e) => err!(format!("`ICON_BLACKLIST_REGEX` is invalid: {:#?}", e)),
|
Err(e) => err!(format!("`ICON_BLACKLIST_REGEX` is invalid: {:#?}", e)),
|
||||||
@@ -635,7 +647,8 @@ impl Config {
|
|||||||
let _usr = ConfigBuilder::from_file(&CONFIG_FILE).unwrap_or_default();
|
let _usr = ConfigBuilder::from_file(&CONFIG_FILE).unwrap_or_default();
|
||||||
|
|
||||||
// Create merged config, config file overwrites env
|
// Create merged config, config file overwrites env
|
||||||
let builder = _env.merge(&_usr, true);
|
let mut _overrides = Vec::new();
|
||||||
|
let builder = _env.merge(&_usr, true, &mut _overrides);
|
||||||
|
|
||||||
// Fill any missing with defaults
|
// Fill any missing with defaults
|
||||||
let config = builder.build();
|
let config = builder.build();
|
||||||
@@ -647,6 +660,7 @@ impl Config {
|
|||||||
config,
|
config,
|
||||||
_env,
|
_env,
|
||||||
_usr,
|
_usr,
|
||||||
|
_overrides,
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -662,9 +676,10 @@ impl Config {
|
|||||||
let config_str = serde_json::to_string_pretty(&builder)?;
|
let config_str = serde_json::to_string_pretty(&builder)?;
|
||||||
|
|
||||||
// Prepare the combined config
|
// Prepare the combined config
|
||||||
|
let mut overrides = Vec::new();
|
||||||
let config = {
|
let config = {
|
||||||
let env = &self.inner.read().unwrap()._env;
|
let env = &self.inner.read().unwrap()._env;
|
||||||
env.merge(&builder, false).build()
|
env.merge(&builder, false, &mut overrides).build()
|
||||||
};
|
};
|
||||||
validate_config(&config)?;
|
validate_config(&config)?;
|
||||||
|
|
||||||
@@ -673,6 +688,7 @@ impl Config {
|
|||||||
let mut writer = self.inner.write().unwrap();
|
let mut writer = self.inner.write().unwrap();
|
||||||
writer.config = config;
|
writer.config = config;
|
||||||
writer._usr = builder;
|
writer._usr = builder;
|
||||||
|
writer._overrides = overrides;
|
||||||
}
|
}
|
||||||
|
|
||||||
//Save to file
|
//Save to file
|
||||||
@@ -686,7 +702,8 @@ impl Config {
|
|||||||
pub fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
|
pub fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
|
||||||
let builder = {
|
let builder = {
|
||||||
let usr = &self.inner.read().unwrap()._usr;
|
let usr = &self.inner.read().unwrap()._usr;
|
||||||
usr.merge(&other, false)
|
let mut _overrides = Vec::new();
|
||||||
|
usr.merge(&other, false, &mut _overrides)
|
||||||
};
|
};
|
||||||
self.update_config(builder)
|
self.update_config(builder)
|
||||||
}
|
}
|
||||||
@@ -747,19 +764,17 @@ impl Config {
|
|||||||
let mut writer = self.inner.write().unwrap();
|
let mut writer = self.inner.write().unwrap();
|
||||||
writer.config = config;
|
writer.config = config;
|
||||||
writer._usr = usr;
|
writer._usr = usr;
|
||||||
|
writer._overrides = Vec::new();
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn private_rsa_key(&self) -> String {
|
pub fn private_rsa_key(&self) -> String {
|
||||||
format!("{}.der", CONFIG.rsa_key_filename())
|
|
||||||
}
|
|
||||||
pub fn private_rsa_key_pem(&self) -> String {
|
|
||||||
format!("{}.pem", CONFIG.rsa_key_filename())
|
format!("{}.pem", CONFIG.rsa_key_filename())
|
||||||
}
|
}
|
||||||
pub fn public_rsa_key(&self) -> String {
|
pub fn public_rsa_key(&self) -> String {
|
||||||
format!("{}.pub.der", CONFIG.rsa_key_filename())
|
format!("{}.pub.pem", CONFIG.rsa_key_filename())
|
||||||
}
|
}
|
||||||
pub fn mail_enabled(&self) -> bool {
|
pub fn mail_enabled(&self) -> bool {
|
||||||
let inner = &self.inner.read().unwrap().config;
|
let inner = &self.inner.read().unwrap().config;
|
||||||
@@ -832,6 +847,10 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
// First register default templates here
|
// First register default templates here
|
||||||
|
reg!("email/email_header");
|
||||||
|
reg!("email/email_footer");
|
||||||
|
reg!("email/email_footer_text");
|
||||||
|
|
||||||
reg!("email/change_email", ".html");
|
reg!("email/change_email", ".html");
|
||||||
reg!("email/delete_account", ".html");
|
reg!("email/delete_account", ".html");
|
||||||
reg!("email/invite_accepted", ".html");
|
reg!("email/invite_accepted", ".html");
|
||||||
@@ -839,6 +858,7 @@ where
|
|||||||
reg!("email/new_device_logged_in", ".html");
|
reg!("email/new_device_logged_in", ".html");
|
||||||
reg!("email/pw_hint_none", ".html");
|
reg!("email/pw_hint_none", ".html");
|
||||||
reg!("email/pw_hint_some", ".html");
|
reg!("email/pw_hint_some", ".html");
|
||||||
|
reg!("email/send_2fa_removed_from_org", ".html");
|
||||||
reg!("email/send_org_invite", ".html");
|
reg!("email/send_org_invite", ".html");
|
||||||
reg!("email/twofactor_email", ".html");
|
reg!("email/twofactor_email", ".html");
|
||||||
reg!("email/verify_email", ".html");
|
reg!("email/verify_email", ".html");
|
||||||
|
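The override tracking above threads a `&mut Vec<String>` through every `merge()` call so that `get_overrides()` can later report which settings were overridden during the merge. The following is a minimal, self-contained sketch of that pattern; the `Builder` type and its fields are illustrative only, not the actual `ConfigBuilder`.

    use std::collections::HashMap;

    #[derive(Default, Clone)]
    struct Builder {
        values: HashMap<String, String>,
    }

    impl Builder {
        /// Merge `other` into `self`. When `overwrite` is true and a key from
        /// `other` replaces an existing value, record its uppercased name so a
        /// later `get_overrides()`-style accessor can report it.
        fn merge(&self, other: &Builder, overwrite: bool, overrides: &mut Vec<String>) -> Builder {
            let mut merged = self.clone();
            for (key, value) in &other.values {
                if overwrite || !merged.values.contains_key(key) {
                    if merged.values.contains_key(key) {
                        overrides.push(key.to_uppercase());
                    }
                    merged.values.insert(key.clone(), value.clone());
                }
            }
            merged
        }
    }

    fn main() {
        let env = Builder { values: HashMap::from([("domain".to_string(), "https://from-env.example".to_string())]) };
        let file = Builder { values: HashMap::from([("domain".to_string(), "https://from-file.example".to_string())]) };

        let mut overrides = Vec::new();
        let _merged = env.merge(&file, true, &mut overrides);
        assert_eq!(overrides, vec!["DOMAIN".to_string()]);
    }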
@@ -3,6 +3,7 @@
//
use std::num::NonZeroU32;

+use data_encoding::HEXLOWER;
use ring::{digest, hmac, pbkdf2};

use crate::error::Error;
@@ -28,8 +29,6 @@ pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8], iterati
// HMAC
//
pub fn hmac_sign(key: &str, data: &str) -> String {
-use data_encoding::HEXLOWER;
-
let key = hmac::Key::new(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY, key.as_bytes());
let signature = hmac::sign(&key, data.as_bytes());

@@ -52,6 +51,20 @@ pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
array
}

+pub fn generate_id(num_bytes: usize) -> String {
+HEXLOWER.encode(&get_random(vec![0; num_bytes]))
+}
+
+pub fn generate_send_id() -> String {
+// Send IDs are globally scoped, so make them longer to avoid collisions.
+generate_id(32) // 256 bits
+}
+
+pub fn generate_attachment_id() -> String {
+// Attachment IDs are scoped to a cipher, so they can be smaller.
+generate_id(10) // 80 bits
+}
+
pub fn generate_token(token_size: u32) -> Result<String, Error> {
// A u64 can represent all whole numbers up to 19 digits long.
if token_size > 19 {
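The new ID helpers are thin wrappers around the existing CSPRNG helper plus hex encoding, with the size rationale given in the comments (32 random bytes for globally-scoped Send IDs, 10 for cipher-scoped attachment IDs). A standalone sketch using the same crates the file already imports (`ring`, `data_encoding`):

    use data_encoding::HEXLOWER;
    use ring::rand::{SecureRandom, SystemRandom};

    // Fill `num_bytes` from the system CSPRNG and hex-encode the result.
    fn generate_id(num_bytes: usize) -> String {
        let mut bytes = vec![0u8; num_bytes];
        SystemRandom::new().fill(&mut bytes).expect("CSPRNG failure");
        HEXLOWER.encode(&bytes)
    }

    fn main() {
        let send_id = generate_id(32);       // 256 bits, globally scoped
        let attachment_id = generate_id(10); // 80 bits, scoped to one cipher
        assert_eq!(send_id.len(), 64);
        assert_eq!(attachment_id.len(), 20);
        println!("{send_id} {attachment_id}");
    }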
@@ -114,7 +114,7 @@ macro_rules! db_run {
};

// Different code for each db
-( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {
+( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{
#[allow(unused)] use diesel::prelude::*;
match $conn {
$($(
@@ -128,7 +128,7 @@ macro_rules! db_run {
$body
},
)+)+
-}
+}}
};

// Same for all dbs
@@ -157,6 +157,24 @@ pub trait FromDb {
fn from_db(self) -> Self::Output;
}

+impl<T: FromDb> FromDb for Vec<T> {
+type Output = Vec<T::Output>;
+#[allow(clippy::wrong_self_convention)]
+#[inline(always)]
+fn from_db(self) -> Self::Output {
+self.into_iter().map(crate::db::FromDb::from_db).collect()
+}
+}
+
+impl<T: FromDb> FromDb for Option<T> {
+type Output = Option<T::Output>;
+#[allow(clippy::wrong_self_convention)]
+#[inline(always)]
+fn from_db(self) -> Self::Output {
+self.map(crate::db::FromDb::from_db)
+}
+}
+
// For each struct eg. Cipher, we create a CipherDb inside a module named __$db_model (where $db is sqlite, mysql or postgresql),
// to implement the Diesel traits. We also provide methods to convert between them and the basic structs. Later, that module will be auto imported when using db_run!
#[macro_export]
@@ -197,18 +215,9 @@ macro_rules! db_object {

impl crate::db::FromDb for [<$name Db>] {
type Output = super::$name;
+#[allow(clippy::wrong_self_convention)]
#[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } }
}

-impl crate::db::FromDb for Vec<[<$name Db>]> {
-type Output = Vec<super::$name>;
-#[inline(always)] fn from_db(self) -> Self::Output { self.into_iter().map(crate::db::FromDb::from_db).collect() }
-}
-
-impl crate::db::FromDb for Option<[<$name Db>]> {
-type Output = Option<super::$name>;
-#[inline(always)] fn from_db(self) -> Self::Output { self.map(crate::db::FromDb::from_db) }
-}
}
};
}
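The change above removes the per-model `Vec` and `Option` impls that `db_object!` used to generate and replaces them with two blanket impls on `FromDb`. A minimal standalone illustration of why one impl per row type is now enough (the `CipherDb`/`Cipher` pair here is a toy stand-in, not the real generated code):

    trait FromDb {
        type Output;
        fn from_db(self) -> Self::Output;
    }

    struct CipherDb { name: String }
    struct Cipher { name: String }

    // Only the row type needs an explicit conversion...
    impl FromDb for CipherDb {
        type Output = Cipher;
        fn from_db(self) -> Cipher {
            Cipher { name: self.name }
        }
    }

    // ...the Vec and Option cases come for free via blanket impls.
    impl<T: FromDb> FromDb for Vec<T> {
        type Output = Vec<T::Output>;
        fn from_db(self) -> Self::Output {
            self.into_iter().map(FromDb::from_db).collect()
        }
    }

    impl<T: FromDb> FromDb for Option<T> {
        type Output = Option<T::Output>;
        fn from_db(self) -> Self::Output {
            self.map(FromDb::from_db)
        }
    }

    fn main() {
        let rows = vec![CipherDb { name: "a".into() }, CipherDb { name: "b".into() }];
        let ciphers: Vec<Cipher> = rows.from_db();
        println!("{}", ciphers[0].name);
        let maybe: Option<Cipher> = Some(CipherDb { name: "c".into() }).from_db();
        assert!(maybe.is_some());
    }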
@@ -1,3 +1,5 @@
+use std::io::ErrorKind;
+
use serde_json::Value;

use super::Cipher;
@@ -12,7 +14,7 @@ db_object! {
pub struct Attachment {
pub id: String,
pub cipher_uuid: String,
-pub file_name: String,
+pub file_name: String, // encrypted
pub file_size: i32,
pub akey: Option<String>,
}
@@ -20,13 +22,13 @@ db_object! {

/// Local methods
impl Attachment {
-pub const fn new(id: String, cipher_uuid: String, file_name: String, file_size: i32) -> Self {
+pub const fn new(id: String, cipher_uuid: String, file_name: String, file_size: i32, akey: Option<String>) -> Self {
Self {
id,
cipher_uuid,
file_name,
file_size,
-akey: None,
+akey,
}
}

@@ -34,18 +36,17 @@ impl Attachment {
format!("{}/{}/{}", CONFIG.attachments_folder(), self.cipher_uuid, self.id)
}

+pub fn get_url(&self, host: &str) -> String {
+format!("{}/attachments/{}/{}", host, self.cipher_uuid, self.id)
+}
+
pub fn to_json(&self, host: &str) -> Value {
-use crate::util::get_display_size;
-
-let web_path = format!("{}/attachments/{}/{}", host, self.cipher_uuid, self.id);
-let display_size = get_display_size(self.file_size);

json!({
"Id": self.id,
-"Url": web_path,
+"Url": self.get_url(host),
"FileName": self.file_name,
"Size": self.file_size.to_string(),
-"SizeName": display_size,
+"SizeName": crate::util::get_display_size(self.file_size),
"Key": self.akey,
"Object": "attachment"
})
@@ -91,7 +92,7 @@ impl Attachment {
}
}

-pub fn delete(self, conn: &DbConn) -> EmptyResult {
+pub fn delete(&self, conn: &DbConn) -> EmptyResult {
db_run! { conn: {
crate::util::retry(
|| diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn),
@@ -99,14 +100,25 @@ impl Attachment {
)
.map_res("Error deleting attachment")?;

-crate::util::delete_file(&self.get_file_path())?;
-Ok(())
+let file_path = &self.get_file_path();
+
+match crate::util::delete_file(file_path) {
+// Ignore "file not found" errors. This can happen when the
+// upstream caller has already cleaned up the file as part of
+// its own error handling.
+Err(e) if e.kind() == ErrorKind::NotFound => {
+debug!("File '{}' already deleted.", file_path);
+Ok(())
+}
+Err(e) => Err(e.into()),
+_ => Ok(()),
+}
}}
}

pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult {
-for attachment in Attachment::find_by_cipher(&cipher_uuid, &conn) {
-attachment.delete(&conn)?;
+for attachment in Attachment::find_by_cipher(cipher_uuid, conn) {
+attachment.delete(conn)?;
}
Ok(())
}
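`Attachment::delete` now treats a missing file as success, so a caller that already removed the file during its own error handling does not turn cleanup into a new error. A standalone sketch of the same `ErrorKind::NotFound` handling (the path and function name are illustrative):

    use std::fs;
    use std::io::ErrorKind;

    fn delete_file_if_present(path: &str) -> std::io::Result<()> {
        match fs::remove_file(path) {
            // The file was already gone, e.g. an upstream error handler
            // cleaned it up first. Treat this as success.
            Err(e) if e.kind() == ErrorKind::NotFound => {
                eprintln!("File '{}' already deleted.", path);
                Ok(())
            }
            Err(e) => Err(e),
            Ok(()) => Ok(()),
        }
    }

    fn main() -> std::io::Result<()> {
        delete_file_if_present("/tmp/does-not-exist.bin")?;
        Ok(())
    }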
@@ -38,9 +38,16 @@ db_object! {

pub password_history: Option<String>,
pub deleted_at: Option<NaiveDateTime>,
+pub reprompt: Option<i32>,
}
}

+#[allow(dead_code)]
+pub enum RepromptType {
+None = 0,
+Password = 1, // not currently used in server
+}
+
/// Local methods
impl Cipher {
pub fn new(atype: i32, name: String) -> Self {
@@ -63,6 +70,7 @@ impl Cipher {
data: String::new(),
password_history: None,
deleted_at: None,
+reprompt: None,
}
}
}
@@ -89,7 +97,7 @@ impl Cipher {
let password_history_json =
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);

-let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) {
+let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, conn) {
Some((ro, hp)) => (ro, hp),
None => {
error!("Cipher ownership assertion failure");
@@ -136,8 +144,9 @@ impl Cipher {
"Type": self.atype,
"RevisionDate": format_date(&self.updated_at),
"DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
-"FolderId": self.get_folder_uuid(&user_uuid, conn),
-"Favorite": self.is_favorite(&user_uuid, conn),
+"FolderId": self.get_folder_uuid(user_uuid, conn),
+"Favorite": self.is_favorite(user_uuid, conn),
+"Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
"OrganizationId": self.organization_uuid,
"Attachments": attachments_json,
// We have UseTotp set to true by default within the Organization model.
@@ -184,13 +193,13 @@ impl Cipher {
let mut user_uuids = Vec::new();
match self.user_uuid {
Some(ref user_uuid) => {
-User::update_uuid_revision(&user_uuid, conn);
+User::update_uuid_revision(user_uuid, conn);
user_uuids.push(user_uuid.clone())
}
None => {
// Belongs to Organization, need to update affected users
if let Some(ref org_uuid) = self.organization_uuid {
-UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| {
+UserOrganization::find_by_cipher_and_org(&self.uuid, org_uuid, conn).iter().for_each(|user_org| {
User::update_uuid_revision(&user_org.user_uuid, conn);
user_uuids.push(user_org.user_uuid.clone())
});
@@ -251,15 +260,15 @@ impl Cipher {
}

pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult {
-for cipher in Self::find_by_org(org_uuid, &conn) {
-cipher.delete(&conn)?;
+for cipher in Self::find_by_org(org_uuid, conn) {
+cipher.delete(conn)?;
}
Ok(())
}

pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
-for cipher in Self::find_owned_by_user(user_uuid, &conn) {
-cipher.delete(&conn)?;
+for cipher in Self::find_owned_by_user(user_uuid, conn) {
+cipher.delete(conn)?;
}
Ok(())
}
@@ -270,7 +279,7 @@ impl Cipher {
let now = Utc::now().naive_utc();
let dt = now - Duration::days(auto_delete_days);
for cipher in Self::find_deleted_before(&dt, conn) {
-cipher.delete(&conn).ok();
+cipher.delete(conn).ok();
}
}
}
@@ -278,7 +287,7 @@ impl Cipher {
pub fn move_to_folder(&self, folder_uuid: Option<String>, user_uuid: &str, conn: &DbConn) -> EmptyResult {
User::update_uuid_revision(user_uuid, conn);

-match (self.get_folder_uuid(&user_uuid, conn), folder_uuid) {
+match (self.get_folder_uuid(user_uuid, conn), folder_uuid) {
// No changes
(None, None) => Ok(()),
(Some(ref old), Some(ref new)) if old == new => Ok(()),
@@ -310,7 +319,7 @@ impl Cipher {
/// Returns whether this cipher is owned by an org in which the user has full access.
pub fn is_in_full_access_org(&self, user_uuid: &str, conn: &DbConn) -> bool {
if let Some(ref org_uuid) = self.organization_uuid {
-if let Some(user_org) = UserOrganization::find_by_user_and_org(&user_uuid, &org_uuid, conn) {
+if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) {
return user_org.has_full_access();
}
}
@@ -327,7 +336,7 @@ impl Cipher {
// Check whether this cipher is directly owned by the user, or is in
// a collection that the user has full access to. If so, there are no
// access restrictions.
-if self.is_owned_by_user(&user_uuid) || self.is_in_full_access_org(&user_uuid, &conn) {
+if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, conn) {
return Some((false, false));
}

@@ -368,14 +377,14 @@ impl Cipher {
}

pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-match self.get_access_restrictions(&user_uuid, &conn) {
+match self.get_access_restrictions(user_uuid, conn) {
Some((read_only, _hide_passwords)) => !read_only,
None => false,
}
}

pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-self.get_access_restrictions(&user_uuid, &conn).is_some()
+self.get_access_restrictions(user_uuid, conn).is_some()
}

// Returns whether this cipher is a favorite of the specified user.
@@ -109,8 +109,8 @@ impl Collection {

pub fn delete(self, conn: &DbConn) -> EmptyResult {
self.update_users_revision(conn);
-CollectionCipher::delete_all_by_collection(&self.uuid, &conn)?;
-CollectionUser::delete_all_by_collection(&self.uuid, &conn)?;
+CollectionCipher::delete_all_by_collection(&self.uuid, conn)?;
+CollectionUser::delete_all_by_collection(&self.uuid, conn)?;

db_run! { conn: {
diesel::delete(collections::table.filter(collections::uuid.eq(self.uuid)))
@@ -120,8 +120,8 @@ impl Collection {
}

pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult {
-for collection in Self::find_by_organization(org_uuid, &conn) {
-collection.delete(&conn)?;
+for collection in Self::find_by_organization(org_uuid, conn) {
+collection.delete(conn)?;
}
Ok(())
}
@@ -220,7 +220,7 @@ impl Collection {
}

pub fn is_writable_by_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-match UserOrganization::find_by_user_and_org(&user_uuid, &self.org_uuid, &conn) {
+match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn) {
None => false, // Not in Org
Some(user_org) => {
if user_org.has_full_access() {
@@ -242,7 +242,7 @@ impl Collection {
}

pub fn hide_passwords_for_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-match UserOrganization::find_by_user_and_org(&user_uuid, &self.org_uuid, &conn) {
+match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn) {
None => true, // Not in Org
Some(user_org) => {
if user_org.has_full_access() {
@@ -286,7 +286,7 @@ impl CollectionUser {
hide_passwords: bool,
conn: &DbConn,
) -> EmptyResult {
-User::update_uuid_revision(&user_uuid, conn);
+User::update_uuid_revision(user_uuid, conn);

db_run! { conn:
sqlite, mysql {
@@ -375,7 +375,7 @@ impl CollectionUser {
}

pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
-CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| {
+CollectionUser::find_by_collection(collection_uuid, conn).iter().for_each(|collection| {
User::update_uuid_revision(&collection.user_uuid, conn);
});

@@ -406,7 +406,7 @@ impl CollectionUser {
/// Database methods
impl CollectionCipher {
pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
-Self::update_users_revision(&collection_uuid, conn);
+Self::update_users_revision(collection_uuid, conn);

db_run! { conn:
sqlite, mysql {
@@ -436,7 +436,7 @@ impl CollectionCipher {
}

pub fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
-Self::update_users_revision(&collection_uuid, conn);
+Self::update_users_revision(collection_uuid, conn);

db_run! { conn: {
diesel::delete(
@@ -143,8 +143,8 @@ impl Device {
}

pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
-for device in Self::find_by_user(user_uuid, &conn) {
-device.delete(&conn)?;
+for device in Self::find_by_user(user_uuid, conn) {
+device.delete(conn)?;
}
Ok(())
}
@@ -32,10 +32,10 @@ impl Favorite {

// Sets whether the specified cipher is a favorite of the specified user.
pub fn set_favorite(favorite: bool, cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> EmptyResult {
-let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, &conn), favorite);
+let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, conn), favorite);
match (old, new) {
(false, true) => {
-User::update_uuid_revision(user_uuid, &conn);
+User::update_uuid_revision(user_uuid, conn);
db_run! { conn: {
diesel::insert_into(favorites::table)
.values((
@@ -47,7 +47,7 @@ impl Favorite {
}}
}
(true, false) => {
-User::update_uuid_revision(user_uuid, &conn);
+User::update_uuid_revision(user_uuid, conn);
db_run! { conn: {
diesel::delete(
favorites::table
@@ -107,7 +107,7 @@ impl Folder {

pub fn delete(&self, conn: &DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn);
-FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;
+FolderCipher::delete_all_by_folder(&self.uuid, conn)?;

db_run! { conn: {
diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid)))
@@ -117,8 +117,8 @@ impl Folder {
}

pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
-for folder in Self::find_by_user(user_uuid, &conn) {
-folder.delete(&conn)?;
+for folder in Self::find_by_user(user_uuid, conn) {
+folder.delete(conn)?;
}
Ok(())
}
@@ -1,8 +1,10 @@
+use serde::Deserialize;
use serde_json::Value;

use crate::api::EmptyResult;
use crate::db::DbConn;
use crate::error::MapResult;
+use crate::util::UpCase;

use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};

@@ -20,7 +22,7 @@ db_object! {
}
}

-#[derive(Copy, Clone, num_derive::FromPrimitive)]
+#[derive(Copy, Clone, PartialEq, num_derive::FromPrimitive)]
pub enum OrgPolicyType {
TwoFactorAuthentication = 0,
MasterPassword = 1,
@@ -29,6 +31,14 @@ pub enum OrgPolicyType {
// RequireSso = 4, // Not currently supported.
PersonalOwnership = 5,
DisableSend = 6,
+SendOptions = 7,
+}
+
+// https://github.com/bitwarden/server/blob/master/src/Core/Models/Data/SendOptionsPolicyData.cs
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+pub struct SendOptionsPolicyData {
+pub DisableHideEmail: bool,
}

/// Local methods
@@ -188,6 +198,30 @@ impl OrgPolicy {
false
}

+/// Returns true if the user belongs to an org that has enabled the `DisableHideEmail`
+/// option of the `Send Options` policy, and the user is not an owner or admin of that org.
+pub fn is_hide_email_disabled(user_uuid: &str, conn: &DbConn) -> bool {
+// Returns confirmed users only.
+for policy in OrgPolicy::find_by_user(user_uuid, conn) {
+if policy.enabled && policy.has_type(OrgPolicyType::SendOptions) {
+let org_uuid = &policy.org_uuid;
+if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) {
+if user.atype < UserOrgType::Admin {
+match serde_json::from_str::<UpCase<SendOptionsPolicyData>>(&policy.data) {
+Ok(opts) => {
+if opts.data.DisableHideEmail {
+return true;
+}
+}
+_ => error!("Failed to deserialize policy data: {}", policy.data),
+}
+}
+}
+}
+}
+false
+}
+
/*pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
db_run! { conn: {
diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
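`is_hide_email_disabled` walks the user's enabled `SendOptions` policies and parses each policy's JSON `data` blob; in the code above the payload is wrapped in `UpCase` so either casing deserializes. The following is a self-contained sketch of the underlying check only; the enforcement call site and the helper name are assumptions, not part of this diff.

    use serde::Deserialize;

    #[derive(Deserialize)]
    #[allow(non_snake_case)]
    struct SendOptionsPolicyData {
        DisableHideEmail: bool,
    }

    // Hypothetical helper: non-admin members of an org with the policy
    // enabled lose the "hide my email" option on Sends.
    fn hide_email_allowed(policy_enabled: bool, is_admin: bool, policy_data: &str) -> bool {
        if !policy_enabled || is_admin {
            return true;
        }
        match serde_json::from_str::<SendOptionsPolicyData>(policy_data) {
            Ok(opts) => !opts.DisableHideEmail,
            Err(_) => true, // unparsable policy data: fall back to allowing it
        }
    }

    fn main() {
        let data = r#"{"DisableHideEmail": true}"#;
        assert!(!hide_email_allowed(true, false, data)); // member: option blocked
        assert!(hide_email_allowed(true, true, data));   // admin/owner: unaffected
    }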
@@ -2,7 +2,7 @@ use num_traits::FromPrimitive;
use serde_json::Value;
use std::cmp::Ordering;

-use super::{CollectionUser, OrgPolicy, User};
+use super::{CollectionUser, OrgPolicy, OrgPolicyType, User};

db_object! {
#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
@@ -12,6 +12,8 @@ db_object! {
pub uuid: String,
pub name: String,
pub billing_email: String,
+pub private_key: Option<String>,
+pub public_key: Option<String>,
}

#[derive(Identifiable, Queryable, Insertable, AsChangeset)]
@@ -122,12 +124,13 @@ impl PartialOrd<UserOrgType> for i32 {

/// Local methods
impl Organization {
-pub fn new(name: String, billing_email: String) -> Self {
+pub fn new(name: String, billing_email: String, private_key: Option<String>, public_key: Option<String>) -> Self {
Self {
uuid: crate::util::get_uuid(),

name,
billing_email,
+private_key,
+public_key,
}
}

@@ -140,14 +143,16 @@ impl Organization {
"MaxCollections": 10, // The value doesn't matter, we don't check server-side
"MaxStorageGb": 10, // The value doesn't matter, we don't check server-side
"Use2fa": true,
-"UseDirectory": false,
-"UseEvents": false,
-"UseGroups": false,
+"UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
+"UseEvents": false, // not supported by us
+"UseGroups": false, // not supported by us
"UseTotp": true,
"UsePolicies": true,
"UseSso": false, // We do not support SSO
"SelfHost": true,
"UseApi": false, // not supported by us
+"HasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(),
+"ResetPasswordEnrolled": false, // not supported by us

"BusinessName": null,
"BusinessAddress1": null,
@@ -228,10 +233,10 @@ impl Organization {
pub fn delete(self, conn: &DbConn) -> EmptyResult {
use super::{Cipher, Collection};

-Cipher::delete_all_by_organization(&self.uuid, &conn)?;
-Collection::delete_all_by_organization(&self.uuid, &conn)?;
-UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
-OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;
+Cipher::delete_all_by_organization(&self.uuid, conn)?;
+Collection::delete_all_by_organization(&self.uuid, conn)?;
+UserOrganization::delete_all_by_organization(&self.uuid, conn)?;
+OrgPolicy::delete_all_by_organization(&self.uuid, conn)?;

db_run! { conn: {
diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
@@ -269,13 +274,15 @@ impl UserOrganization {
"UsersGetPremium": true,

"Use2fa": true,
-"UseDirectory": false,
-"UseEvents": false,
-"UseGroups": false,
+"UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet)
+"UseEvents": false, // not supported by us
+"UseGroups": false, // not supported by us
"UseTotp": true,
"UsePolicies": true,
"UseApi": false, // not supported by us
"SelfHost": true,
+"HasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(),
+"ResetPasswordEnrolled": false, // not supported by us
"SsoBound": false, // We do not support SSO
"UseSso": false, // We do not support SSO
// TODO: Add support for Business Portal
@@ -293,10 +300,12 @@ impl UserOrganization {
// "AccessReports": false,
// "ManageAllCollections": false,
// "ManageAssignedCollections": false,
+// "ManageCiphers": false,
// "ManageGroups": false,
// "ManagePolicies": false,
+// "ManageResetPassword": false,
// "ManageSso": false,
-// "ManageUsers": false
+// "ManageUsers": false,
// },

"MaxStorageGb": 10, // The value doesn't matter, we don't check server-side
@@ -402,7 +411,7 @@ impl UserOrganization {
pub fn delete(self, conn: &DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn);

-CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, &conn)?;
+CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, conn)?;

db_run! { conn: {
diesel::delete(users_organizations::table.filter(users_organizations::uuid.eq(self.uuid)))
@@ -412,22 +421,22 @@ impl UserOrganization {
}

pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult {
-for user_org in Self::find_by_org(&org_uuid, &conn) {
-user_org.delete(&conn)?;
+for user_org in Self::find_by_org(org_uuid, conn) {
+user_org.delete(conn)?;
}
Ok(())
}

pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
-for user_org in Self::find_any_state_by_user(&user_uuid, &conn) {
-user_org.delete(&conn)?;
+for user_org in Self::find_any_state_by_user(user_uuid, conn) {
+user_org.delete(conn)?;
}
Ok(())
}

pub fn find_by_email_and_org(email: &str, org_id: &str, conn: &DbConn) -> Option<UserOrganization> {
if let Some(user) = super::User::find_by_mail(email, conn) {
-if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, &conn) {
+if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn) {
return Some(user_org);
}
}
@@ -535,6 +544,25 @@ impl UserOrganization {
}}
}

+pub fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec<Self> {
+db_run! { conn: {
+users_organizations::table
+.inner_join(
+org_policies::table.on(
+org_policies::org_uuid.eq(users_organizations::org_uuid)
+.and(users_organizations::user_uuid.eq(user_uuid))
+.and(org_policies::atype.eq(policy_type as i32))
+.and(org_policies::enabled.eq(true)))
+)
+.filter(
+users_organizations::status.eq(UserOrgStatus::Confirmed as i32)
+)
+.select(users_organizations::all_columns)
+.load::<UserOrganizationDb>(conn)
+.unwrap_or_default().from_db()
+}}
+}
+
pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> {
db_run! { conn: {
users_organizations::table
@@ -36,6 +36,7 @@ db_object! {
pub deletion_date: NaiveDateTime,

pub disabled: bool,
+pub hide_email: Option<bool>,
}
}

@@ -73,6 +74,7 @@ impl Send {
deletion_date,

disabled: false,
+hide_email: None,
}
}

@@ -101,6 +103,22 @@ impl Send {
}
}

+pub fn creator_identifier(&self, conn: &DbConn) -> Option<String> {
+if let Some(hide_email) = self.hide_email {
+if hide_email {
+return None;
+}
+}
+
+if let Some(user_uuid) = &self.user_uuid {
+if let Some(user) = User::find_by_uuid(user_uuid, conn) {
+return Some(user.email);
+}
+}
+
+None
+}
+
pub fn to_json(&self) -> Value {
use crate::util::format_date;
use data_encoding::BASE64URL_NOPAD;
@@ -123,6 +141,7 @@ impl Send {
"AccessCount": self.access_count,
"Password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)),
"Disabled": self.disabled,
+"HideEmail": self.hide_email,

"RevisionDate": format_date(&self.revision_date),
"ExpirationDate": self.expiration_date.as_ref().map(format_date),
@@ -131,7 +150,7 @@ impl Send {
})
}

-pub fn to_json_access(&self) -> Value {
+pub fn to_json_access(&self, conn: &DbConn) -> Value {
use crate::util::format_date;

let data: Value = serde_json::from_str(&self.data).unwrap_or_default();
@@ -145,6 +164,7 @@ impl Send {
"File": if self.atype == SendType::File as i32 { Some(&data) } else { None },

"ExpirationDate": self.expiration_date.as_ref().map(format_date),
+"CreatorIdentifier": self.creator_identifier(conn),
"Object": "send-access",
})
}
@@ -207,15 +227,15 @@ impl Send {

/// Purge all sends that are past their deletion date.
pub fn purge(conn: &DbConn) {
-for send in Self::find_by_past_deletion_date(&conn) {
-send.delete(&conn).ok();
+for send in Self::find_by_past_deletion_date(conn) {
+send.delete(conn).ok();
}
}

pub fn update_users_revision(&self, conn: &DbConn) {
match &self.user_uuid {
Some(user_uuid) => {
-User::update_uuid_revision(&user_uuid, conn);
+User::update_uuid_revision(user_uuid, conn);
}
None => {
// Belongs to Organization, not implemented
@@ -224,8 +244,8 @@ impl Send {
}

pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
-for send in Self::find_by_user(user_uuid, &conn) {
-send.delete(&conn)?;
+for send in Self::find_by_user(user_uuid, conn) {
+send.delete(conn)?;
}
Ok(())
}
@@ -31,11 +31,14 @@ pub enum TwoFactorType {
U2f = 4,
Remember = 5,
OrganizationDuo = 6,
+Webauthn = 7,

// These are implementation details
U2fRegisterChallenge = 1000,
U2fLoginChallenge = 1001,
EmailVerificationChallenge = 1002,
+WebauthnRegisterChallenge = 1003,
+WebauthnLoginChallenge = 1004,
}

/// Local methods
@@ -146,4 +149,73 @@ impl TwoFactor {
.map_res("Error deleting twofactors")
}}
}

+pub fn migrate_u2f_to_webauthn(conn: &DbConn) -> EmptyResult {
+let u2f_factors = db_run! { conn: {
+twofactor::table
+.filter(twofactor::atype.eq(TwoFactorType::U2f as i32))
+.load::<TwoFactorDb>(conn)
+.expect("Error loading twofactor")
+.from_db()
+}};
+
+use crate::api::core::two_factor::u2f::U2FRegistration;
+use crate::api::core::two_factor::webauthn::{get_webauthn_registrations, WebauthnRegistration};
+use std::convert::TryInto;
+use webauthn_rs::proto::*;
+
+for mut u2f in u2f_factors {
+let mut regs: Vec<U2FRegistration> = serde_json::from_str(&u2f.data)?;
+// If there are no registrations or they are migrated (we do the migration in batch so we can consider them all migrated when the first one is)
+if regs.is_empty() || regs[0].migrated == Some(true) {
+continue;
+}
+
+let (_, mut webauthn_regs) = get_webauthn_registrations(&u2f.user_uuid, conn)?;
+
+// If the user already has webauthn registrations saved, don't overwrite them
+if !webauthn_regs.is_empty() {
+continue;
+}
+
+for reg in &mut regs {
+let x: [u8; 32] = reg.reg.pub_key[1..33].try_into().unwrap();
+let y: [u8; 32] = reg.reg.pub_key[33..65].try_into().unwrap();
+
+let key = COSEKey {
+type_: COSEAlgorithm::ES256,
+key: COSEKeyType::EC_EC2(COSEEC2Key {
+curve: ECDSACurve::SECP256R1,
+x,
+y,
+}),
+};
+
+let new_reg = WebauthnRegistration {
+id: reg.id,
+migrated: true,
+name: reg.name.clone(),
+credential: Credential {
+counter: reg.counter,
+verified: false,
+cred: key,
+cred_id: reg.reg.key_handle.clone(),
+registration_policy: UserVerificationPolicy::Discouraged,
+},
+};
+
+webauthn_regs.push(new_reg);
+
+reg.migrated = Some(true);
+}
+
+u2f.data = serde_json::to_string(&regs)?;
+u2f.save(conn)?;
+
+TwoFactor::new(u2f.user_uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(&webauthn_regs)?)
+.save(conn)?;
+}
+
+Ok(())
+}
}
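The `pub_key[1..33]` / `pub_key[33..65]` slicing in the migration relies on U2F public keys being uncompressed P-256 points (`0x04 || X || Y`, 65 bytes), which is exactly the X/Y pair a COSE EC2 key needs. A standalone sketch of that split with the length and format check made explicit (the function name is illustrative):

    use std::convert::TryInto;

    fn split_uncompressed_p256(pub_key: &[u8]) -> Option<([u8; 32], [u8; 32])> {
        if pub_key.len() != 65 || pub_key[0] != 0x04 {
            return None; // not an uncompressed point
        }
        let x: [u8; 32] = pub_key[1..33].try_into().ok()?;
        let y: [u8; 32] = pub_key[33..65].try_into().ok()?;
        Some((x, y))
    }

    fn main() {
        let mut fake = vec![0x04u8];
        fake.extend_from_slice(&[0u8; 64]);
        assert!(split_uncompressed_p256(&fake).is_some());
        assert!(split_uncompressed_p256(&[0u8; 10]).is_none());
    }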
@@ -1,4 +1,4 @@
|
|||||||
use chrono::{NaiveDateTime, Utc};
|
use chrono::{Duration, NaiveDateTime, Utc};
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
use crate::crypto;
|
use crate::crypto;
|
||||||
@@ -63,8 +63,9 @@ enum UserStatus {
|
|||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
pub struct UserStampException {
|
pub struct UserStampException {
|
||||||
pub route: String,
|
pub routes: Vec<String>,
|
||||||
pub security_stamp: String,
|
pub security_stamp: String,
|
||||||
|
pub expire: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Local methods
|
/// Local methods
|
||||||
@@ -135,9 +136,11 @@ impl User {
|
|||||||
/// # Arguments
|
/// # Arguments
|
||||||
///
|
///
|
||||||
/// * `password` - A str which contains a hashed version of the users master password.
|
/// * `password` - A str which contains a hashed version of the users master password.
|
||||||
/// * `allow_next_route` - A Option<&str> with the function name of the next allowed (rocket) route.
|
/// * `allow_next_route` - A Option<Vec<String>> with the function names of the next allowed (rocket) routes.
|
||||||
|
/// These routes are able to use the previous stamp id for the next 2 minutes.
|
||||||
|
/// After these 2 minutes this stamp will expire.
|
||||||
///
|
///
|
||||||
pub fn set_password(&mut self, password: &str, allow_next_route: Option<&str>) {
|
pub fn set_password(&mut self, password: &str, allow_next_route: Option<Vec<String>>) {
|
||||||
self.password_hash = crypto::hash_password(password.as_bytes(), &self.salt, self.password_iterations as u32);
|
self.password_hash = crypto::hash_password(password.as_bytes(), &self.salt, self.password_iterations as u32);
|
||||||
|
|
||||||
if let Some(route) = allow_next_route {
|
if let Some(route) = allow_next_route {
|
||||||
@@ -154,24 +157,20 @@ impl User {
     /// Set the stamp_exception to only allow a subsequent request matching a specific route using the current security-stamp.
     ///
     /// # Arguments
-    /// * `route_exception` - A str with the function name of the next allowed (rocket) route.
+    /// * `route_exception` - A Vec<String> with the function names of the next allowed (rocket) routes.
+    /// These routes are able to use the previous stamp id for the next 2 minutes.
+    /// After these 2 minutes this stamp will expire.
     ///
-    /// ### Future
-    /// In the future it could be posible that we need more of these exception routes.
-    /// In that case we could use an Vec<UserStampException> and add multiple exceptions.
-    pub fn set_stamp_exception(&mut self, route_exception: &str) {
+    pub fn set_stamp_exception(&mut self, route_exception: Vec<String>) {
         let stamp_exception = UserStampException {
-            route: route_exception.to_string(),
+            routes: route_exception,
             security_stamp: self.security_stamp.to_string(),
+            expire: (Utc::now().naive_utc() + Duration::minutes(2)).timestamp(),
         };
         self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default());
     }

     /// Resets the stamp_exception to prevent re-use of the previous security-stamp
-    ///
-    /// ### Future
-    /// In the future it could be posible that we need more of these exception routes.
-    /// In that case we could use an Vec<UserStampException> and add multiple exceptions.
     pub fn reset_stamp_exception(&mut self) {
         self.stamp_exception = None;
     }
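For orientation, a minimal usage sketch of the new Vec<String>-based API. The route names and the calling function below are invented for illustration and are not part of this diff:

// Hypothetical caller of the methods changed above.
fn change_master_password(user: &mut User, new_password_hash: &str) {
    // Let the next requests that hit these (made-up) rocket routes keep using
    // the previous security stamp for up to 2 minutes.
    user.set_password(
        new_password_hash,
        Some(vec!["post_rotatekey".to_string(), "post_sstamp".to_string()]),
    );

    // Once the follow-up requests are done, drop the exception so the old
    // stamp can no longer be reused.
    user.reset_stamp_exception();
}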
@@ -187,7 +186,7 @@ use crate::error::MapResult;
 impl User {
     pub fn to_json(&self, conn: &DbConn) -> Value {
         let orgs = UserOrganization::find_by_user(&self.uuid, conn);
-        let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(&conn)).collect();
+        let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(conn)).collect();
         let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).is_empty();

         // TODO: Might want to save the status field in the DB

@@ -398,8 +397,8 @@ impl Invitation {
     }

     pub fn take(mail: &str, conn: &DbConn) -> bool {
-        match Self::find_by_mail(mail, &conn) {
-            Some(invitation) => invitation.delete(&conn).is_ok(),
+        match Self::find_by_mail(mail, conn) {
+            Some(invitation) => invitation.delete(conn).is_ok(),
             None => false,
         }
     }
 }
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         password_history -> Nullable<Text>,
         deleted_at -> Nullable<Datetime>,
+        reprompt -> Nullable<Integer>,
     }
 }

@@ -99,6 +100,8 @@ table! {
         uuid -> Text,
         name -> Text,
         billing_email -> Text,
+        private_key -> Nullable<Text>,
+        public_key -> Nullable<Text>,
     }
 }

@@ -122,6 +125,7 @@ table! {
         expiration_date -> Nullable<Datetime>,
         deletion_date -> Datetime,
         disabled -> Bool,
+        hide_email -> Nullable<Bool>,
     }
 }
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         password_history -> Nullable<Text>,
         deleted_at -> Nullable<Timestamp>,
+        reprompt -> Nullable<Integer>,
     }
 }

@@ -99,6 +100,8 @@ table! {
         uuid -> Text,
         name -> Text,
         billing_email -> Text,
+        private_key -> Nullable<Text>,
+        public_key -> Nullable<Text>,
     }
 }

@@ -122,6 +125,7 @@ table! {
         expiration_date -> Nullable<Timestamp>,
         deletion_date -> Timestamp,
         disabled -> Bool,
+        hide_email -> Nullable<Bool>,
     }
 }
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         password_history -> Nullable<Text>,
         deleted_at -> Nullable<Timestamp>,
+        reprompt -> Nullable<Integer>,
     }
 }

@@ -99,6 +100,8 @@ table! {
         uuid -> Text,
         name -> Text,
         billing_email -> Text,
+        private_key -> Nullable<Text>,
+        public_key -> Nullable<Text>,
     }
 }

@@ -122,6 +125,7 @@ table! {
         expiration_date -> Nullable<Timestamp>,
         deletion_date -> Timestamp,
         disabled -> Bool,
+        hide_email -> Nullable<Bool>,
     }
 }
src/error.rs (71 changes)

@@ -39,20 +39,19 @@ use diesel::ConnectionError as DieselConErr;
 use diesel_migrations::RunMigrationsError as DieselMigErr;
 use handlebars::RenderError as HbErr;
 use jsonwebtoken::errors::Error as JwtErr;
+use lettre::address::AddressError as AddrErr;
+use lettre::error::Error as LettreErr;
+use lettre::transport::smtp::Error as SmtpErr;
+use openssl::error::ErrorStack as SSLErr;
 use regex::Error as RegexErr;
 use reqwest::Error as ReqErr;
 use serde_json::{Error as SerdeErr, Value};
 use std::io::Error as IoErr;

 use std::time::SystemTimeError as TimeErr;
 use u2f::u2ferror::U2fError as U2fErr;
+use webauthn_rs::error::WebauthnError as WebauthnErr;
 use yubico::yubicoerror::YubicoError as YubiErr;

-use lettre::address::AddressError as AddrErr;
-use lettre::error::Error as LettreErr;
-use lettre::message::mime::FromStrError as FromStrErr;
-use lettre::transport::smtp::Error as SmtpErr;
-
 #[derive(Serialize)]
 pub struct Empty {}
@@ -63,31 +62,32 @@ pub struct Empty {}
 // The second one contains the function used to obtain the response sent to the client
 make_error! {
     // Just an empty error
-    EmptyError(Empty): _no_source, _serialize,
+    Empty(Empty): _no_source, _serialize,
     // Used to represent err! calls
-    SimpleError(String): _no_source, _api_error,
+    Simple(String): _no_source, _api_error,
     // Used for special return values, like 2FA errors
-    JsonError(Value): _no_source, _serialize,
-    DbError(DieselErr): _has_source, _api_error,
-    R2d2Error(R2d2Err): _has_source, _api_error,
-    U2fError(U2fErr): _has_source, _api_error,
-    SerdeError(SerdeErr): _has_source, _api_error,
-    JWtError(JwtErr): _has_source, _api_error,
-    TemplError(HbErr): _has_source, _api_error,
+    Json(Value): _no_source, _serialize,
+    Db(DieselErr): _has_source, _api_error,
+    R2d2(R2d2Err): _has_source, _api_error,
+    U2f(U2fErr): _has_source, _api_error,
+    Serde(SerdeErr): _has_source, _api_error,
+    JWt(JwtErr): _has_source, _api_error,
+    Handlebars(HbErr): _has_source, _api_error,
     //WsError(ws::Error): _has_source, _api_error,
-    IoError(IoErr): _has_source, _api_error,
-    TimeError(TimeErr): _has_source, _api_error,
-    ReqError(ReqErr): _has_source, _api_error,
-    RegexError(RegexErr): _has_source, _api_error,
-    YubiError(YubiErr): _has_source, _api_error,
+    Io(IoErr): _has_source, _api_error,
+    Time(TimeErr): _has_source, _api_error,
+    Req(ReqErr): _has_source, _api_error,
+    Regex(RegexErr): _has_source, _api_error,
+    Yubico(YubiErr): _has_source, _api_error,

-    LettreError(LettreErr): _has_source, _api_error,
-    AddressError(AddrErr): _has_source, _api_error,
-    SmtpError(SmtpErr): _has_source, _api_error,
-    FromStrError(FromStrErr): _has_source, _api_error,
+    Lettre(LettreErr): _has_source, _api_error,
+    Address(AddrErr): _has_source, _api_error,
+    Smtp(SmtpErr): _has_source, _api_error,
+    OpenSSL(SSLErr): _has_source, _api_error,

-    DieselConError(DieselConErr): _has_source, _api_error,
-    DieselMigError(DieselMigErr): _has_source, _api_error,
+    DieselCon(DieselConErr): _has_source, _api_error,
+    DieselMig(DieselMigErr): _has_source, _api_error,
+    Webauthn(WebauthnErr): _has_source, _api_error,
 }

 impl std::fmt::Debug for Error {
@@ -95,15 +95,15 @@ impl std::fmt::Debug for Error {
         match self.source() {
             Some(e) => write!(f, "{}.\n[CAUSE] {:#?}", self.message, e),
             None => match self.error {
-                ErrorKind::EmptyError(_) => Ok(()),
-                ErrorKind::SimpleError(ref s) => {
+                ErrorKind::Empty(_) => Ok(()),
+                ErrorKind::Simple(ref s) => {
                     if &self.message == s {
                         write!(f, "{}", self.message)
                     } else {
                         write!(f, "{}. {}", self.message, s)
                     }
                 }
-                ErrorKind::JsonError(_) => write!(f, "{}", self.message),
+                ErrorKind::Json(_) => write!(f, "{}", self.message),
                 _ => unreachable!(),
             },
         }
@@ -166,7 +166,7 @@ fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String {

 fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
     let json = json!({
-        "Message": "",
+        "Message": msg,
         "error": "",
         "error_description": "",
         "ValidationErrors": {"": [ msg ]},

@@ -174,6 +174,9 @@ fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
             "Message": msg,
             "Object": "error"
         },
+        "ExceptionMessage": null,
+        "ExceptionStackTrace": null,
+        "InnerExceptionMessage": null,
         "Object": "error"
     });
     _serialize(&json, "")
@@ -191,8 +194,8 @@ use rocket::response::{self, Responder, Response};
 impl<'r> Responder<'r> for Error {
     fn respond_to(self, _: &Request) -> response::Result<'r> {
         match self.error {
-            ErrorKind::EmptyError(_) => {} // Don't print the error in this situation
-            ErrorKind::SimpleError(_) => {} // Don't print the error in this situation
+            ErrorKind::Empty(_) => {} // Don't print the error in this situation
+            ErrorKind::Simple(_) => {} // Don't print the error in this situation
             _ => error!(target: "error", "{:#?}", self),
         };
@@ -219,11 +222,11 @@ macro_rules! err {

 #[macro_export]
 macro_rules! err_code {
-    ($msg:expr, $err_code: literal) => {{
+    ($msg:expr, $err_code: expr) => {{
         error!("{}", $msg);
         return Err(crate::error::Error::new($msg, $msg).with_code($err_code));
     }};
-    ($usr_msg:expr, $log_value:expr, $err_code: literal) => {{
+    ($usr_msg:expr, $log_value:expr, $err_code: expr) => {{
         error!("{}. {}", $usr_msg, $log_value);
         return Err(crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
     }};
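Because `$err_code` is now matched as `expr` instead of `literal`, the status code can be computed rather than written inline. A minimal, hypothetical sketch of a caller (the function and its limit are invented for illustration):

// Hypothetical caller; err_code! is the macro shown above.
fn reject_large_upload(size: u64, limit: u64) -> Result<(), crate::error::Error> {
    if size > limit {
        // An expression is now accepted as the status code, not just a literal.
        let status = 413; // Payload Too Large
        err_code!("Attachment exceeds the configured size limit", status);
    }
    Ok(())
}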
src/mail.rs (21 changes)

@@ -27,7 +27,7 @@ fn mailer() -> SmtpTransport {
         .timeout(Some(Duration::from_secs(CONFIG.smtp_timeout())));

     // Determine security
-    let smtp_client = if CONFIG.smtp_ssl() {
+    let smtp_client = if CONFIG.smtp_ssl() || CONFIG.smtp_explicit_tls() {
         let mut tls_parameters = TlsParameters::builder(host);
         if CONFIG.smtp_accept_invalid_hostnames() {
             tls_parameters = tls_parameters.dangerous_accept_invalid_hostnames(true);
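As a rough sketch of what the `smtp_ssl() || smtp_explicit_tls()` check ends up selecting, here is how the two flags might map onto lettre's TLS modes. This is illustrative only: the flag-to-mode mapping, ports, and builder calls are assumptions, not the exact code in this file.

use lettre::transport::smtp::client::{Tls, TlsParameters};
use lettre::SmtpTransport;

// Sketch: pick implicit TLS (SMTPS) or STARTTLS from two boolean settings.
fn build_transport(host: &str, smtp_ssl: bool, smtp_explicit_tls: bool) -> Result<SmtpTransport, Box<dyn std::error::Error>> {
    let builder = SmtpTransport::builder_dangerous(host);
    let transport = if smtp_ssl || smtp_explicit_tls {
        let tls = TlsParameters::new(host.to_string())?;
        if smtp_explicit_tls {
            // Assumed here to mean a fully wrapped TLS connection (SMTPS, port 465).
            builder.port(465).tls(Tls::Wrapper(tls)).build()
        } else {
            // Otherwise upgrade the plain connection via STARTTLS.
            builder.port(587).tls(Tls::Required(tls)).build()
        }
    } else {
        builder.build()
    };
    Ok(transport)
}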
@@ -99,9 +99,8 @@ fn get_template(template_name: &str, data: &serde_json::Value) -> Result<(String
         None => err!("Template doesn't contain subject"),
     };

-    use newline_converter::unix2dos;
     let body = match text_split.next() {
-        Some(s) => unix2dos(s.trim()).to_string(),
+        Some(s) => s.trim().to_string(),
         None => err!("Template doesn't contain body"),
     };

@@ -181,6 +180,18 @@ pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult {
     send_email(address, &subject, body_html, body_text)
 }

+pub fn send_2fa_removed_from_org(address: &str, org_name: &str) -> EmptyResult {
+    let (subject, body_html, body_text) = get_text(
+        "email/send_2fa_removed_from_org",
+        json!({
+            "url": CONFIG.domain(),
+            "org_name": org_name,
+        }),
+    )?;
+
+    send_email(address, &subject, body_html, body_text)
+}
+
 pub fn send_invite(
     address: &str,
     uuid: &str,
@@ -307,13 +318,13 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String
     let html = SinglePart::builder()
         // We force Base64 encoding because in the past we had issues with different encodings.
         .header(header::ContentTransferEncoding::Base64)
-        .header(header::ContentType("text/html; charset=utf-8".parse()?))
+        .header(header::ContentType::TEXT_HTML)
         .body(body_html);

     let text = SinglePart::builder()
         // We force Base64 encoding because in the past we had issues with different encodings.
         .header(header::ContentTransferEncoding::Base64)
-        .header(header::ContentType("text/plain; charset=utf-8".parse()?))
+        .header(header::ContentType::TEXT_PLAIN)
         .body(body_text);

     let smtp_from = &CONFIG.smtp_from();
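For context, the two parts built above are normally combined into a single multipart/alternative message. A sketch with lettre-style APIs; the addresses and subject are made up, and the real code takes them from CONFIG and the function arguments:

use lettre::message::{header, Message, MultiPart, SinglePart};

// Sketch: assemble a plain-text + HTML alternative message.
fn build_message(body_text: String, body_html: String) -> Result<Message, Box<dyn std::error::Error>> {
    let text = SinglePart::builder()
        .header(header::ContentTransferEncoding::Base64)
        .header(header::ContentType::TEXT_PLAIN)
        .body(body_text);
    let html = SinglePart::builder()
        .header(header::ContentTransferEncoding::Base64)
        .header(header::ContentType::TEXT_HTML)
        .body(body_html);

    // Mail clients render the richest alternative they support.
    Ok(Message::builder()
        .from("Vaultwarden <vaultwarden@example.com>".parse()?)
        .to("user@example.com".parse()?)
        .subject("Example subject")
        .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?)
}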
src/main.rs (83 changes)

@@ -17,15 +17,7 @@ extern crate diesel;
 extern crate diesel_migrations;

 use job_scheduler::{Job, JobScheduler};
-use std::{
-    fs::create_dir_all,
-    panic,
-    path::Path,
-    process::{exit, Command},
-    str::FromStr,
-    thread,
-    time::Duration,
-};
+use std::{fs::create_dir_all, panic, path::Path, process::exit, str::FromStr, thread, time::Duration};

 #[macro_use]
 mod error;
@@ -53,13 +45,18 @@ fn main() {
     let extra_debug = matches!(level, LF::Trace | LF::Debug);

     check_data_folder();
-    check_rsa_keys();
+    check_rsa_keys().unwrap_or_else(|_| {
+        error!("Error creating keys, exiting...");
+        exit(1);
+    });
     check_web_vault();

     create_icon_cache_folder();

     let pool = create_db_pool();
     schedule_jobs(pool.clone());
+    crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&pool.get().unwrap()).unwrap();

     launch_rocket(pool, extra_debug); // Blocks until program termination.
 }
@@ -100,7 +97,7 @@ fn launch_info() {
     println!("| This is an *unofficial* Bitwarden implementation, DO NOT use the |");
     println!("| official channels to report bugs/features, regardless of client. |");
     println!("| Send usage/configuration questions or feature requests to: |");
-    println!("| https://bitwardenrs.discourse.group/ |");
+    println!("| https://vaultwarden.discourse.group/ |");
     println!("| Report suspected bugs/issues in the software itself at: |");
     println!("| https://github.com/dani-garcia/vaultwarden/issues/new |");
     println!("\\--------------------------------------------------------------------/\n");

@@ -122,6 +119,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
         // Never show html5ever and hyper::proto logs, too noisy
         .level_for("html5ever", log::LevelFilter::Off)
         .level_for("hyper::proto", log::LevelFilter::Off)
+        .level_for("hyper::client", log::LevelFilter::Off)
+        // Prevent cookie_store logs
+        .level_for("cookie_store", log::LevelFilter::Off)
         .chain(std::io::stdout());

     // Enable smtp debug logging only specifically for smtp when need.
@@ -244,52 +244,29 @@ fn check_data_folder() {
     }
 }

-fn check_rsa_keys() {
+fn check_rsa_keys() -> Result<(), crate::error::Error> {
     // If the RSA keys don't exist, try to create them
-    if !util::file_exists(&CONFIG.private_rsa_key()) || !util::file_exists(&CONFIG.public_rsa_key()) {
-        info!("JWT keys don't exist, checking if OpenSSL is available...");
-
-        Command::new("openssl").arg("version").status().unwrap_or_else(|_| {
-            info!(
-                "Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH"
-            );
-            exit(1);
-        });
-
-        info!("OpenSSL detected, creating keys...");
-
-        let key = CONFIG.rsa_key_filename();
-
-        let pem = format!("{}.pem", key);
-        let priv_der = format!("{}.der", key);
-        let pub_der = format!("{}.pub.der", key);
-
-        let mut success = Command::new("openssl")
-            .args(&["genrsa", "-out", &pem])
-            .status()
-            .expect("Failed to create private pem file")
-            .success();
-
-        success &= Command::new("openssl")
-            .args(&["rsa", "-in", &pem, "-outform", "DER", "-out", &priv_der])
-            .status()
-            .expect("Failed to create private der file")
-            .success();
-
-        success &= Command::new("openssl")
-            .args(&["rsa", "-in", &priv_der, "-inform", "DER"])
-            .args(&["-RSAPublicKey_out", "-outform", "DER", "-out", &pub_der])
-            .status()
-            .expect("Failed to create public der file")
-            .success();
-
-        if success {
-            info!("Keys created correctly.");
-        } else {
-            error!("Error creating keys, exiting...");
-            exit(1);
-        }
+    let priv_path = CONFIG.private_rsa_key();
+    let pub_path = CONFIG.public_rsa_key();
+
+    if !util::file_exists(&priv_path) {
+        let rsa_key = openssl::rsa::Rsa::generate(2048)?;
+
+        let priv_key = rsa_key.private_key_to_pem()?;
+        crate::util::write_file(&priv_path, &priv_key)?;
+        info!("Private key created correctly.");
     }
+
+    if !util::file_exists(&pub_path) {
+        let rsa_key = openssl::rsa::Rsa::private_key_from_pem(&util::read_file(&priv_path)?)?;
+
+        let pub_key = rsa_key.public_key_to_pem()?;
+        crate::util::write_file(&pub_path, &pub_key)?;
+        info!("Public key created correctly.");
+    }
+
+    auth::load_keys();
+    Ok(())
 }

 fn check_web_vault() {
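The new implementation above relies only on the openssl crate. A standalone sketch of the same approach, with hard-coded paths and simplified error handling (both are illustrative, not taken from this diff):

use openssl::rsa::Rsa;
use std::fs;

// Sketch: generate a 2048-bit private key once, then derive the public key
// from the stored private key, mirroring the logic in check_rsa_keys().
fn ensure_rsa_keys() -> Result<(), Box<dyn std::error::Error>> {
    let priv_path = "data/rsa_key.pem";
    let pub_path = "data/rsa_key.pub.pem";

    if fs::metadata(priv_path).is_err() {
        let rsa = Rsa::generate(2048)?;
        fs::write(priv_path, rsa.private_key_to_pem()?)?;
    }

    if fs::metadata(pub_path).is_err() {
        let rsa = Rsa::private_key_from_pem(&fs::read(priv_path)?)?;
        fs::write(pub_path, rsa.public_key_to_pem()?)?;
    }
    Ok(())
}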
@@ -198,7 +198,9 @@
       "amazon.in",
       "amazon.it",
       "amazon.nl",
+      "amazon.pl",
       "amazon.sa",
+      "amazon.se",
       "amazon.sg"
     ],
     "Excluded": false
Binary assets: two images replaced (9.2 KiB → 6.9 KiB and 5.3 KiB → 2.5 KiB), one 1.8 KiB image deleted, and src/static/images/vaultwarden-icon.png added as a new 945 B file.

src/static/scripts/bootstrap-native.js (vendored, 5015 changes)
src/static/scripts/bootstrap.css (vendored, 14435 changes)
src/static/scripts/datatables.css (vendored, 26 changes)
@@ -4,13 +4,18 @@
 *
 * To rebuild or modify this file with the latest versions of the included
 * software please visit:
-* https://datatables.net/download/#bs4/dt-1.10.23
+* https://datatables.net/download/#bs5/dt-1.10.25
 *
 * Included libraries:
-* DataTables 1.10.23
+* DataTables 1.10.25
 */

 @charset "UTF-8";
+/*! Bootstrap 5 integration for DataTables
+*
+* ©2020 SpryMedia Ltd, all rights reserved.
+* License: MIT datatables.net/license/mit
+*/
 table.dataTable {
 clear: both;
 margin-top: 6px !important;

@@ -105,7 +110,7 @@ table.dataTable > thead .sorting_asc_disabled:after,
 table.dataTable > thead .sorting_desc_disabled:before,
 table.dataTable > thead .sorting_desc_disabled:after {
 position: absolute;
-bottom: 0.9em;
+bottom: 0.5em;
 display: block;
 opacity: 0.3;
 }

@@ -193,18 +198,27 @@ table.dataTable.table-sm .sorting_desc:after {
 table.table-bordered.dataTable {
 border-right-width: 0;
 }
+table.table-bordered.dataTable thead tr:first-child th,
+table.table-bordered.dataTable thead tr:first-child td {
+border-top-width: 1px;
+}
 table.table-bordered.dataTable th,
 table.table-bordered.dataTable td {
 border-left-width: 0;
 }
+table.table-bordered.dataTable th:first-child, table.table-bordered.dataTable th:first-child,
+table.table-bordered.dataTable td:first-child,
+table.table-bordered.dataTable td:first-child {
+border-left-width: 1px;
+}
 table.table-bordered.dataTable th:last-child, table.table-bordered.dataTable th:last-child,
 table.table-bordered.dataTable td:last-child,
 table.table-bordered.dataTable td:last-child {
 border-right-width: 1px;
 }
-table.table-bordered.dataTable tbody th,
-table.table-bordered.dataTable tbody td {
-border-bottom-width: 0;
+table.table-bordered.dataTable th,
+table.table-bordered.dataTable td {
+border-bottom-width: 1px;
 }

 div.dataTables_scrollHead table.table-bordered {
src/static/scripts/datatables.js (vendored, 90 changes)

@@ -4,24 +4,24 @@
 *
 * To rebuild or modify this file with the latest versions of the included
 * software please visit:
-* https://datatables.net/download/#bs4/dt-1.10.23
+* https://datatables.net/download/#bs5/dt-1.10.25
 *
 * Included libraries:
-* DataTables 1.10.23
+* DataTables 1.10.25
 */

-/*! DataTables 1.10.23
-* ©2008-2020 SpryMedia Ltd - datatables.net/license
+/*! DataTables 1.10.25
+* ©2008-2021 SpryMedia Ltd - datatables.net/license
 */

 /**
 * @summary DataTables
 * @description Paginate, search and order HTML tables
-* @version 1.10.23
+* @version 1.10.25
 * @file jquery.dataTables.js
 * @author SpryMedia Ltd
 * @contact www.datatables.net
-* @copyright Copyright 2008-2020 SpryMedia Ltd.
+* @copyright Copyright 2008-2021 SpryMedia Ltd.
 *
 * This source file is free software, available under the following license:
 * MIT license - http://datatables.net/license

@@ -1100,6 +1100,8 @@
 _fnLanguageCompat( json );
 _fnCamelToHungarian( defaults.oLanguage, json );
 $.extend( true, oLanguage, json );
+
+_fnCallbackFire( oSettings, null, 'i18n', [oSettings]);
 _fnInitialise( oSettings );
 },
 error: function () {

@@ -1109,6 +1111,9 @@
 } );
 bInitHandedOff = true;
 }
+else {
+_fnCallbackFire( oSettings, null, 'i18n', [oSettings]);
+}

 /*
 * Stripes

@@ -1260,7 +1265,7 @@

 var tbody = $this.children('tbody');
 if ( tbody.length === 0 ) {
-tbody = $('<tbody/>').appendTo($this);
+tbody = $('<tbody/>').insertAfter(thead);
 }
 oSettings.nTBody = tbody[0];

@@ -2315,8 +2320,9 @@
 }

 // Only a single match is needed for html type since it is
-// bottom of the pile and very similar to string
-if ( detectedType === 'html' ) {
+// bottom of the pile and very similar to string - but it
+// must not be empty
+if ( detectedType === 'html' && ! _empty(cache[k]) ) {
 break;
 }
 }

@@ -3421,9 +3427,10 @@
 /**
 * Insert the required TR nodes into the table for display
 * @param {object} oSettings dataTables settings object
+* @param ajaxComplete true after ajax call to complete rendering
 * @memberof DataTable#oApi
 */
-function _fnDraw( oSettings )
+function _fnDraw( oSettings, ajaxComplete )
 {
 /* Provide a pre-callback function which can be used to cancel the draw is false is returned */
 var aPreDraw = _fnCallbackFire( oSettings, 'aoPreDrawCallback', 'preDraw', [oSettings] );

@@ -3472,8 +3479,9 @@
 {
 oSettings.iDraw++;
 }
-else if ( !oSettings.bDestroying && !_fnAjaxUpdate( oSettings ) )
+else if ( !oSettings.bDestroying && !ajaxComplete)
 {
+_fnAjaxUpdate( oSettings );
 return;
 }

@@ -4005,21 +4013,16 @@
 */
 function _fnAjaxUpdate( settings )
 {
-if ( settings.bAjaxDataGet ) {
-settings.iDraw++;
-_fnProcessingDisplay( settings, true );
+settings.iDraw++;
+_fnProcessingDisplay( settings, true );

 _fnBuildAjax(
 settings,
 _fnAjaxParameters( settings ),
 function(json) {
 _fnAjaxUpdateDraw( settings, json );
 }
 );

-return false;
-}
-return true;
 }

@@ -4172,14 +4175,12 @@
 }
 settings.aiDisplay = settings.aiDisplayMaster.slice();

-settings.bAjaxDataGet = false;
-_fnDraw( settings );
+_fnDraw( settings, true );

 if ( ! settings._bInitComplete ) {
 _fnInitComplete( settings, json );
 }

-settings.bAjaxDataGet = true;
 _fnProcessingDisplay( settings, false );
 }

@@ -6108,7 +6109,7 @@
 {
 var col = columns[i];
 var asSorting = col.asSorting;
-var sTitle = col.sTitle.replace( /<.*?>/g, "" );
+var sTitle = col.ariaTitle || col.sTitle.replace( /<.*?>/g, "" );
 var th = col.nTh;

 // IE7 is throwing an error when setting these properties with jQuery's

@@ -9542,7 +9543,7 @@
 * @type string
 * @default Version number
 */
-DataTable.version = "1.10.23";
+DataTable.version = "1.10.25";

 /**
 * Private data store, containing all of the settings objects that are

@@ -13623,13 +13624,6 @@
 */
 "sAjaxDataProp": null,

-/**
-* Note if draw should be blocked while getting data
-* @type boolean
-* @default true
-*/
-"bAjaxDataGet": true,
-
 /**
 * The last jQuery XHR object that was used for server-side data gathering.
 * This can be used for working with the XHR information in one of the

@@ -13966,7 +13960,7 @@
 *
 * @type string
 */
-build:"bs4/dt-1.10.23",
+build:"bs5/dt-1.10.25",


 /**

@@ -14494,8 +14488,8 @@
 "sSortAsc": "sorting_asc",
 "sSortDesc": "sorting_desc",
 "sSortable": "sorting", /* Sortable in both directions */
-"sSortableAsc": "sorting_asc_disabled",
-"sSortableDesc": "sorting_desc_disabled",
+"sSortableAsc": "sorting_desc_disabled",
+"sSortableDesc": "sorting_asc_disabled",
 "sSortableNone": "sorting_disabled",
 "sSortColumn": "sorting_", /* Note that an int is postfixed for the sorting order */

@@ -14936,7 +14930,6 @@

 cell
 .removeClass(
-column.sSortingClass +' '+
 classes.sSortAsc +' '+
 classes.sSortDesc
 )

@@ -15061,6 +15054,11 @@
 decimal+(d - intPart).toFixed( precision ).substring( 2 ):
 '';

+// If zero, then can't have a negative prefix
+if (intPart === 0 && parseFloat(floatPart) === 0) {
+negative = '';
+}
+
 return negative + (prefix||'') +
 intPart.toString().replace(
 /\B(?=(\d{3})+(?!\d))/g, thousands

@@ -15395,12 +15393,12 @@
 }));


-/*! DataTables Bootstrap 4 integration
-* ©2011-2017 SpryMedia Ltd - datatables.net/license
+/*! DataTables Bootstrap 5 integration
+* 2020 SpryMedia Ltd - datatables.net/license
 */

 /**
-* DataTables integration for Bootstrap 4. This requires Bootstrap 4 and
+* DataTables integration for Bootstrap 4. This requires Bootstrap 5 and
 * DataTables 1.10 or newer.
 *
 * This file sets the defaults and adds options to DataTables to style its

@@ -15452,9 +15450,9 @@ $.extend( true, DataTable.defaults, {

 /* Default class modification */
 $.extend( DataTable.ext.classes, {
-sWrapper: "dataTables_wrapper dt-bootstrap4",
+sWrapper: "dataTables_wrapper dt-bootstrap5",
 sFilterInput: "form-control form-control-sm",
-sLengthSelect: "custom-select custom-select-sm form-control form-control-sm",
+sLengthSelect: "form-select form-select-sm",
 sProcessing: "dataTables_processing card",
 sPageButton: "paginate_button page-item"
 } );
@@ -1,15 +1,15 @@
 /*!
-* jQuery JavaScript Library v3.5.1 -ajax,-ajax/jsonp,-ajax/load,-ajax/script,-ajax/var/location,-ajax/var/nonce,-ajax/var/rquery,-ajax/xhr,-manipulation/_evalUrl,-deprecated/ajax-event-alias,-effects,-effects/Tween,-effects/animatedSelector
+* jQuery JavaScript Library v3.6.0 -ajax,-ajax/jsonp,-ajax/load,-ajax/script,-ajax/var/location,-ajax/var/nonce,-ajax/var/rquery,-ajax/xhr,-manipulation/_evalUrl,-deprecated/ajax-event-alias,-effects,-effects/Tween,-effects/animatedSelector
 * https://jquery.com/
 *
 * Includes Sizzle.js
 * https://sizzlejs.com/
 *
-* Copyright JS Foundation and other contributors
+* Copyright OpenJS Foundation and other contributors
 * Released under the MIT license
 * https://jquery.org/license
 *
-* Date: 2020-05-04T22:49Z
+* Date: 2021-03-02T17:08Z
 */
 ( function( global, factory ) {

@@ -76,12 +76,16 @@ var support = {};

 var isFunction = function isFunction( obj ) {

 // Support: Chrome <=57, Firefox <=52
 // In some browsers, typeof returns "function" for HTML <object> elements
 // (i.e., `typeof document.createElement( "object" ) === "function"`).
 // We don't want to classify *any* DOM node as a function.
-return typeof obj === "function" && typeof obj.nodeType !== "number";
-};
+// Support: QtWeb <=3.8.5, WebKit <=534.34, wkhtmltopdf tool <=0.12.5
+// Plus for old WebKit, typeof returns "function" for HTML collections
+// (e.g., `typeof document.getElementsByTagName("div") === "function"`). (gh-4756)
+return typeof obj === "function" && typeof obj.nodeType !== "number" &&
+typeof obj.item !== "function";
+};


 var isWindow = function isWindow( obj ) {

@@ -147,7 +151,7 @@ function toType( obj ) {


 var
-version = "3.5.1 -ajax,-ajax/jsonp,-ajax/load,-ajax/script,-ajax/var/location,-ajax/var/nonce,-ajax/var/rquery,-ajax/xhr,-manipulation/_evalUrl,-deprecated/ajax-event-alias,-effects,-effects/Tween,-effects/animatedSelector",
+version = "3.6.0 -ajax,-ajax/jsonp,-ajax/load,-ajax/script,-ajax/var/location,-ajax/var/nonce,-ajax/var/rquery,-ajax/xhr,-manipulation/_evalUrl,-deprecated/ajax-event-alias,-effects,-effects/Tween,-effects/animatedSelector",

 // Define a local copy of jQuery
 jQuery = function( selector, context ) {

@@ -401,7 +405,7 @@ jQuery.extend( {
 if ( isArrayLike( Object( arr ) ) ) {
 jQuery.merge( ret,
 typeof arr === "string" ?
 [ arr ] : arr
 );
 } else {
 push.call( ret, arr );

@@ -496,9 +500,9 @@ if ( typeof Symbol === "function" ) {

 // Populate the class2type map
 jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ),
 function( _i, name ) {
 class2type[ "[object " + name + "]" ] = name.toLowerCase();
 } );

 function isArrayLike( obj ) {

@@ -518,14 +522,14 @@ function isArrayLike( obj ) {
 }
 var Sizzle =
 /*!
-* Sizzle CSS Selector Engine v2.3.5
+* Sizzle CSS Selector Engine v2.3.6
 * https://sizzlejs.com/
 *
 * Copyright JS Foundation and other contributors
 * Released under the MIT license
 * https://js.foundation/
 *
-* Date: 2020-03-14
+* Date: 2021-02-16
 */
 ( function( window ) {
 var i,

@@ -1108,8 +1112,8 @@ support = Sizzle.support = {};
 * @returns {Boolean} True iff elem is a non-HTML XML node
 */
 isXML = Sizzle.isXML = function( elem ) {
-var namespace = elem.namespaceURI,
-docElem = ( elem.ownerDocument || elem ).documentElement;
+var namespace = elem && elem.namespaceURI,
+docElem = elem && ( elem.ownerDocument || elem ).documentElement;

 // Support: IE <=8
 // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes

@@ -3024,9 +3028,9 @@ var rneedsContext = jQuery.expr.match.needsContext;

 function nodeName( elem, name ) {

 return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();

-};
+}
 var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i );


@@ -3997,8 +4001,8 @@ jQuery.extend( {
 resolveContexts = Array( i ),
 resolveValues = slice.call( arguments ),

-// the master Deferred
-master = jQuery.Deferred(),
+// the primary Deferred
+primary = jQuery.Deferred(),

 // subordinate callback factory
 updateFunc = function( i ) {
@@ -4006,30 +4010,30 @@ jQuery.extend( {
 resolveContexts[ i ] = this;
 resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value;
 if ( !( --remaining ) ) {
-master.resolveWith( resolveContexts, resolveValues );
+primary.resolveWith( resolveContexts, resolveValues );
 }
 };
 };

 // Single- and empty arguments are adopted like Promise.resolve
 if ( remaining <= 1 ) {
-adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject,
+adoptValue( singleValue, primary.done( updateFunc( i ) ).resolve, primary.reject,
 !remaining );

 // Use .then() to unwrap secondary thenables (cf. gh-3000)
-if ( master.state() === "pending" ||
+if ( primary.state() === "pending" ||
 isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) {

-return master.then();
+return primary.then();
 }
 }

 // Multiple arguments are aggregated like Promise.all array elements
 while ( i-- ) {
-adoptValue( resolveValues[ i ], updateFunc( i ), master.reject );
+adoptValue( resolveValues[ i ], updateFunc( i ), primary.reject );
 }

-return master.promise();
+return primary.promise();
 }
 } );

@@ -4180,8 +4184,8 @@ var access = function( elems, fn, key, value, chainable, emptyGet, raw ) {
 for ( ; i < len; i++ ) {
 fn(
 elems[ i ], key, raw ?
 value :
 value.call( elems[ i ], i, fn( elems[ i ], key ) )
 );
 }
 }

@@ -5089,10 +5093,7 @@ function buildFragment( elems, context, scripts, selection, ignored ) {
 }


-var
-rkeyEvent = /^key/,
-rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/,
-rtypenamespace = /^([^.]*)(?:\.(.+)|)/;
+var rtypenamespace = /^([^.]*)(?:\.(.+)|)/;

 function returnTrue() {
 return true;

@@ -5387,8 +5388,8 @@ jQuery.event = {
 event = jQuery.event.fix( nativeEvent ),

 handlers = (
 dataPriv.get( this, "events" ) || Object.create( null )
 )[ event.type ] || [],
 special = jQuery.event.special[ event.type ] || {};

 // Use the fix-ed jQuery.Event rather than the (read-only) native event

@@ -5512,12 +5513,12 @@ jQuery.event = {
 get: isFunction( hook ) ?
 function() {
 if ( this.originalEvent ) {
 return hook( this.originalEvent );
 }
 } :
 function() {
 if ( this.originalEvent ) {
 return this.originalEvent[ name ];
 }
 },

@@ -5656,7 +5657,13 @@ function leverageNative( el, type, expectSync ) {
 // Cancel the outer synthetic event
 event.stopImmediatePropagation();
 event.preventDefault();
-return result.value;
+
+// Support: Chrome 86+
+// In Chrome, if an element having a focusout handler is blurred by
+// clicking outside of it, it invokes the handler synchronously. If
+// that handler calls `.remove()` on the element, the data is cleared,
+// leaving `result` undefined. We need to guard against this.
+return result && result.value;
 }

 // If this is an inner synthetic event for an event with a bubbling surrogate

@@ -5821,34 +5828,7 @@ jQuery.each( {
 targetTouches: true,
 toElement: true,
 touches: true,
+which: true
-which: function( event ) {
-var button = event.button;
-
-// Add which for key events
-if ( event.which == null && rkeyEvent.test( event.type ) ) {
-return event.charCode != null ? event.charCode : event.keyCode;
-}
-
-// Add which for click: 1 === left; 2 === middle; 3 === right
-if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) {
-if ( button & 1 ) {
-return 1;
-}
-
-if ( button & 2 ) {
-return 3;
-}
-
-if ( button & 4 ) {
-return 2;
-}
-
-return 0;
-}
-
-return event.which;
-}
 }, jQuery.event.addProp );

 jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) {

@@ -5874,6 +5854,12 @@ jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateTyp
 return true;
 },
+
+// Suppress native focus or blur as it's already being fired
+// in leverageNative.
+_default: function() {
+return true;
+},

 delegateType: delegateType
 };
 } );
@@ -6541,6 +6527,10 @@ var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" );
|
|||||||
// set in CSS while `offset*` properties report correct values.
|
// set in CSS while `offset*` properties report correct values.
|
||||||
// Behavior in IE 9 is more subtle than in newer versions & it passes
|
// Behavior in IE 9 is more subtle than in newer versions & it passes
|
||||||
// some versions of this test; make sure not to make it pass there!
|
// some versions of this test; make sure not to make it pass there!
|
||||||
|
//
|
||||||
|
// Support: Firefox 70+
|
||||||
|
// Only Firefox includes border widths
|
||||||
|
// in computed dimensions. (gh-4529)
|
||||||
reliableTrDimensions: function() {
|
reliableTrDimensions: function() {
|
||||||
var table, tr, trChild, trStyle;
|
var table, tr, trChild, trStyle;
|
||||||
if ( reliableTrDimensionsVal == null ) {
|
if ( reliableTrDimensionsVal == null ) {
|
||||||
@@ -6548,17 +6538,32 @@ var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" );
|
|||||||
tr = document.createElement( "tr" );
|
tr = document.createElement( "tr" );
|
||||||
trChild = document.createElement( "div" );
|
trChild = document.createElement( "div" );
|
||||||
|
|
||||||
table.style.cssText = "position:absolute;left:-11111px";
|
table.style.cssText = "position:absolute;left:-11111px;border-collapse:separate";
|
||||||
|
tr.style.cssText = "border:1px solid";
|
||||||
|
|
||||||
|
// Support: Chrome 86+
|
||||||
|
// Height set through cssText does not get applied.
|
||||||
|
// Computed height then comes back as 0.
|
||||||
tr.style.height = "1px";
|
tr.style.height = "1px";
|
||||||
trChild.style.height = "9px";
|
trChild.style.height = "9px";
|
||||||
|
|
||||||
|
// Support: Android 8 Chrome 86+
|
||||||
|
// In our bodyBackground.html iframe,
|
||||||
|
// display for all div elements is set to "inline",
|
||||||
|
// which causes a problem only in Android 8 Chrome 86.
|
||||||
|
// Ensuring the div is display: block
|
||||||
|
// gets around this issue.
|
||||||
|
trChild.style.display = "block";
|
||||||
|
|
||||||
documentElement
|
documentElement
|
||||||
.appendChild( table )
|
.appendChild( table )
|
||||||
.appendChild( tr )
|
.appendChild( tr )
|
||||||
.appendChild( trChild );
|
.appendChild( trChild );
|
||||||
|
|
||||||
trStyle = window.getComputedStyle( tr );
|
trStyle = window.getComputedStyle( tr );
|
||||||
reliableTrDimensionsVal = parseInt( trStyle.height ) > 3;
|
reliableTrDimensionsVal = ( parseInt( trStyle.height, 10 ) +
|
||||||
|
parseInt( trStyle.borderTopWidth, 10 ) +
|
||||||
|
parseInt( trStyle.borderBottomWidth, 10 ) ) === tr.offsetHeight;
|
||||||
|
|
||||||
documentElement.removeChild( table );
|
documentElement.removeChild( table );
|
||||||
}
|
}
|
||||||
@@ -7022,10 +7027,10 @@ jQuery.each( [ "height", "width" ], function( _i, dimension ) {
|
|||||||
// Running getBoundingClientRect on a disconnected node
|
// Running getBoundingClientRect on a disconnected node
|
||||||
// in IE throws an error.
|
// in IE throws an error.
|
||||||
( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ?
|
( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ?
|
||||||
swap( elem, cssShow, function() {
|
swap( elem, cssShow, function() {
|
||||||
return getWidthOrHeight( elem, dimension, extra );
|
return getWidthOrHeight( elem, dimension, extra );
|
||||||
} ) :
|
} ) :
|
||||||
getWidthOrHeight( elem, dimension, extra );
|
getWidthOrHeight( elem, dimension, extra );
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -7084,7 +7089,7 @@ jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft,
|
|||||||
swap( elem, { marginLeft: 0 }, function() {
|
swap( elem, { marginLeft: 0 }, function() {
|
||||||
return elem.getBoundingClientRect().left;
|
return elem.getBoundingClientRect().left;
|
||||||
} )
|
} )
|
||||||
) + "px";
|
) + "px";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
@@ -7608,8 +7613,8 @@ jQuery.fn.extend( {
|
|||||||
if ( this.setAttribute ) {
|
if ( this.setAttribute ) {
|
||||||
this.setAttribute( "class",
|
this.setAttribute( "class",
|
||||||
className || value === false ?
|
className || value === false ?
|
||||||
"" :
|
"" :
|
||||||
dataPriv.get( this, "__className__" ) || ""
|
dataPriv.get( this, "__className__" ) || ""
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -7624,7 +7629,7 @@ jQuery.fn.extend( {
|
|||||||
while ( ( elem = this[ i++ ] ) ) {
|
while ( ( elem = this[ i++ ] ) ) {
|
||||||
if ( elem.nodeType === 1 &&
|
if ( elem.nodeType === 1 &&
|
||||||
( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) {
|
( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -7914,9 +7919,7 @@ jQuery.extend( jQuery.event, {
|
|||||||
special.bindType || type;
|
special.bindType || type;
|
||||||
|
|
||||||
// jQuery handler
|
// jQuery handler
|
||||||
handle = (
|
handle = ( dataPriv.get( cur, "events" ) || Object.create( null ) )[ event.type ] &&
|
||||||
dataPriv.get( cur, "events" ) || Object.create( null )
|
|
||||||
)[ event.type ] &&
|
|
||||||
dataPriv.get( cur, "handle" );
|
dataPriv.get( cur, "handle" );
|
||||||
if ( handle ) {
|
if ( handle ) {
|
||||||
handle.apply( cur, data );
|
handle.apply( cur, data );
|
||||||
@@ -8057,7 +8060,7 @@ if ( !support.focusin ) {
|
|||||||
|
|
||||||
// Cross-browser xml parsing
|
// Cross-browser xml parsing
|
||||||
jQuery.parseXML = function( data ) {
|
jQuery.parseXML = function( data ) {
|
||||||
var xml;
|
var xml, parserErrorElem;
|
||||||
if ( !data || typeof data !== "string" ) {
|
if ( !data || typeof data !== "string" ) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@@ -8066,12 +8069,17 @@ jQuery.parseXML = function( data ) {
|
|||||||
// IE throws on parseFromString with invalid input.
|
// IE throws on parseFromString with invalid input.
|
||||||
try {
|
try {
|
||||||
xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" );
|
xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" );
|
||||||
} catch ( e ) {
|
} catch ( e ) {}
|
||||||
xml = undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) {
|
parserErrorElem = xml && xml.getElementsByTagName( "parsererror" )[ 0 ];
|
||||||
jQuery.error( "Invalid XML: " + data );
|
if ( !xml || parserErrorElem ) {
|
||||||
|
jQuery.error( "Invalid XML: " + (
|
||||||
|
parserErrorElem ?
|
||||||
|
jQuery.map( parserErrorElem.childNodes, function( el ) {
|
||||||
|
return el.textContent;
|
||||||
|
} ).join( "\n" ) :
|
||||||
|
data
|
||||||
|
) );
|
||||||
}
|
}
|
||||||
return xml;
|
return xml;
|
||||||
};
|
};
|
||||||
@@ -8172,16 +8180,14 @@ jQuery.fn.extend( {
|
|||||||
// Can add propHook for "elements" to filter or add form elements
|
// Can add propHook for "elements" to filter or add form elements
|
||||||
var elements = jQuery.prop( this, "elements" );
|
var elements = jQuery.prop( this, "elements" );
|
||||||
return elements ? jQuery.makeArray( elements ) : this;
|
return elements ? jQuery.makeArray( elements ) : this;
|
||||||
} )
|
} ).filter( function() {
|
||||||
.filter( function() {
|
|
||||||
var type = this.type;
|
var type = this.type;
|
||||||
|
|
||||||
// Use .is( ":disabled" ) so that fieldset[disabled] works
|
// Use .is( ":disabled" ) so that fieldset[disabled] works
|
||||||
return this.name && !jQuery( this ).is( ":disabled" ) &&
|
return this.name && !jQuery( this ).is( ":disabled" ) &&
|
||||||
rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
|
rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
|
||||||
( this.checked || !rcheckableType.test( type ) );
|
( this.checked || !rcheckableType.test( type ) );
|
||||||
} )
|
} ).map( function( _i, elem ) {
|
||||||
.map( function( _i, elem ) {
|
|
||||||
var val = jQuery( this ).val();
|
var val = jQuery( this ).val();
|
||||||
|
|
||||||
if ( val == null ) {
|
if ( val == null ) {
|
||||||
@@ -8387,12 +8393,6 @@ jQuery.offset = {
|
|||||||
options.using.call( elem, props );
|
options.using.call( elem, props );
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
if ( typeof props.top === "number" ) {
|
|
||||||
props.top += "px";
|
|
||||||
}
|
|
||||||
if ( typeof props.left === "number" ) {
|
|
||||||
props.left += "px";
|
|
||||||
}
|
|
||||||
curElem.css( props );
|
curElem.css( props );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -8561,8 +8561,11 @@ jQuery.each( [ "top", "left" ], function( _i, prop ) {
|
|||||||
|
|
||||||
// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
|
// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
|
||||||
jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
|
jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
|
||||||
jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name },
|
jQuery.each( {
|
||||||
function( defaultExtra, funcName ) {
|
padding: "inner" + name,
|
||||||
|
content: type,
|
||||||
|
"": "outer" + name
|
||||||
|
}, function( defaultExtra, funcName ) {
|
||||||
|
|
||||||
// Margin is only for outerHeight, outerWidth
|
// Margin is only for outerHeight, outerWidth
|
||||||
jQuery.fn[ funcName ] = function( margin, value ) {
|
jQuery.fn[ funcName ] = function( margin, value ) {
|
||||||
@@ -8631,7 +8634,8 @@ jQuery.fn.extend( {
|
|||||||
}
|
}
|
||||||
} );
|
} );
|
||||||
|
|
||||||
jQuery.each( ( "blur focus focusin focusout resize scroll click dblclick " +
|
jQuery.each(
|
||||||
|
( "blur focus focusin focusout resize scroll click dblclick " +
|
||||||
"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
|
"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
|
||||||
"change select submit keydown keypress keyup contextmenu" ).split( " " ),
|
"change select submit keydown keypress keyup contextmenu" ).split( " " ),
|
||||||
function( _i, name ) {
|
function( _i, name ) {
|
||||||
@@ -8642,7 +8646,8 @@ jQuery.each( ( "blur focus focusin focusout resize scroll click dblclick " +
|
|||||||
this.on( name, null, data, fn ) :
|
this.on( name, null, data, fn ) :
|
||||||
this.trigger( name );
|
this.trigger( name );
|
||||||
};
|
};
|
||||||
} );
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -4,7 +4,7 @@
 <meta http-equiv="content-type" content="text/html; charset=UTF-8" />
 <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no" />
 <meta name="robots" content="noindex,nofollow" />
-<link rel="icon" type="image/png" href="{{urlpath}}/bwrs_static/shield-white.png">
+<link rel="icon" type="image/png" href="{{urlpath}}/bwrs_static/vaultwarden-icon.png">
 <title>Vaultwarden Admin Panel</title>
 <link rel="stylesheet" href="{{urlpath}}/bwrs_static/bootstrap.css" />
 <style>
@@ -15,13 +15,16 @@
 width: 48px;
 height: 48px;
 }
-.navbar img {
-height: 24px;
+.vaultwarden-icon {
+height: 32px;
 width: auto;
+margin: -5px 0 0 0;
 }
 </style>
 <script src="{{urlpath}}/bwrs_static/identicon.js"></script>
 <script>
+'use strict';
+
 function reload() { window.location.reload(); }
 function msg(text, reload_page = true) {
 text && alert(text);
@@ -77,19 +80,18 @@
 });
 }
 </script>
 
 </head>
 
 <body class="bg-light">
 <nav class="navbar navbar-expand-md navbar-dark bg-dark mb-4 shadow fixed-top">
 <div class="container-xl">
-<a class="navbar-brand" href="{{urlpath}}/admin"><img class="pr-1" src="{{urlpath}}/bwrs_static/shield-white.png">Vaultwarden Admin</a>
-<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarCollapse"
+<a class="navbar-brand" href="{{urlpath}}/admin"><img class="vaultwarden-icon" src="{{urlpath}}/bwrs_static/vaultwarden-icon.png" alt="V">aultwarden Admin</a>
+<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarCollapse"
 aria-controls="navbarCollapse" aria-expanded="false" aria-label="Toggle navigation">
 <span class="navbar-toggler-icon"></span>
 </button>
 <div class="collapse navbar-collapse" id="navbarCollapse">
-<ul class="navbar-nav mr-auto">
+<ul class="navbar-nav me-auto">
 {{#if logged_in}}
 <li class="nav-item">
 <a class="nav-link" href="{{urlpath}}/admin">Settings</a>
@@ -116,21 +118,23 @@
 </div>
 </nav>
 
-{{> (page_content) }}
+{{> (lookup this "page_content") }}
 
 <!-- This script needs to be at the bottom, else it will fail! -->
 <script>
+'use strict';
+
 // get current URL path and assign 'active' class to the correct nav-item
-(function () {
+(() => {
 var pathname = window.location.pathname;
 if (pathname === "") return;
 var navItem = document.querySelectorAll('.navbar-nav .nav-item a[href="'+pathname+'"]');
 if (navItem.length === 1) {
-navItem[0].parentElement.className = navItem[0].parentElement.className + ' active';
+navItem[0].className = navItem[0].className + ' active';
+navItem[0].setAttribute('aria-current', 'page');
 }
 })();
 </script>
-<!-- This script needs to be at the bottom, else it will fail! -->
 <script src="{{urlpath}}/bwrs_static/bootstrap-native.js"></script>
 </body>
 </html>
@@ -7,37 +7,37 @@
 <div class="col-md">
 <dl class="row">
 <dt class="col-sm-5">Server Installed
-<span class="badge badge-success d-none" id="server-success" title="Latest version is installed.">Ok</span>
-<span class="badge badge-warning d-none" id="server-warning" title="There seems to be an update available.">Update</span>
-<span class="badge badge-info d-none" id="server-branch" title="This is a branched version.">Branched</span>
+<span class="badge bg-success d-none" id="server-success" title="Latest version is installed.">Ok</span>
+<span class="badge bg-warning d-none" id="server-warning" title="There seems to be an update available.">Update</span>
+<span class="badge bg-info d-none" id="server-branch" title="This is a branched version.">Branched</span>
 </dt>
 <dd class="col-sm-7">
 <span id="server-installed">{{version}}</span>
 </dd>
 <dt class="col-sm-5">Server Latest
-<span class="badge badge-secondary d-none" id="server-failed" title="Unable to determine latest version.">Unknown</span>
+<span class="badge bg-secondary d-none" id="server-failed" title="Unable to determine latest version.">Unknown</span>
 </dt>
 <dd class="col-sm-7">
-<span id="server-latest">{{diagnostics.latest_release}}<span id="server-latest-commit" class="d-none">-{{diagnostics.latest_commit}}</span></span>
+<span id="server-latest">{{page_data.latest_release}}<span id="server-latest-commit" class="d-none">-{{page_data.latest_commit}}</span></span>
 </dd>
-{{#if diagnostics.web_vault_enabled}}
+{{#if page_data.web_vault_enabled}}
 <dt class="col-sm-5">Web Installed
-<span class="badge badge-success d-none" id="web-success" title="Latest version is installed.">Ok</span>
-<span class="badge badge-warning d-none" id="web-warning" title="There seems to be an update available.">Update</span>
+<span class="badge bg-success d-none" id="web-success" title="Latest version is installed.">Ok</span>
+<span class="badge bg-warning d-none" id="web-warning" title="There seems to be an update available.">Update</span>
 </dt>
 <dd class="col-sm-7">
-<span id="web-installed">{{diagnostics.web_vault_version}}</span>
+<span id="web-installed">{{page_data.web_vault_version}}</span>
 </dd>
-{{#unless diagnostics.running_within_docker}}
+{{#unless page_data.running_within_docker}}
 <dt class="col-sm-5">Web Latest
-<span class="badge badge-secondary d-none" id="web-failed" title="Unable to determine latest version.">Unknown</span>
+<span class="badge bg-secondary d-none" id="web-failed" title="Unable to determine latest version.">Unknown</span>
 </dt>
 <dd class="col-sm-7">
-<span id="web-latest">{{diagnostics.latest_web_build}}</span>
+<span id="web-latest">{{page_data.latest_web_build}}</span>
 </dd>
 {{/unless}}
 {{/if}}
-{{#unless diagnostics.web_vault_enabled}}
+{{#unless page_data.web_vault_enabled}}
 <dt class="col-sm-5">Web Installed</dt>
 <dd class="col-sm-7">
 <span id="web-installed">Web Vault is disabled</span>
@@ -45,7 +45,7 @@
 {{/unless}}
 <dt class="col-sm-5">Database</dt>
 <dd class="col-sm-7">
-<span><b>{{diagnostics.db_type}}:</b> {{diagnostics.db_version}}</span>
+<span><b>{{page_data.db_type}}:</b> {{page_data.db_version}}</span>
 </dd>
 </dl>
 </div>
@@ -57,96 +57,105 @@
 <dl class="row">
 <dt class="col-sm-5">Running within Docker</dt>
 <dd class="col-sm-7">
-{{#if diagnostics.running_within_docker}}
+{{#if page_data.running_within_docker}}
 <span class="d-block"><b>Yes</b></span>
 {{/if}}
-{{#unless diagnostics.running_within_docker}}
+{{#unless page_data.running_within_docker}}
+<span class="d-block"><b>No</b></span>
+{{/unless}}
+</dd>
+<dt class="col-sm-5">Environment settings overridden</dt>
+<dd class="col-sm-7">
+{{#if page_data.overrides}}
+<span class="d-block" title="The following settings are overridden: {{page_data.overrides}}"><b>Yes</b></span>
+{{/if}}
+{{#unless page_data.overrides}}
 <span class="d-block"><b>No</b></span>
 {{/unless}}
 </dd>
 <dt class="col-sm-5">Uses a reverse proxy</dt>
 <dd class="col-sm-7">
-{{#if diagnostics.ip_header_exists}}
+{{#if page_data.ip_header_exists}}
 <span class="d-block" title="IP Header found."><b>Yes</b></span>
 {{/if}}
-{{#unless diagnostics.ip_header_exists}}
+{{#unless page_data.ip_header_exists}}
 <span class="d-block" title="No IP Header found."><b>No</b></span>
 {{/unless}}
 </dd>
 {{!-- Only show this if the IP Header Exists --}}
-{{#if diagnostics.ip_header_exists}}
+{{#if page_data.ip_header_exists}}
 <dt class="col-sm-5">IP header
-{{#if diagnostics.ip_header_match}}
-<span class="badge badge-success" title="IP_HEADER config seems to be valid.">Match</span>
+{{#if page_data.ip_header_match}}
+<span class="badge bg-success" title="IP_HEADER config seems to be valid.">Match</span>
 {{/if}}
-{{#unless diagnostics.ip_header_match}}
-<span class="badge badge-danger" title="IP_HEADER config seems to be invalid. IP's in the log could be invalid. Please fix.">No Match</span>
+{{#unless page_data.ip_header_match}}
+<span class="badge bg-danger" title="IP_HEADER config seems to be invalid. IP's in the log could be invalid. Please fix.">No Match</span>
 {{/unless}}
 </dt>
 <dd class="col-sm-7">
-{{#if diagnostics.ip_header_match}}
-<span class="d-block"><b>Config/Server:</b> {{ diagnostics.ip_header_name }}</span>
+{{#if page_data.ip_header_match}}
+<span class="d-block"><b>Config/Server:</b> {{ page_data.ip_header_name }}</span>
 {{/if}}
-{{#unless diagnostics.ip_header_match}}
-<span class="d-block"><b>Config:</b> {{ diagnostics.ip_header_config }}</span>
-<span class="d-block"><b>Server:</b> {{ diagnostics.ip_header_name }}</span>
+{{#unless page_data.ip_header_match}}
+<span class="d-block"><b>Config:</b> {{ page_data.ip_header_config }}</span>
+<span class="d-block"><b>Server:</b> {{ page_data.ip_header_name }}</span>
 {{/unless}}
 </dd>
 {{/if}}
 {{!-- End if IP Header Exists --}}
 <dt class="col-sm-5">Internet access
-{{#if diagnostics.has_http_access}}
-<span class="badge badge-success" title="We have internet access!">Ok</span>
+{{#if page_data.has_http_access}}
+<span class="badge bg-success" title="We have internet access!">Ok</span>
 {{/if}}
-{{#unless diagnostics.has_http_access}}
-<span class="badge badge-danger" title="There seems to be no internet access. Please fix.">Error</span>
+{{#unless page_data.has_http_access}}
+<span class="badge bg-danger" title="There seems to be no internet access. Please fix.">Error</span>
 {{/unless}}
 </dt>
 <dd class="col-sm-7">
-{{#if diagnostics.has_http_access}}
+{{#if page_data.has_http_access}}
 <span class="d-block"><b>Yes</b></span>
 {{/if}}
-{{#unless diagnostics.has_http_access}}
+{{#unless page_data.has_http_access}}
 <span class="d-block"><b>No</b></span>
 {{/unless}}
 </dd>
 <dt class="col-sm-5">Internet access via a proxy</dt>
 <dd class="col-sm-7">
-{{#if diagnostics.uses_proxy}}
+{{#if page_data.uses_proxy}}
 <span class="d-block" title="Internet access goes via a proxy (HTTPS_PROXY or HTTP_PROXY is configured)."><b>Yes</b></span>
 {{/if}}
-{{#unless diagnostics.uses_proxy}}
+{{#unless page_data.uses_proxy}}
 <span class="d-block" title="We have direct internet access, no outgoing proxy configured."><b>No</b></span>
 {{/unless}}
 </dd>
 <dt class="col-sm-5">DNS (github.com)
-<span class="badge badge-success d-none" id="dns-success" title="DNS Resolving works!">Ok</span>
-<span class="badge badge-danger d-none" id="dns-warning" title="DNS Resolving failed. Please fix.">Error</span>
+<span class="badge bg-success d-none" id="dns-success" title="DNS Resolving works!">Ok</span>
+<span class="badge bg-danger d-none" id="dns-warning" title="DNS Resolving failed. Please fix.">Error</span>
 </dt>
 <dd class="col-sm-7">
-<span id="dns-resolved">{{diagnostics.dns_resolved}}</span>
+<span id="dns-resolved">{{page_data.dns_resolved}}</span>
 </dd>
 <dt class="col-sm-5">Date & Time (Local)</dt>
 <dd class="col-sm-7">
-<span><b>Server:</b> {{diagnostics.server_time_local}}</span>
+<span><b>Server:</b> {{page_data.server_time_local}}</span>
 </dd>
 <dt class="col-sm-5">Date & Time (UTC)
-<span class="badge badge-success d-none" id="time-success" title="Time offsets seem to be correct.">Ok</span>
-<span class="badge badge-danger d-none" id="time-warning" title="Time offsets are too mouch at drift.">Error</span>
+<span class="badge bg-success d-none" id="time-success" title="Time offsets seem to be correct.">Ok</span>
+<span class="badge bg-danger d-none" id="time-warning" title="Time offsets are too mouch at drift.">Error</span>
 </dt>
 <dd class="col-sm-7">
-<span id="time-server" class="d-block"><b>Server:</b> <span id="time-server-string">{{diagnostics.server_time}}</span></span>
+<span id="time-server" class="d-block"><b>Server:</b> <span id="time-server-string">{{page_data.server_time}}</span></span>
 <span id="time-browser" class="d-block"><b>Browser:</b> <span id="time-browser-string"></span></span>
 </dd>
 
 <dt class="col-sm-5">Domain configuration
-<span class="badge badge-success d-none" id="domain-success" title="The domain variable matches the browser location and seems to be configured correctly.">Match</span>
-<span class="badge badge-danger d-none" id="domain-warning" title="The domain variable does not matches the browsers location.
-The domain variable does not seem to be configured correctly.
-Some features may not work as expected!">No Match</span>
-<span class="badge badge-success d-none" id="https-success" title="Configurued to use HTTPS">HTTPS</span>
-<span class="badge badge-danger d-none" id="https-warning" title="Not configured to use HTTPS.
-Some features may not work as expected!">No HTTPS</span>
+<span class="badge bg-success d-none" id="domain-success" title="The domain variable matches the browser location and seems to be configured correctly.">Match</span>
+<span class="badge bg-danger d-none" id="domain-warning" title="The domain variable does not matches the browsers location.
+The domain variable does not seem to be configured correctly.
+Some features may not work as expected!">No Match</span>
+<span class="badge bg-success d-none" id="https-success" title="Configurued to use HTTPS">HTTPS</span>
+<span class="badge bg-danger d-none" id="https-warning" title="Not configured to use HTTPS.
+Some features may not work as expected!">No HTTPS</span>
 </dt>
 <dd class="col-sm-7">
-<span id="domain-server" class="d-block"><b>Server:</b> <span id="domain-server-string">{{diagnostics.admin_url}}</span></span>
+<span id="domain-server" class="d-block"><b>Server:</b> <span id="domain-server-string">{{page_data.admin_url}}</span></span>
 <span id="domain-browser" class="d-block"><b>Browser:</b> <span id="domain-browser-string"></span></span>
 </dd>
 </dl>
@@ -159,7 +168,7 @@
 <dl class="row">
 <dd class="col-sm-12">
 If you need support please check the following links first before you create a new issue:
-<a href="https://bitwardenrs.discourse.group/" target="_blank" rel="noreferrer">Vaultwarden Forum</a>
+<a href="https://vaultwarden.discourse.group/" target="_blank" rel="noreferrer">Vaultwarden Forum</a>
 | <a href="https://github.com/dani-garcia/vaultwarden/discussions" target="_blank" rel="noreferrer">Github Discussions</a>
 </dd>
 </dl>
@@ -173,10 +182,17 @@
 <dt class="col-sm-3">
 <button type="button" id="gen-support" class="btn btn-primary" onclick="generateSupportString(); return false;">Generate Support String</button>
 <br><br>
-<button type="button" id="copy-support" class="btn btn-info d-none" onclick="copyToClipboard(); return false;">Copy To Clipboard</button>
+<button type="button" id="copy-support" class="btn btn-info mb-3 d-none" onclick="copyToClipboard(); return false;">Copy To Clipboard</button>
+<div class="toast-container position-absolute float-start" style="width: 15rem;">
+<div id="toastClipboardCopy" class="toast fade hide" role="status" aria-live="polite" aria-atomic="true" data-bs-autohide="true" data-bs-delay="1500">
+<div class="toast-body">
+Copied to clipboard!
+</div>
+</div>
+</div>
 </dt>
 <dd class="col-sm-9">
-<pre id="support-string" class="pre-scrollable d-none" style="width: 100%; height: 16em; size: 0.6em; border: 1px solid; padding: 4px;"></pre>
+<pre id="support-string" class="pre-scrollable d-none w-100 border p-2" style="height: 16rem;"></pre>
 </dd>
 </dl>
 </div>
@@ -185,10 +201,13 @@
 </main>
 
 <script>
-dnsCheck = false;
-timeCheck = false;
-domainCheck = false;
-httpsCheck = false;
+'use strict';
+
+var dnsCheck = false;
+var timeCheck = false;
+var domainCheck = false;
+var httpsCheck = false;
+
 (() => {
 // ================================
 // Date & Time Check
@@ -203,7 +222,10 @@
 document.getElementById("time-browser-string").innerText = browserUTC;
 
 const serverUTC = document.getElementById("time-server-string").innerText;
-const timeDrift = (Date.parse(serverUTC) - Date.parse(browserUTC)) / 1000;
+const timeDrift = (
+Date.parse(serverUTC.replace(' ', 'T').replace(' UTC', '')) -
+Date.parse(browserUTC.replace(' ', 'T').replace(' UTC', ''))
+) / 1000;
 if (timeDrift > 30 || timeDrift < -30) {
 document.getElementById('time-warning').classList.remove('d-none');
 } else {
@@ -233,7 +255,7 @@
 const webInstalled = document.getElementById('web-installed').innerText;
 checkVersions('server', serverInstalled, serverLatest, serverLatestCommit);
 
-{{#unless diagnostics.running_within_docker}}
+{{#unless page_data.running_within_docker}}
 const webLatest = document.getElementById('web-latest').innerText;
 checkVersions('web', webInstalled, webLatest);
 {{/unless}}
@@ -303,30 +325,38 @@
 // ================================
 // Generate support string to be pasted on github or the forum
 async function generateSupportString() {
-supportString = "### Your environment (Generated via diagnostics page)\n";
+let supportString = "### Your environment (Generated via diagnostics page)\n";
 
 supportString += "* Vaultwarden version: v{{ version }}\n";
-supportString += "* Web-vault version: v{{ diagnostics.web_vault_version }}\n";
-supportString += "* Running within Docker: {{ diagnostics.running_within_docker }}\n";
-supportString += "* Uses a reverse proxy: {{ diagnostics.ip_header_exists }}\n";
-{{#if diagnostics.ip_header_exists}}
-supportString += "* IP Header check: {{ diagnostics.ip_header_match }} ({{ diagnostics.ip_header_name }})\n";
+supportString += "* Web-vault version: v{{ page_data.web_vault_version }}\n";
+supportString += "* Running within Docker: {{ page_data.running_within_docker }}\n";
+supportString += "* Environment settings overridden: ";
+{{#if page_data.overrides}}
+supportString += "true\n"
+{{else}}
+supportString += "false\n"
 {{/if}}
-supportString += "* Internet access: {{ diagnostics.has_http_access }}\n";
-supportString += "* Internet access via a proxy: {{ diagnostics.uses_proxy }}\n";
+supportString += "* Uses a reverse proxy: {{ page_data.ip_header_exists }}\n";
+{{#if page_data.ip_header_exists}}
+supportString += "* IP Header check: {{ page_data.ip_header_match }} ({{ page_data.ip_header_name }})\n";
+{{/if}}
+supportString += "* Internet access: {{ page_data.has_http_access }}\n";
+supportString += "* Internet access via a proxy: {{ page_data.uses_proxy }}\n";
 supportString += "* DNS Check: " + dnsCheck + "\n";
 supportString += "* Time Check: " + timeCheck + "\n";
 supportString += "* Domain Configuration Check: " + domainCheck + "\n";
 supportString += "* HTTPS Check: " + httpsCheck + "\n";
-supportString += "* Database type: {{ diagnostics.db_type }}\n";
-supportString += "* Database version: {{ diagnostics.db_version }}\n";
+supportString += "* Database type: {{ page_data.db_type }}\n";
+supportString += "* Database version: {{ page_data.db_version }}\n";
 supportString += "* Clients used: \n";
 supportString += "* Reverse proxy and version: \n";
 supportString += "* Other relevant information: \n";
 
-jsonResponse = await fetch('{{urlpath}}/admin/diagnostics/config');
-configJson = await jsonResponse.json();
-supportString += "\n### Config (Generated via diagnostics page)\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n";
+let jsonResponse = await fetch('{{urlpath}}/admin/diagnostics/config');
+const configJson = await jsonResponse.json();
+supportString += "\n### Config (Generated via diagnostics page)\n<details><summary>Show Running Config</summary>\n"
+supportString += "\n**Environment settings which are overridden:** {{page_data.overrides}}\n"
+supportString += "\n\n```json\n" + JSON.stringify(configJson, undefined, 2) + "\n```\n</details>\n";
 
 document.getElementById('support-string').innerText = supportString;
 document.getElementById('support-string').classList.remove('d-none');
@@ -334,16 +364,19 @@
 }
 
 function copyToClipboard() {
-const str = document.getElementById('support-string').innerText;
-const el = document.createElement('textarea');
-el.value = str;
-el.setAttribute('readonly', '');
-el.style.position = 'absolute';
-el.style.left = '-9999px';
-document.body.appendChild(el);
-el.select();
-document.execCommand('copy');
-document.body.removeChild(el);
-}
+const supportStr = document.getElementById('support-string').innerText;
+const tmpCopyEl = document.createElement('textarea');
 
+tmpCopyEl.setAttribute('id', 'copy-support-string');
+tmpCopyEl.setAttribute('readonly', '');
+tmpCopyEl.value = supportStr;
+tmpCopyEl.style.position = 'absolute';
+tmpCopyEl.style.left = '-9999px';
+document.body.appendChild(tmpCopyEl);
+tmpCopyEl.select();
+document.execCommand('copy');
+tmpCopyEl.remove();
+
+new BSN.Toast('#toastClipboardCopy').show();
+}
 </script>
@@ -1,7 +1,6 @@
 <main class="container-xl">
 <div id="organizations-block" class="my-3 p-3 bg-white rounded shadow">
 <h6 class="border-bottom pb-2 mb-3">Organizations</h6>
-
 <div class="table-responsive-xl small">
 <table id="orgs-table" class="table table-sm table-striped table-hover">
 <thead>
@@ -10,19 +9,19 @@
 <th>Users</th>
 <th>Items</th>
 <th>Attachments</th>
-<th style="width: 120px; min-width: 120px;">Actions</th>
+<th style="width: 130px; min-width: 130px;">Actions</th>
 </tr>
 </thead>
 <tbody>
-{{#each organizations}}
+{{#each page_data}}
 <tr>
 <td>
-<img class="mr-2 float-left rounded identicon" data-src="{{Id}}">
-<div class="float-left">
+<img class="float-start me-2 rounded identicon" data-src="{{Id}}">
+<div class="float-start">
 <strong>{{Name}}</strong>
-<span class="mr-2">({{BillingEmail}})</span>
+<span class="me-2">({{BillingEmail}})</span>
 <span class="d-block">
-<span class="badge badge-success">{{Id}}</span>
+<span class="badge bg-success">{{Id}}</span>
 </span>
 </div>
 </td>
@@ -38,7 +37,7 @@
 <span class="d-block"><strong>Size:</strong> {{attachment_size}}</span>
 {{/if}}
 </td>
-<td style="font-size: 90%; text-align: right; padding-right: 15px">
+<td class="text-end pe-2 small">
 <a class="d-block" href="#" onclick='deleteOrganization({{jsesc Id}}, {{jsesc Name}}, {{jsesc BillingEmail}})'>Delete Organization</a>
 </td>
 </tr>
@@ -46,14 +45,15 @@
 </tbody>
 </table>
 </div>
-
 </div>
 </main>
 
 <link rel="stylesheet" href="{{urlpath}}/bwrs_static/datatables.css" />
-<script src="{{urlpath}}/bwrs_static/jquery-3.5.1.slim.js"></script>
+<script src="{{urlpath}}/bwrs_static/jquery-3.6.0.slim.js"></script>
 <script src="{{urlpath}}/bwrs_static/datatables.js"></script>
 <script>
+'use strict';
+
 function deleteOrganization(id, name, billing_email) {
 // First make sure the user wants to delete this organization
 var continueDelete = confirm("WARNING: All data of this organization ("+ name +") will be lost!\nMake sure you have a backup, this cannot be undone!");
@@ -79,7 +79,7 @@
 }
 })();
 
-document.addEventListener("DOMContentLoaded", function(event) {
+document.addEventListener("DOMContentLoaded", function() {
 $('#orgs-table').DataTable({
 "responsive": true,
 "lengthMenu": [ [-1, 5, 10, 25, 50], ["All", 5, 10, 25, 50] ],
@@ -3,34 +3,32 @@
 <div>
 <h6 class="text-white mb-3">Configuration</h6>
 <div class="small text-white mb-3">
-NOTE: The settings here override the environment variables. Once saved, it's recommended to stop setting
-them to avoid confusion. This does not apply to the read-only section, which can only be set through the
-environment.
+<span class="font-weight-bolder">NOTE:</span> The settings here override the environment variables. Once saved, it's recommended to stop setting them to avoid confusion.<br>
+This does not apply to the read-only section, which can only be set via environment variables.<br>
+Settings which are overridden are shown with <span class="is-overridden-true">double underscores</span>.
 </div>
 
-<form class="form accordion" id="config-form" onsubmit="saveConfig(); return false;">
+<form class="form needs-validation" id="config-form" onsubmit="saveConfig(); return false;" novalidate>
 {{#each config}}
 {{#if groupdoc}}
 <div class="card bg-light mb-3">
-<div class="card-header"><button type="button" class="btn btn-link collapsed" data-toggle="collapse"
-data-target="#g_{{group}}">{{groupdoc}}</button></div>
-<div id="g_{{group}}" class="card-body collapse" data-parent="#config-form">
+<div class="card-header" role="button" data-bs-toggle="collapse" data-bs-target="#g_{{group}}">
+<button type="button" class="btn btn-link text-decoration-none collapsed" data-bs-toggle="collapse" data-bs-target="#g_{{group}}">{{groupdoc}}</button>
+</div>
+<div id="g_{{group}}" class="card-body collapse">
 {{#each elements}}
 {{#if editable}}
-<div class="form-group row align-items-center" title="[{{name}}] {{doc.description}}">
+<div class="row my-2 align-items-center is-overridden-{{overridden}}" title="[{{name}}] {{doc.description}}">
 {{#case type "text" "number" "password"}}
 <label for="input_{{name}}" class="col-sm-3 col-form-label">{{doc.name}}</label>
-<div class="col-sm-8 input-group">
+<div class="col-sm-8">
+<div class="input-group">
 <input class="form-control conf-{{type}}" id="input_{{name}}" type="{{type}}"
-name="{{name}}" value="{{value}}" {{#if default}} placeholder="Default: {{default}}"
-{{/if}}>
-
+name="{{name}}" value="{{value}}" {{#if default}} placeholder="Default: {{default}}"{{/if}}>
 {{#case type "password"}}
-<div class="input-group-append">
-<button class="btn btn-outline-secondary" type="button"
-onclick="toggleVis('input_{{name}}');">Show/hide</button>
-</div>
+<button class="btn btn-outline-secondary input-group-text" type="button" onclick="toggleVis('input_{{name}}');">Show/hide</button>
 {{/case}}
+</div>
 </div>
 {{/case}}
 {{#case type "checkbox"}}
@@ -48,13 +46,12 @@
 {{/if}}
 {{/each}}
 {{#case group "smtp"}}
-<div class="form-group row align-items-center pt-3 border-top" title="Send a test email to given email address">
+<div class="row my-2 align-items-center pt-3 border-top" title="Send a test email to given email address">
 <label for="smtp-test-email" class="col-sm-3 col-form-label">Test SMTP</label>
 <div class="col-sm-8 input-group">
-<input class="form-control" id="smtp-test-email" type="email" placeholder="Enter test email">
-<div class="input-group-append">
-<button type="button" class="btn btn-outline-primary" onclick="smtpTest(); return false;">Send test email</button>
-</div>
+<input class="form-control" id="smtp-test-email" type="email" placeholder="Enter test email" required>
+<button type="button" class="btn btn-outline-primary input-group-text" onclick="smtpTest(); return false;">Send test email</button>
+<div class="invalid-tooltip">Please provide a valid email address</div>
 </div>
 </div>
 {{/case}}
@@ -64,9 +61,11 @@
 {{/each}}
 
 <div class="card bg-light mb-3">
-<div class="card-header"><button type="button" class="btn btn-link collapsed" data-toggle="collapse"
-data-target="#g_readonly">Read-Only Config</button></div>
-<div id="g_readonly" class="card-body collapse" data-parent="#config-form">
+<div class="card-header" role="button" data-bs-toggle="collapse" data-bs-target="#g_readonly">
+<button type="button" class="btn btn-link text-decoration-none collapsed" data-bs-toggle="collapse" data-bs-target="#g_readonly">Read-Only Config</button>
+</div>
+
+<div id="g_readonly" class="card-body collapse">
 <div class="small mb-3">
 NOTE: These options can't be modified in the editor because they would require the server
 to be restarted. To modify them, you need to set the correct environment variables when
@@ -76,19 +75,17 @@
 {{#each config}}
 {{#each elements}}
 {{#unless editable}}
-<div class="form-group row align-items-center" title="[{{name}}] {{doc.description}}">
+<div class="row my-2 align-items-center" title="[{{name}}] {{doc.description}}">
 {{#case type "text" "number" "password"}}
 <label for="input_{{name}}" class="col-sm-3 col-form-label">{{doc.name}}</label>
-<div class="col-sm-8 input-group">
+<div class="col-sm-8">
+<div class="input-group">
 <input readonly class="form-control" id="input_{{name}}" type="{{type}}"
 value="{{value}}" {{#if default}} placeholder="Default: {{default}}" {{/if}}>
-
 {{#case type "password"}}
-<div class="input-group-append">
-<button class="btn btn-outline-secondary" type="button"
-onclick="toggleVis('input_{{name}}');">Show/hide</button>
-</div>
+<button class="btn btn-outline-secondary" type="button" onclick="toggleVis('input_{{name}}');">Show/hide</button>
 {{/case}}
+</div>
 </div>
 {{/case}}
 {{#case type "checkbox"}}
@@ -112,9 +109,10 @@
 
 {{#if can_backup}}
 <div class="card bg-light mb-3">
-<div class="card-header"><button type="button" class="btn btn-link collapsed" data-toggle="collapse"
-data-target="#g_database">Backup Database</button></div>
-<div id="g_database" class="card-body collapse" data-parent="#config-form">
+<div class="card-header" role="button" data-bs-toggle="collapse" data-bs-target="#g_database">
+<button type="button" class="btn btn-link text-decoration-none collapsed" data-bs-toggle="collapse" data-bs-target="#g_database">Backup Database</button>
+</div>
+<div id="g_database" class="card-body collapse">
 <div class="small mb-3">
 WARNING: This function only creates a backup copy of the SQLite database.
 This does not include any configuration or file attachment data that may
@@ -128,7 +126,7 @@
 {{/if}}
 
 <button type="submit" class="btn btn-primary">Save</button>
-<button type="button" class="btn btn-danger float-right" onclick="deleteConf();">Reset defaults</button>
+<button type="button" class="btn btn-danger float-end" onclick="deleteConf();">Reset defaults</button>
 </form>
 </div>
 </div>
@@ -139,16 +137,34 @@
 /* Most modern browsers support this now. */
 color: orangered;
 }
+
+.is-overridden-true {
+text-decoration: underline double;
+}
 </style>
 
 <script>
+'use strict';
+
 function smtpTest() {
 if (formHasChanges(config_form)) {
+event.preventDefault();
+event.stopPropagation();
 alert("Config has been changed but not yet saved.\nPlease save the changes first before sending a test email.");
 return false;
 }
-test_email = document.getElementById("smtp-test-email");
-data = JSON.stringify({ "email": test_email.value });
+
+let test_email = document.getElementById("smtp-test-email");
+
+// Do a very very basic email address check.
+if (test_email.value.match(/\S+@\S+/i) === null) {
+test_email.parentElement.classList.add('was-validated');
+event.preventDefault();
+event.stopPropagation();
+return false;
+}
+
+const data = JSON.stringify({ "email": test_email.value });
 _post("{{urlpath}}/admin/test/smtp/",
 "SMTP Test email sent correctly",
 "Error sending SMTP test email", data, false);
@@ -157,21 +173,21 @@
 function getFormData() {
 let data = {};
 
-document.querySelectorAll(".conf-checkbox").forEach(function (e, i) {
+document.querySelectorAll(".conf-checkbox").forEach(function (e) {
 data[e.name] = e.checked;
 });
 
-document.querySelectorAll(".conf-number").forEach(function (e, i) {
+document.querySelectorAll(".conf-number").forEach(function (e) {
 data[e.name] = e.value ? +e.value : null;
 });
 
-document.querySelectorAll(".conf-text, .conf-password").forEach(function (e, i) {
+document.querySelectorAll(".conf-text, .conf-password").forEach(function (e) {
 data[e.name] = e.value || null;
 });
 return data;
 }
 function saveConfig() {
-data = JSON.stringify(getFormData());
+const data = JSON.stringify(getFormData());
 _post("{{urlpath}}/admin/config/", "Config saved correctly",
 "Error saving config", data);
 return false;
@@ -198,10 +214,10 @@
 function masterCheck(check_id, inputs_query) {
 function onChanged(checkbox, inputs_query) {
 return function _fn() {
-document.querySelectorAll(inputs_query).forEach(function (e, i) { e.disabled = !checkbox.checked; });
+document.querySelectorAll(inputs_query).forEach(function (e) { e.disabled = !checkbox.checked; });
 checkbox.disabled = false;
 };
-};
+}
 
 const checkbox = document.getElementById(check_id);
 const onChange = onChanged(checkbox, inputs_query);
@@ -238,7 +254,6 @@
 Array.from(risk_el).forEach((el) => {
 if (el.innerText.toLowerCase().includes('risks') ) {
 el.parentElement.className += ' alert-danger'
-console.log(el)
 }
 });
 }
@@ -7,34 +7,34 @@
 <thead>
 <tr>
 <th>User</th>
-<th style="width:65px; min-width: 65px;">Created at</th>
-<th style="width:70px; min-width: 65px;">Last Active</th>
-<th style="width:35px; min-width: 35px;">Items</th>
+<th style="width: 85px; min-width: 70px;">Created at</th>
+<th style="width: 85px; min-width: 70px;">Last Active</th>
+<th style="width: 35px; min-width: 35px;">Items</th>
 <th>Attachments</th>
 <th style="min-width: 120px;">Organizations</th>
-<th style="width: 120px; min-width: 120px;">Actions</th>
+<th style="width: 130px; min-width: 130px;">Actions</th>
 </tr>
 </thead>
 <tbody>
-{{#each users}}
+{{#each page_data}}
 <tr>
 <td>
-<img class="float-left mr-2 rounded identicon" data-src="{{Email}}">
-<div class="float-left">
+<img class="float-start me-2 rounded identicon" data-src="{{Email}}">
+<div class="float-start">
 <strong>{{Name}}</strong>
 <span class="d-block">{{Email}}</span>
 <span class="d-block">
 {{#unless user_enabled}}
-<span class="badge badge-danger mr-2" title="User is disabled">Disabled</span>
+<span class="badge bg-danger me-2" title="User is disabled">Disabled</span>
 {{/unless}}
 {{#if TwoFactorEnabled}}
-<span class="badge badge-success mr-2" title="2FA is enabled">2FA</span>
+<span class="badge bg-success me-2" title="2FA is enabled">2FA</span>
 {{/if}}
 {{#case _Status 1}}
-<span class="badge badge-warning mr-2" title="User is invited">Invited</span>
+<span class="badge bg-warning me-2" title="User is invited">Invited</span>
 {{/case}}
 {{#if EmailVerified}}
-<span class="badge badge-success mr-2" title="Email has been verified">Verified</span>
+<span class="badge bg-success me-2" title="Email has been verified">Verified</span>
 {{/if}}
 </span>
 </div>
@@ -57,11 +57,11 @@
 <td>
 <div class="overflow-auto" style="max-height: 120px;">
 {{#each Organizations}}
-<button class="badge badge-primary" data-toggle="modal" data-target="#userOrgTypeDialog" data-orgtype="{{Type}}" data-orguuid="{{jsesc Id no_quote}}" data-orgname="{{jsesc Name no_quote}}" data-useremail="{{jsesc ../Email no_quote}}" data-useruuid="{{jsesc ../Id no_quote}}">{{Name}}</button>
+<button class="badge" data-bs-toggle="modal" data-bs-target="#userOrgTypeDialog" data-orgtype="{{Type}}" data-orguuid="{{jsesc Id no_quote}}" data-orgname="{{jsesc Name no_quote}}" data-useremail="{{jsesc ../Email no_quote}}" data-useruuid="{{jsesc ../Id no_quote}}">{{Name}}</button>
 {{/each}}
 </div>
 </td>
-<td style="font-size: 90%; text-align: right; padding-right: 15px">
+<td class="text-end pe-2 small">
 {{#if TwoFactorEnabled}}
 <a class="d-block" href="#" onclick='remove2fa({{jsesc Id}})'>Remove all 2FA</a>
 {{/if}}
@@ -85,7 +85,7 @@
 Force clients to resync
 </button>
 
-<button type="button" class="btn btn-sm btn-primary float-right" onclick="reload();">Reload users</button>
+<button type="button" class="btn btn-sm btn-primary float-end" onclick="reload();">Reload users</button>
 </div>
 </div>
 
@@ -94,8 +94,8 @@
 <h6 class="mb-0 text-white">Invite User</h6>
 <small>Email:</small>
 
-<form class="form-inline" id="invite-form" onsubmit="inviteUser(); return false;">
-<input type="email" class="form-control w-50 mr-2" id="email-invite" placeholder="Enter email">
+<form class="form-inline input-group w-50" id="invite-form" onsubmit="inviteUser(); return false;">
+<input type="email" class="form-control me-2" id="email-invite" placeholder="Enter email" required>
 <button type="submit" class="btn btn-primary">Invite</button>
 </form>
 </div>
@@ -106,9 +106,7 @@
 <div class="modal-content">
 <div class="modal-header">
 <h6 class="modal-title" id="userOrgTypeDialogTitle"></h6>
-<button type="button" class="close" data-dismiss="modal" aria-label="Close">
-<span aria-hidden="true">×</span>
-</button>
+<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
 </div>
 <form class="form" id="userOrgTypeForm" onsubmit="updateUserOrgType(); return false;">
 <input type="hidden" name="user_uuid" id="userOrgTypeUserUuid" value="">
@@ -128,7 +126,7 @@
 </div>
 </div>
 <div class="modal-footer">
-<button type="button" class="btn btn-sm btn-secondary" data-dismiss="modal">Cancel</button>
+<button type="button" class="btn btn-sm btn-secondary" data-bs-dismiss="modal">Cancel</button>
 <button type="submit" class="btn btn-sm btn-primary">Change Role</button>
 </div>
 </form>
@@ -138,9 +136,11 @@
 </main>
 
 <link rel="stylesheet" href="{{urlpath}}/bwrs_static/datatables.css" />
-<script src="{{urlpath}}/bwrs_static/jquery-3.5.1.slim.js"></script>
+<script src="{{urlpath}}/bwrs_static/jquery-3.6.0.slim.js"></script>
 <script src="{{urlpath}}/bwrs_static/datatables.js"></script>
 <script>
+'use strict';
+
 function deleteUser(id, mail) {
 var input_mail = prompt("To delete user '" + mail + "', please type the email below")
 if (input_mail != null) {
@@ -191,8 +191,8 @@
 return false;
 }
 function inviteUser() {
-inv = document.getElementById("email-invite");
-data = JSON.stringify({ "email": inv.value });
+const inv = document.getElementById("email-invite");
+const data = JSON.stringify({ "email": inv.value });
 inv.value = "";
 _post("{{urlpath}}/admin/invite/", "User invited correctly",
 "Error inviting user", data);
@@ -212,7 +212,7 @@
 }
 })();
 
-document.querySelectorAll("[data-orgtype]").forEach(function (e, i) {
+document.querySelectorAll("[data-orgtype]").forEach(function (e) {
 let orgtype = OrgTypes[e.dataset.orgtype];
 e.style.backgroundColor = orgtype.color;
 e.title = orgtype.name;
@@ -225,7 +225,7 @@
 let sortDate = a.replace(/(<([^>]+)>)/gi, "").trim();
 if ( sortDate !== '' ) {
 let dtParts = sortDate.split(' ');
-var timeParts = (undefined != dtParts[1]) ? dtParts[1].split(':') : [00,00,00];
+var timeParts = (undefined != dtParts[1]) ? dtParts[1].split(':') : ['00','00','00'];
 var dateParts = dtParts[0].split('-');
 x = (dateParts[0] + dateParts[1] + dateParts[2] + timeParts[0] + timeParts[1] + ((undefined != timeParts[2]) ? timeParts[2] : 0)) * 1;
 if ( isNaN(x) ) {
@@ -246,7 +246,7 @@
 }
 });
 
-document.addEventListener("DOMContentLoaded", function(event) {
+document.addEventListener("DOMContentLoaded", function() {
 $('#users-table').DataTable({
 "responsive": true,
 "lengthMenu": [ [-1, 5, 10, 25, 50], ["All", 5, 10, 25, 50] ],
@@ -275,7 +275,7 @@
|
|||||||
}, false);
|
}, false);
|
||||||
|
|
||||||
// Prevent accidental submission of the form with valid elements after the modal has been hidden.
|
// Prevent accidental submission of the form with valid elements after the modal has been hidden.
|
||||||
userOrgTypeDialog.addEventListener('hide.bs.modal', function(event){
|
userOrgTypeDialog.addEventListener('hide.bs.modal', function(){
|
||||||
document.getElementById("userOrgTypeDialogTitle").innerHTML = '';
|
document.getElementById("userOrgTypeDialogTitle").innerHTML = '';
|
||||||
document.getElementById("userOrgTypeUserUuid").value = '';
|
document.getElementById("userOrgTypeUserUuid").value = '';
|
||||||
document.getElementById("userOrgTypeOrgUuid").value = '';
|
document.getElementById("userOrgTypeOrgUuid").value = '';
|
||||||
|
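Reviewer-style side note on the `timeParts` fallback change in the `@@ -225` hunk above (an illustrative sketch, not part of the template; the sample date values are made up): the sort key for the date column is built by string concatenation and then coerced with `* 1`, so a numeric fallback loses the two-digit zero padding, while the string fallback keeps every key the same width. Also, with the `'use strict';` directive this diff adds, the legacy octal-style literal `00` would no longer even parse.

```javascript
// Illustrative only: why the missing-time fallback uses strings instead of numbers.
const dateParts = ["2021", "05", "01"];          // hypothetical sample value

const stringFallback = ["00", "00", "00"];
const numberFallback = [0, 0, 0];                // what [00,00,00] evaluates to outside strict mode

const keyWithStrings = (dateParts[0] + dateParts[1] + dateParts[2]
    + stringFallback[0] + stringFallback[1] + stringFallback[2]) * 1;
const keyWithNumbers = (dateParts[0] + dateParts[1] + dateParts[2]
    + numberFallback[0] + numberFallback[1] + numberFallback[2]) * 1;

console.log(keyWithStrings); // 20210501000000 -> same width as keys built from "YYYY-MM-DD HH:MM:SS" values
console.log(keyWithNumbers); // 20210501000    -> shorter key, so date-only rows sort incorrectly against full timestamps
```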
@@ -1,129 +1,16 @@
 Your Email Change
 <!---------------->
-<html xmlns="http://www.w3.org/1999/xhtml" xmlns="http://www.w3.org/1999/xhtml" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<head>
-<meta name="viewport" content="width=device-width" />
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
-<title>Vaultwarden</title>
-</head>
-<body style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; height: 100%; line-height: 25px; width: 100% !important;" bgcolor="#f6f6f6">
-<style type="text/css">
-body {
-margin: 0;
-font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
-box-sizing: border-box;
-font-size: 16px;
-color: #333;
-line-height: 25px;
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-}
-body * {
-margin: 0;
-font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
-box-sizing: border-box;
-font-size: 16px;
-color: #333;
-line-height: 25px;
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-}
-img {
-max-width: 100%;
-border: none;
-}
-body {
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-width: 100% !important;
-height: 100%;
-line-height: 25px;
-}
-body {
-background-color: #f6f6f6;
-}
-@media only screen and (max-width: 600px) {
-body {
-padding: 0 !important;
-}
-.container {
-padding: 0 !important;
-width: 100% !important;
-}
-.container-table {
-padding: 0 !important;
-width: 100% !important;
-}
-.content {
-padding: 0 0 10px 0 !important;
-}
-.content-wrap {
-padding: 10px !important;
-}
-.invoice {
-width: 100% !important;
-}
-.main {
-border-right: none !important;
-border-left: none !important;
-border-radius: 0 !important;
-}
-.logo {
-padding-top: 10px !important;
-}
-.footer {
-margin-top: 10px !important;
-}
-.indented {
-padding-left: 10px;
-}
-}
-</style>
-<table class="body-wrap" cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; width: 100%;" bgcolor="#f6f6f6">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td valign="middle" class="aligncenter middle logo" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; padding: 20px 0 10px;" align="center">
-<img src="{{url}}/bwrs_static/logo-gray.png" alt="" width="250" height="39" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" />
-</td>
-</tr>
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="container" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both !important; color: #333; display: block !important; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto; max-width: 600px !important; width: 600px;" valign="top">
-<table cellpadding="0" cellspacing="0" class="container-table" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both !important; color: #333; display: block !important; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto; max-width: 600px !important; width: max-content;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="content" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; display: block; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 0; line-height: 0; margin: 0 auto; max-width: 600px; padding-bottom: 20px;" valign="top">
-<table class="main" width="100%" cellpadding="0" cellspacing="0" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; margin: 0; -webkit-text-size-adjust: none; border: 1px solid #e9e9e9; border-radius: 3px;" bgcolor="white">
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-wrap" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 20px; -webkit-text-size-adjust: none;" valign="top">
-<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-To finalize changing your email address enter the following code in web vault: <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{token}}</b>
-</td>
-</tr>
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-If you did not try to change an email address, you can safely ignore this email.
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-<table class="footer" cellpadding="0" cellspacing="0" width="100%" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; width: 100%;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 15px 0 0 0;" valign="top">
-<table cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{url}}/bwrs_static/mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</body>
-</html>
+{{> email/email_header }}
+<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
+To finalize changing your email address enter the following code in web vault: <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{token}}</b>
+</td>
+</tr>
+<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
+If you did not try to change an email address, you can safely ignore this email.
+</td>
+</tr>
+</table>
+{{> email/email_footer }}
@@ -5,6 +5,4 @@ Click the link below to delete your account.
 Delete Your Account: {{url}}/#/verify-recover-delete?userId={{user_id}}&token={{token}}&email={{email}}

 If you did not request this email to delete your account, you can safely ignore this email.
-===
-Github: https://github.com/dani-garcia/vaultwarden
+{{> email/email_footer_text }}
@@ -1,137 +1,24 @@
 Delete Your Account
 <!---------------->
-<html xmlns="http://www.w3.org/1999/xhtml" xmlns="http://www.w3.org/1999/xhtml" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<head>
-<meta name="viewport" content="width=device-width" />
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
-<title>Vaultwarden</title>
-</head>
-<body style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; height: 100%; line-height: 25px; width: 100% !important;" bgcolor="#f6f6f6">
-<style type="text/css">
-body {
-margin: 0;
-font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
-box-sizing: border-box;
-font-size: 16px;
-color: #333;
-line-height: 25px;
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-}
-body * {
-margin: 0;
-font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
-box-sizing: border-box;
-font-size: 16px;
-color: #333;
-line-height: 25px;
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-}
-img {
-max-width: 100%;
-border: none;
-}
-body {
--webkit-font-smoothing: antialiased;
--webkit-text-size-adjust: none;
-width: 100% !important;
-height: 100%;
-line-height: 25px;
-}
-body {
-background-color: #f6f6f6;
-}
-@media only screen and (max-width: 600px) {
-body {
-padding: 0 !important;
-}
-.container {
-padding: 0 !important;
-width: 100% !important;
-}
-.container-table {
-padding: 0 !important;
-width: 100% !important;
-}
-.content {
-padding: 0 0 10px 0 !important;
-}
-.content-wrap {
-padding: 10px !important;
-}
-.invoice {
-width: 100% !important;
-}
-.main {
-border-right: none !important;
-border-left: none !important;
-border-radius: 0 !important;
-}
-.logo {
-padding-top: 10px !important;
-}
-.footer {
-margin-top: 10px !important;
-}
-.indented {
-padding-left: 10px;
-}
-}
-</style>
-<table class="body-wrap" cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; width: 100%;" bgcolor="#f6f6f6">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td valign="middle" class="aligncenter middle logo" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; padding: 20px 0 10px;" align="center">
-<img src="{{url}}/bwrs_static/logo-gray.png" alt="" width="250" height="39" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" />
-</td>
-</tr>
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="container" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both !important; color: #333; display: block !important; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto; max-width: 600px !important; width: 600px;" valign="top">
-<table cellpadding="0" cellspacing="0" class="container-table" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both !important; color: #333; display: block !important; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto; max-width: 600px !important; width: max-content;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="content" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; display: block; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 0; line-height: 0; margin: 0 auto; max-width: 600px; padding-bottom: 20px;" valign="top">
-<table class="main" width="100%" cellpadding="0" cellspacing="0" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; margin: 0; -webkit-text-size-adjust: none; border: 1px solid #e9e9e9; border-radius: 3px;" bgcolor="white">
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-wrap" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 20px; -webkit-text-size-adjust: none;" valign="top">
-<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-click the link below to delete your account.
-</td>
-</tr>
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-<a href="{{url}}/#/verify-recover-delete?userId={{user_id}}&token={{token}}&email={{email}}"
-clicktracking=off target="_blank" style="color: #ffffff; text-decoration: none; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; background-color: #3c8dbc; border-color: #3c8dbc; border-style: solid; border-width: 10px 20px; margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-Delete Your Account
-</a>
-</td>
-</tr>
-<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
-<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-If you did not request this email to delete your account, you can safely ignore this email.
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-<table class="footer" cellpadding="0" cellspacing="0" width="100%" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; width: 100%;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 15px 0 0 0;" valign="top">
-<table cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto;">
-<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{url}}/bwrs_static/mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</td>
-</tr>
-</table>
-</body>
-</html>
+{{> email/email_header }}
+<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
+click the link below to delete your account.
+</td>
+</tr>
+<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
+<a href="{{url}}/#/verify-recover-delete?userId={{user_id}}&token={{token}}&email={{email}}"
+clicktracking=off target="_blank" style="color: #ffffff; text-decoration: none; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; background-color: #3c8dbc; border-color: #3c8dbc; border-style: solid; border-width: 10px 20px; margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+Delete Your Account
+</a>
+</td>
+</tr>
+<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+<td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
+If you did not request this email to delete your account, you can safely ignore this email.
+</td>
+</tr>
+</table>
+{{> email/email_footer }}
src/static/templates/email/email_footer.hbs (new file, 24 lines)
@@ -0,0 +1,24 @@
+</td>
+</tr>
+</table>
+</td>
+</tr>
+</table>
+
+<table class="footer" cellpadding="0" cellspacing="0" width="100%" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; width: 100%;">
+<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
+<td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 15px 0 0 0;" valign="top">
+<table cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto;">
+<tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
+<td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{url}}/bwrs_static/mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>
+</tr>
+</table>
+</td>
+</tr>
+</table>
+
+</td>
+</tr>
+</table>
+</body>
+</html>
src/static/templates/email/email_footer_text.hbs (new file, 3 lines)
@@ -0,0 +1,3 @@
+
+===
+Github: https://github.com/dani-garcia/vaultwarden
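Taken together, the two new files above are Handlebars partials that factor the shared mail wrapper out of the individual message templates; the rewritten templates reference them through the `{{> email/email_header }}`, `{{> email/email_footer }}` and `{{> email/email_footer_text }}` partial calls. Below is a minimal, hypothetical sketch of how a message template composes them, mirroring the structure of the rewritten change_email / delete_account templates; the `email/email_header` partial itself is not shown in this section, and the simplified inline markup here is illustrative only.

```handlebars
{{!-- Hypothetical minimal message template (sketch, not from the repository). --}}
Example Subject Line
<!---------------->
{{> email/email_header }}
<table width="100%" cellpadding="0" cellspacing="0">
  <tr>
    <td class="content-block" align="center">
      Message body goes here, e.g. your verification code is <b>{{token}}</b>
    </td>
  </tr>
</table>
{{> email/email_footer }}
```

How the partials are registered under the `email/` prefix is inferred from the partial names used in the diff and is not shown in this section.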
|