Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-09-10 18:55:57 +03:00)
Compare commits
57 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | cdfdc6ff4f |  |
|  | 2393c3f3c0 |  |
|  | 0d16b38a68 |  |
|  | ff33534c07 |  |
|  | adb21d5c1a |  |
|  | e927b8aa5e |  |
|  | ba48ca68fc |  |
|  | 294b429436 |  |
|  | 37c14c3c69 |  |
|  | d0581da638 |  |
|  | 38aad4f7be |  |
|  | 20d9e885bf |  |
|  | 2f20ad86f9 |  |
|  | 33bae5fbe9 |  |
|  | f60502a17e |  |
|  | 13f4b66e62 |  |
|  | c967d0ddc1 |  |
|  | ae6ed0ece8 |  |
|  | b7c254eb30 |  |
|  | a47b484172 |  |
|  | 65629a99f0 |  |
|  | 49c5dec9b6 |  |
|  | cd195ff243 |  |
|  | e3541763fd |  |
|  | f0efec7c96 |  |
|  | 040e2a7bb0 |  |
|  | d184c8f08c |  |
|  | 7d6dec6413 |  |
|  | de01111082 |  |
|  | 0bd8f607cb |  |
|  | 21efc0800d |  |
|  | 1031c2e286 |  |
|  | 1bf85201e7 |  |
|  | 6ceed9284d |  |
|  | 25d99e3506 |  |
|  | dca14285fd |  |
|  | 66baa5e7d8 |  |
|  | 248e561b3f |  |
|  | 55623ad9c6 |  |
|  | e9acd8bd3c |  |
|  | 544b7229e8 |  |
|  | 978f009293 |  |
|  | 92f1530e96 |  |
|  | 2b824e8096 |  |
|  | 059661be48 |  |
|  | 0f3f97cc76 |  |
|  | aa0fe7785a |  |
|  | 65d11a9720 |  |
|  | c722006385 |  |
|  | aaab7f9640 |  |
|  | cbdb5657f1 |  |
|  | 669b9db758 |  |
|  | 3466a8040e |  |
|  | 7d47155d83 |  |
|  | 9e26014b4d |  |
|  | 339612c917 |  |
|  | 9eebbf3b9f |  |
.dockerignore
@@ -1,40 +1,15 @@
-# Local build artifacts
-target
-
-# Data folder
-data
-
-# Misc
-.env
-.env.template
-.gitattributes
-.gitignore
-rustfmt.toml
-
-# IDE files
-.vscode
-.idea
-.editorconfig
-*.iml
-
-# Documentation
-.github
-*.md
-*.txt
-*.yml
-*.yaml
-
-# Docker
-hooks
-tools
-Dockerfile
-.dockerignore
-docker/**
+// Ignore everything
+*
+
+// Allow what is needed
+!.git
 !docker/healthcheck.sh
 !docker/start.sh
-
-# Web vault
-web-vault
-
-# Vaultwarden Resources
-resources
+!migrations
+!src
+
+!build.rs
+!Cargo.lock
+!Cargo.toml
+!rustfmt.toml
+!rust-toolchain.toml
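Not part of the compare page itself: a quick way to sanity-check the allow-list style .dockerignore introduced above is to run a local build and confirm the whitelisted paths still reach the build context. The tag name is only an example; it assumes you run this from the repository root with Docker installed.

```shell
# Build the Debian-based image; with the allow-list .dockerignore only the
# whitelisted paths (src, migrations, Cargo.*, docker/*.sh, ...) are sent
# to the build context.
docker build -f docker/Dockerfile.debian -t vaultwarden:local-test .
```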
.env.template
@@ -280,12 +280,13 @@
 ## The default for new users. If changed, it will be updated during login for existing users.
 # PASSWORD_ITERATIONS=600000
 
-## Controls whether users can set password hints. This setting applies globally to all users.
+## Controls whether users can set or show password hints. This setting applies globally to all users.
 # PASSWORD_HINTS_ALLOWED=true
 
 ## Controls whether a password hint should be shown directly in the web page if
-## SMTP service is not configured. Not recommended for publicly-accessible instances
-## as this provides unauthenticated access to potentially sensitive data.
+## SMTP service is not configured and password hints are allowed.
+## Not recommended for publicly-accessible instances because this provides
+## unauthenticated access to potentially sensitive data.
 # SHOW_PASSWORD_HINT=false
 
 #########################
@@ -347,7 +348,10 @@
 ## - "autofill-overlay": Add an overlay menu to form fields for quick access to credentials.
 ## - "autofill-v2": Use the new autofill implementation.
 ## - "browser-fileless-import": Directly import credentials from other providers without a file.
+## - "extension-refresh": Temporarily enable the new extension design until general availability (should be used with the beta Chrome extension)
 ## - "fido2-vault-credentials": Enable the use of FIDO2 security keys as second factor.
+## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
+## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
 # EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
 
 ## Require new device emails. When a user logs in an email is required to be sent.
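Not part of the diff: a sketch of how the newly documented flags could be combined in a deployment's `.env` file. The specific combination below is only an example for an instance that wants the SSH-related features; pick the flags that match your clients.

```shell
# Example .env entry (assumption: these particular flags are wanted)
EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials,ssh-key-vault-item,ssh-agent
```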
@@ -425,6 +429,12 @@
 ## KNOW WHAT YOU ARE DOING!
 # INCREASE_NOTE_SIZE_LIMIT=false
 
+## Enforce Single Org with Reset Password Policy
+## Enforce that the Single Org policy is enabled before setting the Reset Password policy
+## Bitwarden enforces this by default. In Vaultwarden we encouraged to use multiple organizations because groups were not available.
+## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
+# ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false
+
 ########################
 ### MFA/2FA settings ###
 ########################
.github/ISSUE_TEMPLATE/bug_report.md (vendored): 66 lines changed
@@ -1,66 +0,0 @@
----
-name: Bug report
-about: Use this ONLY for bugs in vaultwarden itself. Use the Discourse forum (link below) to request features or get help with usage/configuration. If in doubt, use the forum.
-title: ''
-labels: ''
-assignees: ''
-
----
-<!--
-# ###
-NOTE: Please update to the latest version of vaultwarden before reporting an issue!
-This saves you and us a lot of time and troubleshooting.
-See:
-* https://github.com/dani-garcia/vaultwarden/issues/1180
-* https://github.com/dani-garcia/vaultwarden/wiki/Updating-the-vaultwarden-image
-# ###
--->
-
-<!--
-Please fill out the following template to make solving your problem easier and faster for us.
-This is only a guideline. If you think that parts are unnecessary for your issue, feel free to remove them.
-
-Remember to hide/redact personal or confidential information,
-such as passwords, IP addresses, and DNS names as appropriate.
--->
-
-### Subject of the issue
-<!-- Describe your issue here. -->
-
-### Deployment environment
-
-<!--
-=========================================================================================
-Preferably, use the `Generate Support String` button on the admin page's Diagnostics tab.
-That will auto-generate most of the info requested in this section.
-=========================================================================================
--->
-
-<!-- The version number, obtained from the logs (at startup) or the admin diagnostics page -->
-<!-- This is NOT the version number shown on the web vault, which is versioned separately from vaultwarden -->
-<!-- Remember to check if your issue exists on the latest version first! -->
-* vaultwarden version:
-
-<!-- How the server was installed: Docker image, OS package, built from source, etc. -->
-* Install method:
-
-* Clients used: <!-- web vault, desktop, Android, iOS, etc. (if applicable) -->
-
-* Reverse proxy and version: <!-- if applicable -->
-
-* MySQL/MariaDB or PostgreSQL version: <!-- if applicable -->
-
-* Other relevant details:
-
-### Steps to reproduce
-<!-- Tell us how to reproduce this issue. What parameters did you set (differently from the defaults)
-and how did you start vaultwarden? -->
-
-### Expected behaviour
-<!-- Tell us what you expected to happen -->
-
-### Actual behaviour
-<!-- Tell us what actually happened -->
-
-### Troubleshooting data
-<!-- Share any log files, screenshots, or other relevant troubleshooting data -->
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file): 167 lines changed
@@ -0,0 +1,167 @@
+name: Bug Report
+description: File a bug report
+labels: ["bug"]
+body:
+  #
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for taking the time to fill out this bug report!
+
+        Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here.
+
+        The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas.
+
+        Also, make sure you are running [](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden!
+        And search for existing open or closed issues or discussions regarding your topic before posting.
+
+        Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors!
+        See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page).
+  #
+  - id: support-string
+    type: textarea
+    attributes:
+      label: Vaultwarden Support String
+      description: Output of the **Generate Support String** from the `/admin/diagnostics` page.
+      placeholder: |
+        1. Go to the Vaultwarden Admin of your instance https://example.domain.tld/admin/diagnostics
+        2. Click on `Generate Support String`
+        3. Click on `Copy To Clipboard`
+        4. Replace this text by pasting it into this textarea without any modifications
+    validations:
+      required: true
+  #
+  - id: version
+    type: input
+    attributes:
+      label: Vaultwarden Build Version
+      description: What version of Vaultwarden are you running?
+      placeholder: ex. v1.31.0 or v1.32.0-3466a804
+    validations:
+      required: true
+  #
+  - id: deployment
+    type: dropdown
+    attributes:
+      label: Deployment method
+      description: How did you deploy Vaultwarden?
+      multiple: false
+      options:
+        - Official Container Image
+        - Build from source
+        - OS Package (apt, yum/dnf, pacman, apk, nix, ...)
+        - Manually Extracted from Container Image
+        - Downloaded from GitHub Actions Release Workflow
+        - Other method
+    validations:
+      required: true
+  #
+  - id: deployment-other
+    type: textarea
+    attributes:
+      label: Custom deployment method
+      description: If you deployed Vaultwarden via any other method, please describe how.
+  #
+  - id: reverse-proxy
+    type: input
+    attributes:
+      label: Reverse Proxy
+      description: Are you using a reverse proxy, if so which and what version?
+      placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0
+    validations:
+      required: true
+  #
+  - id: os
+    type: dropdown
+    attributes:
+      label: Host/Server Operating System
+      description: On what operating system are you running the Vaultwarden server?
+      multiple: false
+      options:
+        - Linux
+        - NAS/SAN
+        - Cloud
+        - Windows
+        - macOS
+        - Other
+    validations:
+      required: true
+  #
+  - id: os-version
+    type: input
+    attributes:
+      label: Operating System Version
+      description: What version of the operating system(s) are you seeing the problem on?
+      placeholder: ex. Arch Linux, Ubuntu 24.04, Kubernetes, Synology DSM 7.x, Windows 11
+  #
+  - id: clients
+    type: dropdown
+    attributes:
+      label: Clients
+      description: What client(s) are you seeing the problem on?
+      multiple: true
+      options:
+        - Web Vault
+        - Browser Extension
+        - CLI
+        - Desktop
+        - Android
+        - iOS
+    validations:
+      required: true
+  #
+  - id: client-version
+    type: input
+    attributes:
+      label: Client Version
+      description: What version(s) of the client(s) are you seeing the problem on?
+      placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0
+  #
+  - id: reproduce
+    type: textarea
+    attributes:
+      label: Steps To Reproduce
+      description: How can we reproduce the behavior.
+      value: |
+        1. Go to '...'
+        2. Click on '....'
+        3. Scroll down to '....'
+        4. Click on '...'
+        5. Etc '...'
+    validations:
+      required: true
+  #
+  - id: expected
+    type: textarea
+    attributes:
+      label: Expected Result
+      description: A clear and concise description of what you expected to happen.
+    validations:
+      required: true
+  #
+  - id: actual
+    type: textarea
+    attributes:
+      label: Actual Result
+      description: A clear and concise description of what is happening.
+    validations:
+      required: true
+  #
+  - id: logs
+    type: textarea
+    attributes:
+      label: Logs
+      description: Provide the logs generated by Vaultwarden during the time this issue occurs.
+      render: text
+  #
+  - id: screenshots
+    type: textarea
+    attributes:
+      label: Screenshots or Videos
+      description: If applicable, add screenshots and/or a short video to help explain your problem.
+  #
+  - id: additional-context
+    type: textarea
+    attributes:
+      label: Additional Context
+      description: Add any other context about the problem here.
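A side note, not from the page: if you copy this issue-form change into your own fork, a plain YAML syntax check catches indentation mistakes introduced while copying. yamllint is just one commonly available tool here, and it only validates YAML syntax, not GitHub's issue-forms schema.

```shell
# Basic syntax check of the new issue form; schema-level validation still
# happens on GitHub's side when the template is rendered.
yamllint .github/ISSUE_TEMPLATE/bug_report.yml
```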
.github/ISSUE_TEMPLATE/config.yml (vendored): 10 lines changed
@@ -1,8 +1,8 @@
 blank_issues_enabled: false
 contact_links:
-  - name: Discourse forum for vaultwarden
-    url: https://vaultwarden.discourse.group/
-    about: Use this forum to request features or get help with usage/configuration.
-  - name: GitHub Discussions for vaultwarden
+  - name: GitHub Discussions for Vaultwarden
     url: https://github.com/dani-garcia/vaultwarden/discussions
-    about: An alternative to the Discourse forum, if this is easier for you.
+    about: Use the discussions to request features or get help with usage/configuration.
+  - name: Discourse forum for Vaultwarden
+    url: https://vaultwarden.discourse.group/
+    about: An alternative to the GitHub Discussions, if this is easier for you.
.github/workflows/build.yml (vendored): 7 lines changed
@@ -28,6 +28,7 @@ on:
 
 jobs:
   build:
+    # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
     runs-on: ubuntu-22.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
@@ -46,7 +47,7 @@ jobs:
     steps:
       # Checkout the repo
       - name: "Checkout"
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
       # End Checkout the repo
 
 
@@ -74,7 +75,7 @@ jobs:
 
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@21dc36fb71dd22e3317045c0c31a3f4249868b17 # master @ Jun 13, 2024, 6:20 PM GMT+2
+        uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
        if: ${{ matrix.channel == 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -84,7 +85,7 @@ jobs:
 
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@21dc36fb71dd22e3317045c0c31a3f4249868b17 # master @ Jun 13, 2024, 6:20 PM GMT+2
+        uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a # master @ Aug 8, 2024, 7:36 PM GMT+2
        if: ${{ matrix.channel != 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
.github/workflows/hadolint.yml (vendored): 30 lines changed
@@ -8,14 +8,26 @@ on: [
 jobs:
   hadolint:
     name: Validate Dockerfile syntax
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 30
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
       # End Checkout the repo
 
+      # Start Docker Buildx
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
+        # https://github.com/moby/buildkit/issues/3969
+        # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
+        with:
+          buildkitd-config-inline: |
+            [worker.oci]
+              max-parallelism = 2
+          driver-opts: |
+            network=host
+
       # Download hadolint - https://github.com/hadolint/hadolint/releases
       - name: Download hadolint
         shell: bash
@@ -26,8 +38,18 @@ jobs:
           HADOLINT_VERSION: 2.12.0
       # End Download hadolint
 
-      # Test Dockerfiles
+      # Test Dockerfiles with hadolint
       - name: Run hadolint
         shell: bash
         run: hadolint docker/Dockerfile.{debian,alpine}
-      # End Test Dockerfiles
+      # End Test Dockerfiles with hadolint
+
+      # Test Dockerfiles with docker build checks
+      - name: Run docker build check
+        shell: bash
+        run: |
+          echo "Checking docker/Dockerfile.debian"
+          docker build --check . -f docker/Dockerfile.debian
+          echo "Checking docker/Dockerfile.alpine"
+          docker build --check . -f docker/Dockerfile.alpine
+      # End Test Dockerfiles with docker build checks
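The same two checks the updated workflow performs can be run locally before pushing. The commands mirror the workflow steps above; the only assumptions are that hadolint and a Docker release recent enough to support `docker build --check` (BuildKit) are installed.

```shell
# Lint both Dockerfiles, then let BuildKit run its built-in build checks
hadolint docker/Dockerfile.debian docker/Dockerfile.alpine
docker build --check . -f docker/Dockerfile.debian
docker build --check . -f docker/Dockerfile.alpine
```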
.github/workflows/release.yml (vendored): 22 lines changed
@@ -13,7 +13,7 @@ jobs:
   # Some checks to determine if we need to continue with building a new docker.
   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
   skip_check:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
@@ -27,7 +27,7 @@ jobs:
         if: ${{ github.ref_type == 'branch' }}
 
   docker-build:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 120
     needs: skip_check
     if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
@@ -58,7 +58,7 @@ jobs:
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
        with:
          fetch-depth: 0
 
@@ -69,13 +69,13 @@ jobs:
 
       # Start Docker Buildx
       - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27 # v3.5.0
+        uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1
        # https://github.com/moby/buildkit/issues/3969
-       # Also set max parallelism to 3, the default of 4 breaks GitHub Actions and causes OOMKills
+       # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
        with:
          buildkitd-config-inline: |
            [worker.oci]
-             max-parallelism = 3
+             max-parallelism = 2
          driver-opts: |
            network=host
 
@@ -165,7 +165,7 @@ jobs:
           echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"
 
       - name: Bake ${{ matrix.base_image }} containers
-        uses: docker/bake-action@a4d7f0b5b91c14a296d792d4ec53a9db17f02e67 # v5.5.0
+        uses: docker/bake-action@2e3d19baedb14545e5d41222653874f25d5b4dfb # v5.10.0
        env:
          BASE_TAGS: "${{ env.BASE_TAGS }}"
          SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -223,28 +223,28 @@ jobs:
 
       # Upload artifacts to Github Actions
       - name: "Upload amd64 artifact"
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: ${{ matrix.base_image == 'alpine' }}
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64
          path: vaultwarden-amd64
 
       - name: "Upload arm64 artifact"
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: ${{ matrix.base_image == 'alpine' }}
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64
          path: vaultwarden-arm64
 
       - name: "Upload armv7 artifact"
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: ${{ matrix.base_image == 'alpine' }}
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7
          path: vaultwarden-armv7
 
       - name: "Upload armv6 artifact"
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        if: ${{ matrix.base_image == 'alpine' }}
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6
.github/workflows/releasecache-cleanup.yml (vendored): 2 lines changed
@@ -13,7 +13,7 @@ name: Cleanup
 jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30
     steps:
.github/workflows/trivy.yml (vendored): 13 lines changed
@@ -9,15 +9,18 @@ on:
   pull_request:
     branches: [ "main" ]
   schedule:
-    - cron: '00 12 * * *'
+    - cron: '08 11 * * *'
 
 permissions:
   contents: read
 
 jobs:
   trivy-scan:
+    # Only run this in the master repo and not on forks
+    # When all forks run this at the same time, it is causing `Too Many Requests` issues
+    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Check
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     timeout-minutes: 30
     permissions:
       contents: read
@@ -25,10 +28,10 @@ jobs:
       actions: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+        uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 #v4.2.1
 
       - name: Run Trivy vulnerability scanner
-        uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
+        uses: aquasecurity/trivy-action@5681af892cd0f4997658e2bacc62bd0a894cf564 # v0.27.0
        with:
          scan-type: repo
          ignore-unfixed: true
@@ -37,6 +40,6 @@ jobs:
           severity: CRITICAL,HIGH
 
       - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.25.10
+        uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.26.6
        with:
          sarif_file: 'trivy-results.sarif'
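For reference, and not part of the diff: a roughly equivalent scan can be run locally. This assumes the Trivy CLI is installed and that your Trivy version accepts a local path for the `repository` subcommand; the severity and unfixed-filter options mirror the workflow settings above.

```shell
# Scan the working copy much like the workflow's `scan-type: repo` step
trivy repository --ignore-unfixed --severity CRITICAL,HIGH .
```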
Cargo.lock (generated): 1323 lines changed (file diff suppressed because it is too large)
Cargo.toml: 89 lines changed
@@ -3,7 +3,7 @@ name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.79.0"
+rust-version = "1.80.0"
 resolver = "2"
 
 repository = "https://github.com/dani-garcia/vaultwarden"
@@ -18,42 +18,42 @@ build = "build.rs"
 enable_syslog = []
 mysql = ["diesel/mysql", "diesel_migrations/mysql"]
 postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
-sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]
+sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "dep:libsqlite3-sys"]
 # Enable to use a vendored and statically linked openssl
 vendored_openssl = ["openssl/vendored"]
 # Enable MiMalloc memory allocator to replace the default malloc
 # This can improve performance for Alpine builds
-enable_mimalloc = ["mimalloc"]
+enable_mimalloc = ["dep:mimalloc"]
 # This is a development dependency, and should only be used during development!
 # It enables the usage of the diesel_logger crate, which is able to output the generated queries.
 # You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
 # if you want to turn off the logging for a specific run.
-query_logger = ["diesel_logger"]
+query_logger = ["dep:diesel_logger"]
 
 # Enable unstable features, requires nightly
 # Currently only used to enable rusts official ip support
 unstable = []
 
-[target."cfg(not(windows))".dependencies]
+[target."cfg(unix)".dependencies]
 # Logging
 syslog = "6.1.1"
 
 [dependencies]
 # Logging
 log = "0.4.22"
-fern = { version = "0.6.2", features = ["syslog-6", "reopen-1"] }
+fern = { version = "0.7.0", features = ["syslog-6", "reopen-1"] }
 tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
 
 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }
 
 # Lazy initialization
-once_cell = "1.19.0"
+once_cell = "1.20.2"
 
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
-bigdecimal = "0.4.5"
+bigdecimal = "0.4.6"
 
 # Web framework
 rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -63,34 +63,34 @@ rocket_ws = { version ="0.1.1" }
 rmpv = "1.3.0" # MessagePack library
 
 # Concurrent HashMap used for WebSocket messaging and favicons
-dashmap = "6.0.1"
+dashmap = "6.1.0"
 
 # Async futures
-futures = "0.3.30"
-tokio = { version = "1.39.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
+futures = "0.3.31"
+tokio = { version = "1.41.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
 
 # A generic serialization/deserialization framework
-serde = { version = "1.0.204", features = ["derive"] }
-serde_json = "1.0.122"
+serde = { version = "1.0.214", features = ["derive"] }
+serde_json = "1.0.132"
 
 # A safe, extensible ORM and Query builder
-diesel = { version = "2.2.2", features = ["chrono", "r2d2", "numeric"] }
+diesel = { version = "2.2.4", features = ["chrono", "r2d2", "numeric"] }
 diesel_migrations = "2.2.0"
 diesel_logger = { version = "0.3.0", optional = true }
 
 # Bundled/Static SQLite
-libsqlite3-sys = { version = "0.29.0", features = ["bundled"], optional = true }
+libsqlite3-sys = { version = "0.30.1", features = ["bundled"], optional = true }
 
 # Crypto-related libraries
 rand = { version = "0.8.5", features = ["small_rng"] }
 ring = "0.17.8"
 
 # UUID generation
-uuid = { version = "1.10.0", features = ["v4"] }
+uuid = { version = "1.11.0", features = ["v4"] }
 
 # Date and time libraries
 chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false }
-chrono-tz = "0.9.0"
+chrono-tz = "0.10.0"
 time = "0.3.36"
 
 # Job scheduler
@@ -112,42 +112,42 @@ yubico = { version = "0.11.0", features = ["online-tokio"], default-features = f
 webauthn-rs = "0.3.2"
 
 # Handling of URL's for WebAuthn and favicons
-url = "2.5.2"
+url = "2.5.3"
 
 # Email libraries
-lettre = { version = "0.11.7", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.10", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"
 
 # HTML Template library
-handlebars = { version = "6.0.0", features = ["dir_source"] }
+handlebars = { version = "6.2.0", features = ["dir_source"] }
 
 # HTTP client (Used for favicons, version check, DUO and HIBP API)
-reqwest = { version = "0.12.5", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
+reqwest = { version = "0.12.9", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] }
 hickory-resolver = "0.24.1"
 
 # Favicon extraction libraries
-html5gum = "0.5.7"
-regex = { version = "1.10.6", features = ["std", "perf", "unicode-perl"], default-features = false }
+html5gum = "0.6.1"
+regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.7.1"
+bytes = "1.8.0"
 
 # Cache function results (Used for version check and favicon fetching)
-cached = { version = "0.53.1", features = ["async"] }
+cached = { version = "0.54.0", features = ["async"] }
 
 # Used for custom short lived cookie jar during favicon extraction
 cookie = "0.18.1"
-cookie_store = "0.21.0"
+cookie_store = "0.21.1"
 
 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.66"
+openssl = "0.10.68"
 
 # CLI argument parsing
 pico-args = "0.5.0"
 
 # Macro ident concatenation
 paste = "1.0.15"
-governor = "0.6.3"
+governor = "0.7.0"
 
 # Check client versions for specific features.
 semver = "1.0.23"
@@ -155,7 +155,7 @@ semver = "1.0.23"
 # Allow overriding the default memory allocator
 # Mainly used for the musl builds, since the default musl malloc is very slow
 mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true }
-which = "6.0.2"
+which = "7.0.0"
 
 # Argon2 library with support for the PHC format
 argon2 = "0.5.3"
@@ -163,6 +163,9 @@ argon2 = "0.5.3"
 # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
 rpassword = "7.3.1"
 
+# Loading a dynamic CSS Stylesheet
+grass_compiler = { version = "0.13.4", default-features = false }
+
 # Strip debuginfo from the release builds
 # The symbols are the provide better panic traces
 # Also enable fat LTO and use 1 codegen unit for optimizations
@@ -198,33 +201,46 @@ lto = "thin"
 codegen-units = 16
 
 # Linting config
+# https://doc.rust-lang.org/rustc/lints/groups.html
 [lints.rust]
 # Forbid
 unsafe_code = "forbid"
 non_ascii_idents = "forbid"
 
 # Deny
+deprecated_in_future = "deny"
 future_incompatible = { level = "deny", priority = -1 }
+keyword_idents = { level = "deny", priority = -1 }
+let_underscore = { level = "deny", priority = -1 }
 noop_method_call = "deny"
+refining_impl_trait = { level = "deny", priority = -1 }
 rust_2018_idioms = { level = "deny", priority = -1 }
 rust_2021_compatibility = { level = "deny", priority = -1 }
+# rust_2024_compatibility = { level = "deny", priority = -1 } # Enable once we are at MSRV 1.81.0
+single_use_lifetimes = "deny"
 trivial_casts = "deny"
 trivial_numeric_casts = "deny"
 unused = { level = "deny", priority = -1 }
 unused_import_braces = "deny"
 unused_lifetimes = "deny"
-deprecated_in_future = "deny"
+unused_qualifications = "deny"
+variant_size_differences = "deny"
+# The lints below are part of the rust_2024_compatibility group
+static-mut-refs = "deny"
+unsafe-op-in-unsafe-fn = "deny"
 
+# https://rust-lang.github.io/rust-clippy/stable/index.html
 [lints.clippy]
-# Allow
-# We need this since Rust v1.76+, since it has some bugs
-# https://github.com/rust-lang/rust-clippy/issues/12016
-blocks_in_conditions = "allow"
+# Warn
+dbg_macro = "warn"
+todo = "warn"
 
 # Deny
+case_sensitive_file_extension_comparisons = "deny"
 cast_lossless = "deny"
 clone_on_ref_ptr = "deny"
 equatable_if_let = "deny"
+filter_map_next = "deny"
 float_cmp_const = "deny"
 inefficient_to_string = "deny"
 iter_on_empty_collections = "deny"
@@ -234,13 +250,18 @@ macro_use_imports = "deny"
 manual_assert = "deny"
 manual_instant_elapsed = "deny"
 manual_string_new = "deny"
+match_on_vec_items = "deny"
 match_wildcard_for_single_variants = "deny"
 mem_forget = "deny"
+needless_continue = "deny"
 needless_lifetimes = "deny"
+option_option = "deny"
 string_add_assign = "deny"
 string_to_string = "deny"
 unnecessary_join = "deny"
 unnecessary_self_imports = "deny"
+unnested_or_patterns = "deny"
 unused_async = "deny"
+unused_self = "deny"
 verbose_file_reads = "deny"
 zero_sized_map_values = "deny"
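Not from the compare page: the feature list above (note the new `dep:` prefixes on optional dependencies) still requires selecting a database backend explicitly when building from source. The exact feature combination below is only an example, not the project's prescribed build command.

```shell
# Example local release build using the sqlite backend plus the MiMalloc allocator
cargo build --release --features sqlite,enable_mimalloc
```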
202
README.md
202
README.md
@@ -1,102 +1,144 @@
|
|||||||
### Alternative implementation of the Bitwarden server API written in Rust and compatible with [upstream Bitwarden clients](https://bitwarden.com/download/)*, perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.
|

|
||||||
|
|
||||||
📢 Note: This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues. Please see [#1642](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.
|
An alternative server implementation of the Bitwarden Client API, written in Rust and compatible with [official Bitwarden clients](https://bitwarden.com/download/) [[disclaimer](#disclaimer)], perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.
|
||||||
|
|
||||||
---
|
---
|
||||||
[](https://github.com/dani-garcia/vaultwarden/actions/workflows/build.yml)
|
|
||||||
[](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden)
|
|
||||||
[](https://hub.docker.com/r/vaultwarden/server)
|
|
||||||
[](https://quay.io/repository/vaultwarden/server)
|
|
||||||
[](https://deps.rs/repo/github/dani-garcia/vaultwarden)
|
|
||||||
[](https://github.com/dani-garcia/vaultwarden/releases/latest)
|
|
||||||
[](https://github.com/dani-garcia/vaultwarden/blob/main/LICENSE.txt)
|
|
||||||
[](https://matrix.to/#/#vaultwarden:matrix.org)
|
|
||||||
|
|
||||||
Image is based on [Rust implementation of Bitwarden API](https://github.com/dani-garcia/vaultwarden).
|
[](https://github.com/dani-garcia/vaultwarden/releases/latest)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden)
|
||||||
|
[](https://hub.docker.com/r/vaultwarden/server)
|
||||||
|
[](https://quay.io/repository/vaultwarden/server) <br>
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/graphs/contributors)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/network/members)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/stargazers)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/issues)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue+is%3Aclosed)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/blob/main/LICENSE.txt) <br>
|
||||||
|
[%3D'svg'%5D%2F*%5Blocal-name()%3D'g'%5D%5B2%5D%2F*%5Blocal-name()%3D'text'%5D%5B4%5D&style=flat-square&logo=rust&label=dependencies&color=005AA4)](https://deps.rs/repo/github/dani-garcia/vaultwarden)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/actions/workflows/release.yml)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/actions/workflows/build.yml) <br>
|
||||||
|
[](https://matrix.to/#/#vaultwarden:matrix.org)
|
||||||
|
[](https://github.com/dani-garcia/vaultwarden/discussions)
|
||||||
|
[](https://vaultwarden.discourse.group/)
|
||||||
|
|
||||||
**This project is not associated with the [Bitwarden](https://bitwarden.com/) project nor Bitwarden, Inc.**
|
> [!IMPORTANT]
|
||||||
|
> **When using this server, please report any bugs or suggestions directly to us (see [Get in touch](#get-in-touch)), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official Bitwarden support channels.**
|
||||||
|
|
||||||
#### ⚠️**IMPORTANT**⚠️: When using this server, please report any bugs or suggestions to us directly (look at the bottom of this page for ways to get in touch), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official support channels.
|
<br>
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
Basically full implementation of Bitwarden API is provided including:
|
A nearly complete implementation of the Bitwarden Client API is provided, including:
|
||||||
|
|
||||||
* Organizations support
|
* [Personal Vault](https://bitwarden.com/help/managing-items/)
|
||||||
* Attachments and Send
|
* [Send](https://bitwarden.com/help/about-send/)
|
||||||
* Vault API support
|
* [Attachments](https://bitwarden.com/help/attachments/)
|
||||||
* Serving the static files for Vault interface
|
* [Website icons](https://bitwarden.com/help/website-icons/)
|
||||||
* Website icons API
|
* [Personal API Key](https://bitwarden.com/help/personal-api-key/)
|
||||||
* Authenticator and U2F support
|
* [Organizations](https://bitwarden.com/help/getting-started-organizations/)
|
||||||
* YubiKey and Duo support
|
- [Collections](https://bitwarden.com/help/about-collections/),
|
||||||
* Emergency Access
|
[Password Sharing](https://bitwarden.com/help/sharing/),
|
||||||
|
[Member Roles](https://bitwarden.com/help/user-types-access-control/),
|
||||||
|
[Groups](https://bitwarden.com/help/about-groups/),
|
||||||
|
[Event Logs](https://bitwarden.com/help/event-logs/),
|
||||||
|
[Admin Password Reset](https://bitwarden.com/help/admin-reset/),
|
||||||
|
[Directory Connector](https://bitwarden.com/help/directory-sync/),
|
||||||
|
[Policies](https://bitwarden.com/help/policies/)
|
||||||
|
* [Multi/Two Factor Authentication](https://bitwarden.com/help/bitwarden-field-guide-two-step-login/)
|
||||||
|
- [Authenticator](https://bitwarden.com/help/setup-two-step-login-authenticator/),
|
||||||
|
[Email](https://bitwarden.com/help/setup-two-step-login-email/),
|
||||||
|
[FIDO2 WebAuthn](https://bitwarden.com/help/setup-two-step-login-fido/),
|
||||||
|
[YubiKey](https://bitwarden.com/help/setup-two-step-login-yubikey/),
|
||||||
|
[Duo](https://bitwarden.com/help/setup-two-step-login-duo/)
|
||||||
|
* [Emergency Access](https://bitwarden.com/help/emergency-access/)
|
||||||
|
* [Vaultwarden Admin Backend](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page)
|
||||||
|
* [Modified Web Vault client](https://github.com/dani-garcia/bw_web_builds) (Bundled within our containers)
|
||||||
|
|
||||||
## Installation
|
<br>
|
||||||
Pull the docker image and mount a volume from the host for persistent storage:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
docker pull vaultwarden/server:latest
|
|
||||||
docker run -d --name vaultwarden -v /vw-data/:/data/ --restart unless-stopped -p 80:80 vaultwarden/server:latest
|
|
||||||
```
|
|
||||||
This will preserve any persistent data under /vw-data/, you can adapt the path to whatever suits you.
|
|
||||||
|
|
||||||
**IMPORTANT**: Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.
|
|
||||||
|
|
||||||
This can be configured in [vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
|
|
||||||
|
|
||||||
If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy (see examples linked above).
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
See the [vaultwarden wiki](https://github.com/dani-garcia/vaultwarden/wiki) for more information on how to configure and run the vaultwarden server.
|
|
||||||
|
> [!IMPORTANT]
|
||||||
|
> Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.
|
||||||
|
>
|
||||||
|
>This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
|
||||||
|
>
|
||||||
|
>If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above).
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
>**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).**
|
||||||
|
|
||||||
|
The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).
|
||||||
|
|
||||||
|
There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).
|
||||||
|
|
||||||
|
### Docker/Podman CLI
|
||||||
|
|
||||||
|
Pull the container image and mount a volume from the host for persistent storage.<br>
|
||||||
|
You can replace `docker` with `podman` if you prefer to use podman.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
docker pull vaultwarden/server:latest
|
||||||
|
docker run --detach --name vaultwarden \
|
||||||
|
--env DOMAIN="https://vw.domain.tld" \
|
||||||
|
--volume /vw-data/:/data/ \
|
||||||
|
--restart unless-stopped \
|
||||||
|
--publish 80:80 \
|
||||||
|
vaultwarden/server:latest
|
||||||
|
```
|
||||||
|
|
||||||
|
This will preserve any persistent data under `/vw-data/`, you can adapt the path to whatever suits you.
|
||||||
|
|
||||||
|
### Docker Compose
|
||||||
|
|
||||||
|
To use Docker compose you need to create a `compose.yaml` which will hold the configuration to run the Vaultwarden container.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
services:
|
||||||
|
vaultwarden:
|
||||||
|
image: vaultwarden/server:latest
|
||||||
|
container_name: vaultwarden
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
DOMAIN: "https://vw.domain.tld"
|
||||||
|
volumes:
|
||||||
|
- ./vw-data/:/data/
|
||||||
|
ports:
|
||||||
|
- 80:80
|
||||||
|
```
|
||||||
|
|
||||||
|
<br>
|
||||||
|
|
||||||
## Get in touch

Have a question, suggestion or need help? Join our community on [Matrix](https://matrix.to/#/#vaultwarden:matrix.org), [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions) or [Discourse Forums](https://vaultwarden.discourse.group/).

Encountered a bug or crash? Please search our issue tracker and discussions to see if it's already been reported. If not, please [start a new discussion](https://github.com/dani-garcia/vaultwarden/discussions) or [create a new issue](https://github.com/dani-garcia/vaultwarden/issues/). Ensure you're using the latest version of Vaultwarden and there aren't any similar issues open or closed!

<br>
## Contributors
Thanks for your contribution to the project!

<!--
[![Contributors](https://contrib.rocks/image?repo=dani-garcia/vaultwarden)](https://github.com/dani-garcia/vaultwarden/graphs/contributors)<br>
<table>
  <tr>
    <td align="center">
      <a href="https://github.com/username">
        <img src="https://avatars.githubusercontent.com/u/725423?s=75&v=4" width="75px;" alt="username"/>
        <br />
        <sub><b>username</b></sub>
      </a>
    </td>
  </tr>
</table>
<br/>
-->
[![Contributors](https://contrib.rocks/image?repo=dani-garcia/vaultwarden)](https://github.com/dani-garcia/vaultwarden/graphs/contributors)

### Sponsors
Thanks for your contribution to the project!

<table>
  <tr>
    <td align="center">
      <a href="https://github.com/themightychris" style="width: 75px">
        <sub><b>Chris Alfano</b></sub>
      </a>
    </td>
  </tr>
  <tr>
    <td align="center">
      <a href="https://github.com/numberly" style="width: 75px">
        <sub><b>Numberly</b></sub>
      </a>
    </td>
  </tr>
  <tr>
    <td align="center">
      <a href="https://github.com/IQ333777" style="width: 75px">
        <sub><b>IQ333777</b></sub>
      </a>
    </td>
  </tr>
</table>

<br>

## Disclaimer
**This project is not associated with [Bitwarden](https://bitwarden.com/) or Bitwarden, Inc.**

However, one of the active maintainers for Vaultwarden is employed by Bitwarden and is allowed to contribute to the project on their own time. These contributions are independent of Bitwarden and are reviewed by other maintainers.

The maintainers work together to set the direction for the project, focusing on serving the self-hosting community, including individuals, families, and small organizations, while ensuring the project's sustainability.

**Please note:** We cannot be held liable for any data loss that may occur while using Vaultwarden. This includes passwords, attachments, and other information handled by the application. We highly recommend performing regular backups of your files and database. However, should you experience data loss, we encourage you to contact us immediately.

## Bitwarden_RS
This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues.<br>
Please see [#1642 - v1.21.0 release and project rename to Vaultwarden](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.
SECURITY.md (12 lines changed)

@@ -39,7 +39,11 @@ Thank you for helping keep Vaultwarden and our users safe!

 # How to contact us

-- You can contact us on Matrix https://matrix.to/#/#vaultwarden:matrix.org (user: `@danig:matrix.org`)
-- You can send an  to report a security issue.
-- If you want to send an encrypted email you can use the following GPG key:<br>
-  https://keyserver.ubuntu.com/pks/lookup?search=0xB9B7A108373276BF3C0406F9FC8A7D14C3CD543A&fingerprint=on&op=index
+- You can contact us on Matrix https://matrix.to/#/#vaultwarden:matrix.org (users: `@danig:matrix.org` and/or `@blackdex:matrix.org`)
+- You can send an  to report a security issue.<br>
+  If you want to send an encrypted email you can use the following GPG key: 13BB3A34C9E380258CE43D595CB150B31F6426BC<br>
+  It can be found on several public GPG key servers.<br>
+  * https://keys.openpgp.org/search?q=security%40vaultwarden.org
+  * https://keys.mailvelope.com/pks/lookup?op=get&search=security%40vaultwarden.org
+  * https://pgpkeys.eu/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index
+  * https://keyserver.ubuntu.com/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index
@@ -1,10 +1,11 @@
 ---
-vault_version: "v2024.6.2b"
-vault_image_digest: "sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55"
-# Cross Compile Docker Helper Scripts v1.4.0
+vault_version: "v2024.6.2c"
+vault_image_digest: "sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b"
+# Cross Compile Docker Helper Scripts v1.5.0
 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
-xx_image_digest: "sha256:0cd3f05c72d6c9b038eb135f91376ee1169ef3a330d34e418e65e2a5c2e9c0d4"
-rust_version: 1.80.1 # Rust version to be used
+# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
+xx_image_digest: "sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa"
+rust_version: 1.82.0 # Rust version to be used
 debian_version: bookworm # Debian release name to be used
 alpine_version: "3.20" # Alpine version to be used
 # For which platforms/architectures will we try to build images
@@ -19,23 +19,23 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#   $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2b
-#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2b
-#   [docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55]
+#   $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c
+#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c
+#   [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b]
 #
 # - Conversely, to get the tag name from the digest:
-#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55
-#   [docker.io/vaultwarden/web-vault:v2024.6.2b]
+#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b
+#   [docker.io/vaultwarden/web-vault:v2024.6.2c]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault

 ########################## ALPINE BUILD IMAGES ##########################
 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
 ## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.80.1 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.80.1 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.80.1 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.80.1 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.82.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.82.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.82.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.82.0 AS build_armv6

 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
@@ -19,24 +19,24 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#   $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2b
-#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2b
-#   [docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55]
+#   $ docker pull docker.io/vaultwarden/web-vault:v2024.6.2c
+#   $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.6.2c
+#   [docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b]
 #
 # - Conversely, to get the tag name from the digest:
-#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55
-#   [docker.io/vaultwarden/web-vault:v2024.6.2b]
+#   $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b
+#   [docker.io/vaultwarden/web-vault:v2024.6.2c]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:25a8b48792a3d2c16ddaea493eee93a0b6785648f2d8782c7537d198cb41be55 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:409ab328ca931439cb916b388a4bb784bd44220717aaf74cf71620c23e34fc2b AS vault

 ########################## Cross Compile Docker Helper Scripts ##########################
 ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
 ## And these bash scripts do not have any significant difference if at all
-FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:0cd3f05c72d6c9b038eb135f91376ee1169ef3a330d34e418e65e2a5c2e9c0d4 AS xx
+FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:1978e7a58a1777cb0ef0dde76bad60b7914b21da57cfa88047875e4f364297aa AS xx

 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.80.1-slim-bookworm AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.82.0-slim-bookworm AS build
 COPY --from=xx / /
 ARG TARGETARCH
 ARG TARGETVARIANT
@@ -1,5 +1,9 @@
 #!/bin/sh

+if [ -n "${UMASK}" ]; then
+    umask "${UMASK}"
+fi
+
 if [ -r /etc/vaultwarden.sh ]; then
     . /etc/vaultwarden.sh
 elif [ -r /etc/bitwarden_rs.sh ]; then
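The added block honors a `UMASK` environment variable before the rest of the startup script runs, which lets you tighten the permissions of files created under the data folder; a usage sketch (the value `0027` is only an example):

```shell
docker run --detach --name vaultwarden \
  --env UMASK=0027 \
  --env DOMAIN="https://vw.domain.tld" \
  --volume /vw-data/:/data/ \
  vaultwarden/server:latest
```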
@@ -0,0 +1 @@
+ALTER TABLE `twofactor_incomplete` DROP COLUMN `device_type`;
@@ -0,0 +1 @@
+ALTER TABLE twofactor_incomplete ADD COLUMN device_type INTEGER NOT NULL DEFAULT 14; -- 14 = Unknown Browser
@@ -0,0 +1 @@
+ALTER TABLE twofactor_incomplete DROP COLUMN device_type;
@@ -0,0 +1 @@
+ALTER TABLE twofactor_incomplete ADD COLUMN device_type INTEGER NOT NULL DEFAULT 14; -- 14 = Unknown Browser
@@ -0,0 +1 @@
+ALTER TABLE `twofactor_incomplete` DROP COLUMN `device_type`;
@@ -0,0 +1 @@
+ALTER TABLE twofactor_incomplete ADD COLUMN device_type INTEGER NOT NULL DEFAULT 14; -- 14 = Unknown Browser
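These paired down/up migrations add a `device_type` column to `twofactor_incomplete` for each database backend, defaulting existing rows to 14 (Unknown Browser). A quick sanity check against a SQLite instance might look like this (the database path is an assumption based on the usual `/data` volume):

```shell
sqlite3 /data/db.sqlite3 \
  "SELECT device_type, COUNT(*) FROM twofactor_incomplete GROUP BY device_type;"
```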
resources/vaultwarden-logo-auto.svg (new file, 78 lines, 7.6 KiB)

@@ -0,0 +1,78 @@
+[SVG source of the "Vaultwarden Logo" wordmark, 1365.8 x 280.5 viewBox. A `prefers-color-scheme: dark` style inverts the image so the logo adapts automatically to dark mode; the rest of the markup is the vector path data for the logo text and the cogwheel "V" emblem.]
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.80.1"
+channel = "1.82.0"
 components = [ "rustfmt", "clippy" ]
 profile = "minimal"
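The pinned toolchain moves from 1.80.1 to 1.82.0, matching the build images above. With rustup installed, the pin is picked up automatically from `rust-toolchain.toml`; a quick check from a checkout (output format varies between rustup versions):

```shell
cd vaultwarden
rustup show active-toolchain
# e.g. 1.82.0-x86_64-unknown-linux-gnu (overridden by the project's rust-toolchain.toml)
```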
@@ -25,7 +25,8 @@ use crate::{
|
|||||||
http_client::make_http_request,
|
http_client::make_http_request,
|
||||||
mail,
|
mail,
|
||||||
util::{
|
util::{
|
||||||
container_base_image, format_naive_datetime_local, get_display_size, is_running_in_container, NumberOrString,
|
container_base_image, format_naive_datetime_local, get_display_size, get_web_vault_version,
|
||||||
|
is_running_in_container, NumberOrString,
|
||||||
},
|
},
|
||||||
CONFIG, VERSION,
|
CONFIG, VERSION,
|
||||||
};
|
};
|
||||||
@@ -196,7 +197,7 @@ fn post_admin_login(
|
|||||||
|
|
||||||
let cookie = Cookie::build((COOKIE_NAME, jwt))
|
let cookie = Cookie::build((COOKIE_NAME, jwt))
|
||||||
.path(admin_path())
|
.path(admin_path())
|
||||||
.max_age(rocket::time::Duration::minutes(CONFIG.admin_session_lifetime()))
|
.max_age(time::Duration::minutes(CONFIG.admin_session_lifetime()))
|
||||||
.same_site(SameSite::Strict)
|
.same_site(SameSite::Strict)
|
||||||
.http_only(true)
|
.http_only(true)
|
||||||
.secure(secure.https);
|
.secure(secure.https);
|
||||||
@@ -297,7 +298,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon
|
|||||||
|
|
||||||
async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
|
async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
|
mail::send_invite(user, None, None, &CONFIG.invitation_org_name(), None).await
|
||||||
} else {
|
} else {
|
||||||
let invitation = Invitation::new(&user.email);
|
let invitation = Invitation::new(&user.email);
|
||||||
invitation.save(conn).await
|
invitation.save(conn).await
|
||||||
@@ -473,7 +474,7 @@ async fn resend_user_invite(uuid: &str, _token: AdminToken, mut conn: DbConn) ->
|
|||||||
}
|
}
|
||||||
|
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
|
mail::send_invite(&user, None, None, &CONFIG.invitation_org_name(), None).await
|
||||||
} else {
|
} else {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -575,11 +576,6 @@ async fn delete_organization(uuid: &str, _token: AdminToken, mut conn: DbConn) -
|
|||||||
org.delete(&mut conn).await
|
org.delete(&mut conn).await
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct WebVaultVersion {
|
|
||||||
version: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
struct GitRelease {
|
struct GitRelease {
|
||||||
tag_name: String,
|
tag_name: String,
|
||||||
@@ -679,18 +675,6 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||||||
use chrono::prelude::*;
|
use chrono::prelude::*;
|
||||||
use std::net::ToSocketAddrs;
|
use std::net::ToSocketAddrs;
|
||||||
|
|
||||||
// Get current running versions
|
|
||||||
let web_vault_version: WebVaultVersion =
|
|
||||||
match std::fs::read_to_string(format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
|
|
||||||
Ok(s) => serde_json::from_str(&s)?,
|
|
||||||
_ => match std::fs::read_to_string(format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
|
|
||||||
Ok(s) => serde_json::from_str(&s)?,
|
|
||||||
_ => WebVaultVersion {
|
|
||||||
version: String::from("Version file missing"),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// Execute some environment checks
|
// Execute some environment checks
|
||||||
let running_within_container = is_running_in_container();
|
let running_within_container = is_running_in_container();
|
||||||
let has_http_access = has_http_access().await;
|
let has_http_access = has_http_access().await;
|
||||||
@@ -710,13 +694,16 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||||||
|
|
||||||
let ip_header_name = &ip_header.0.unwrap_or_default();
|
let ip_header_name = &ip_header.0.unwrap_or_default();
|
||||||
|
|
||||||
|
// Get current running versions
|
||||||
|
let web_vault_version = get_web_vault_version();
|
||||||
|
|
||||||
let diagnostics_json = json!({
|
let diagnostics_json = json!({
|
||||||
"dns_resolved": dns_resolved,
|
"dns_resolved": dns_resolved,
|
||||||
"current_release": VERSION,
|
"current_release": VERSION,
|
||||||
"latest_release": latest_release,
|
"latest_release": latest_release,
|
||||||
"latest_commit": latest_commit,
|
"latest_commit": latest_commit,
|
||||||
"web_vault_enabled": &CONFIG.web_vault_enabled(),
|
"web_vault_enabled": &CONFIG.web_vault_enabled(),
|
||||||
"web_vault_version": web_vault_version.version.trim_start_matches('v'),
|
"web_vault_version": web_vault_version,
|
||||||
"latest_web_build": latest_web_build,
|
"latest_web_build": latest_web_build,
|
||||||
"running_within_container": running_within_container,
|
"running_within_container": running_within_container,
|
||||||
"container_base_image": if running_within_container { container_base_image() } else { "Not applicable" },
|
"container_base_image": if running_within_container { container_base_image() } else { "Not applicable" },
|
||||||
@@ -730,8 +717,8 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn)
|
|||||||
"db_version": get_sql_server_version(&mut conn).await,
|
"db_version": get_sql_server_version(&mut conn).await,
|
||||||
"admin_url": format!("{}/diagnostics", admin_url()),
|
"admin_url": format!("{}/diagnostics", admin_url()),
|
||||||
"overrides": &CONFIG.get_overrides().join(", "),
|
"overrides": &CONFIG.get_overrides().join(", "),
|
||||||
"host_arch": std::env::consts::ARCH,
|
"host_arch": env::consts::ARCH,
|
||||||
"host_os": std::env::consts::OS,
|
"host_os": env::consts::OS,
|
||||||
"server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
|
"server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
|
||||||
"server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference
|
"server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference
|
||||||
"ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference
|
"ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference
|
||||||
@@ -750,18 +737,27 @@ fn get_diagnostics_config(_token: AdminToken) -> Json<Value> {
|
|||||||
#[post("/config", data = "<data>")]
|
#[post("/config", data = "<data>")]
|
||||||
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
|
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
|
||||||
let data: ConfigBuilder = data.into_inner();
|
let data: ConfigBuilder = data.into_inner();
|
||||||
CONFIG.update_config(data)
|
if let Err(e) = CONFIG.update_config(data) {
|
||||||
|
err!(format!("Unable to save config: {e:?}"))
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/config/delete")]
|
#[post("/config/delete")]
|
||||||
fn delete_config(_token: AdminToken) -> EmptyResult {
|
fn delete_config(_token: AdminToken) -> EmptyResult {
|
||||||
CONFIG.delete_user_config()
|
if let Err(e) = CONFIG.delete_user_config() {
|
||||||
|
err!(format!("Unable to delete config: {e:?}"))
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/config/backup_db")]
|
#[post("/config/backup_db")]
|
||||||
async fn backup_db(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
|
async fn backup_db(_token: AdminToken, mut conn: DbConn) -> ApiResult<String> {
|
||||||
if *CAN_BACKUP {
|
if *CAN_BACKUP {
|
||||||
backup_database(&mut conn).await
|
match backup_database(&mut conn).await {
|
||||||
|
Ok(f) => Ok(format!("Backup to '{f}' was successful")),
|
||||||
|
Err(e) => err!(format!("Backup was unsuccessful {e}")),
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
err!("Can't back up current DB (Only SQLite supports this feature)");
|
err!("Can't back up current DB (Only SQLite supports this feature)");
|
||||||
}
|
}
|
||||||
|
@@ -1,3 +1,5 @@
|
|||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
use crate::db::DbPool;
|
use crate::db::DbPool;
|
||||||
use chrono::Utc;
|
use chrono::Utc;
|
||||||
use rocket::serde::json::Json;
|
use rocket::serde::json::Json;
|
||||||
@@ -13,7 +15,7 @@ use crate::{
|
|||||||
crypto,
|
crypto,
|
||||||
db::{models::*, DbConn},
|
db::{models::*, DbConn},
|
||||||
mail,
|
mail,
|
||||||
util::NumberOrString,
|
util::{format_date, NumberOrString},
|
||||||
CONFIG,
|
CONFIG,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -112,7 +114,7 @@ async fn is_email_2fa_required(org_user_uuid: Option<String>, conn: &mut DbConn)
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
if org_user_uuid.is_some() {
|
if org_user_uuid.is_some() {
|
||||||
return OrgPolicy::is_enabled_by_org(&org_user_uuid.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn)
|
return OrgPolicy::is_enabled_for_member(&org_user_uuid.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn)
|
||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
false
|
false
|
||||||
@@ -223,7 +225,7 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
|
|||||||
}
|
}
|
||||||
|
|
||||||
if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await {
|
if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await {
|
||||||
let _ = email::activate_email_2fa(&user, &mut conn).await;
|
email::activate_email_2fa(&user, &mut conn).await.ok();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -232,7 +234,7 @@ pub async fn _register(data: Json<RegisterData>, mut conn: DbConn) -> JsonResult
|
|||||||
// accept any open emergency access invitations
|
// accept any open emergency access invitations
|
||||||
if !CONFIG.mail_enabled() && CONFIG.emergency_access_allowed() {
|
if !CONFIG.mail_enabled() && CONFIG.emergency_access_allowed() {
|
||||||
for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &mut conn).await {
|
for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &mut conn).await {
|
||||||
let _ = emergency_invite.accept_invite(&user.uuid, &user.email, &mut conn).await;
|
emergency_invite.accept_invite(&user.uuid, &user.email, &mut conn).await.ok();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -477,6 +479,60 @@ struct KeyData {
|
|||||||
private_key: String,
|
private_key: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn validate_keydata(
|
||||||
|
data: &KeyData,
|
||||||
|
existing_ciphers: &[Cipher],
|
||||||
|
existing_folders: &[Folder],
|
||||||
|
existing_emergency_access: &[EmergencyAccess],
|
||||||
|
existing_user_orgs: &[UserOrganization],
|
||||||
|
existing_sends: &[Send],
|
||||||
|
) -> EmptyResult {
|
||||||
|
// Check that we're correctly rotating all the user's ciphers
|
||||||
|
let existing_cipher_ids = existing_ciphers.iter().map(|c| c.uuid.as_str()).collect::<HashSet<_>>();
|
||||||
|
let provided_cipher_ids = data
|
||||||
|
.ciphers
|
||||||
|
.iter()
|
||||||
|
.filter(|c| c.organization_id.is_none())
|
||||||
|
.filter_map(|c| c.id.as_deref())
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
if !provided_cipher_ids.is_superset(&existing_cipher_ids) {
|
||||||
|
err!("All existing ciphers must be included in the rotation")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we're correctly rotating all the user's folders
|
||||||
|
let existing_folder_ids = existing_folders.iter().map(|f| f.uuid.as_str()).collect::<HashSet<_>>();
|
||||||
|
let provided_folder_ids = data.folders.iter().filter_map(|f| f.id.as_deref()).collect::<HashSet<_>>();
|
||||||
|
if !provided_folder_ids.is_superset(&existing_folder_ids) {
|
||||||
|
err!("All existing folders must be included in the rotation")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we're correctly rotating all the user's emergency access keys
|
||||||
|
let existing_emergency_access_ids =
|
||||||
|
existing_emergency_access.iter().map(|ea| ea.uuid.as_str()).collect::<HashSet<_>>();
|
||||||
|
let provided_emergency_access_ids =
|
||||||
|
data.emergency_access_keys.iter().map(|ea| ea.id.as_str()).collect::<HashSet<_>>();
|
||||||
|
if !provided_emergency_access_ids.is_superset(&existing_emergency_access_ids) {
|
||||||
|
err!("All existing emergency access keys must be included in the rotation")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we're correctly rotating all the user's reset password keys
|
||||||
|
let existing_reset_password_ids = existing_user_orgs.iter().map(|uo| uo.org_uuid.as_str()).collect::<HashSet<_>>();
|
||||||
|
let provided_reset_password_ids =
|
||||||
|
data.reset_password_keys.iter().map(|rp| rp.organization_id.as_str()).collect::<HashSet<_>>();
|
||||||
|
if !provided_reset_password_ids.is_superset(&existing_reset_password_ids) {
|
||||||
|
err!("All existing reset password keys must be included in the rotation")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that we're correctly rotating all the user's sends
|
||||||
|
let existing_send_ids = existing_sends.iter().map(|s| s.uuid.as_str()).collect::<HashSet<_>>();
|
||||||
|
let provided_send_ids = data.sends.iter().filter_map(|s| s.id.as_deref()).collect::<HashSet<_>>();
|
||||||
|
if !provided_send_ids.is_superset(&existing_send_ids) {
|
||||||
|
err!("All existing sends must be included in the rotation")
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
#[post("/accounts/key", data = "<data>")]
|
#[post("/accounts/key", data = "<data>")]
|
||||||
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
|
||||||
// TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
|
// TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
|
||||||
@@ -490,24 +546,39 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||||||
// Bitwarden does not process the import if there is one item invalid.
|
// Bitwarden does not process the import if there is one item invalid.
|
||||||
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
|
// Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
|
||||||
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
|
// TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
|
||||||
Cipher::validate_notes(&data.ciphers)?;
|
Cipher::validate_cipher_data(&data.ciphers)?;
|
||||||
|
|
||||||
let user_uuid = &headers.user.uuid;
|
let user_uuid = &headers.user.uuid;
|
||||||
|
|
||||||
|
// TODO: Ideally we'd do everything after this point in a single transaction.
|
||||||
|
|
||||||
|
let mut existing_ciphers = Cipher::find_owned_by_user(user_uuid, &mut conn).await;
|
||||||
|
let mut existing_folders = Folder::find_by_user(user_uuid, &mut conn).await;
|
||||||
|
let mut existing_emergency_access = EmergencyAccess::find_all_by_grantor_uuid(user_uuid, &mut conn).await;
|
||||||
|
let mut existing_user_orgs = UserOrganization::find_by_user(user_uuid, &mut conn).await;
|
||||||
|
// We only rotate the reset password key if it is set.
|
||||||
|
existing_user_orgs.retain(|uo| uo.reset_password_key.is_some());
|
||||||
|
let mut existing_sends = Send::find_by_user(user_uuid, &mut conn).await;
|
||||||
|
|
||||||
|
validate_keydata(
|
||||||
|
&data,
|
||||||
|
&existing_ciphers,
|
||||||
|
&existing_folders,
|
||||||
|
&existing_emergency_access,
|
||||||
|
&existing_user_orgs,
|
||||||
|
&existing_sends,
|
||||||
|
)?;
|
||||||
|
|
||||||
// Update folder data
|
// Update folder data
|
||||||
for folder_data in data.folders {
|
for folder_data in data.folders {
|
||||||
// Skip `null` folder id entries.
|
// Skip `null` folder id entries.
|
||||||
// See: https://github.com/bitwarden/clients/issues/8453
|
// See: https://github.com/bitwarden/clients/issues/8453
|
||||||
if let Some(folder_id) = folder_data.id {
|
if let Some(folder_id) = folder_data.id {
|
||||||
let mut saved_folder = match Folder::find_by_uuid(&folder_id, &mut conn).await {
|
let saved_folder = match existing_folders.iter_mut().find(|f| f.uuid == folder_id) {
|
||||||
Some(folder) => folder,
|
Some(folder) => folder,
|
||||||
None => err!("Folder doesn't exist"),
|
None => err!("Folder doesn't exist"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if &saved_folder.user_uuid != user_uuid {
|
|
||||||
err!("The folder is not owned by the user")
|
|
||||||
}
|
|
||||||
|
|
||||||
saved_folder.name = folder_data.name;
|
saved_folder.name = folder_data.name;
|
||||||
saved_folder.save(&mut conn).await?
|
saved_folder.save(&mut conn).await?
|
||||||
}
|
}
|
||||||
@@ -515,9 +586,8 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||||||
|
|
||||||
// Update emergency access data
|
// Update emergency access data
|
||||||
for emergency_access_data in data.emergency_access_keys {
|
for emergency_access_data in data.emergency_access_keys {
|
||||||
let mut saved_emergency_access =
|
let saved_emergency_access =
|
||||||
match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emergency_access_data.id, user_uuid, &mut conn).await
|
match existing_emergency_access.iter_mut().find(|ea| ea.uuid == emergency_access_data.id) {
|
||||||
{
|
|
||||||
Some(emergency_access) => emergency_access,
|
Some(emergency_access) => emergency_access,
|
||||||
None => err!("Emergency access doesn't exist or is not owned by the user"),
|
None => err!("Emergency access doesn't exist or is not owned by the user"),
|
||||||
};
|
};
|
||||||
@@ -528,13 +598,11 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||||||
|
|
||||||
// Update reset password data
|
// Update reset password data
|
||||||
for reset_password_data in data.reset_password_keys {
|
for reset_password_data in data.reset_password_keys {
|
||||||
let mut user_org =
|
let user_org = match existing_user_orgs.iter_mut().find(|uo| uo.org_uuid == reset_password_data.organization_id)
|
||||||
match UserOrganization::find_by_user_and_org(user_uuid, &reset_password_data.organization_id, &mut conn)
|
{
|
||||||
.await
|
Some(reset_password) => reset_password,
|
||||||
{
|
None => err!("Reset password doesn't exist"),
|
||||||
Some(reset_password) => reset_password,
|
};
|
||||||
None => err!("Reset password doesn't exist"),
|
|
||||||
};
|
|
||||||
|
|
||||||
user_org.reset_password_key = Some(reset_password_data.reset_password_key);
|
user_org.reset_password_key = Some(reset_password_data.reset_password_key);
|
||||||
user_org.save(&mut conn).await?
|
user_org.save(&mut conn).await?
|
||||||
@@ -542,12 +610,12 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||||||
|
|
||||||
// Update send data
|
// Update send data
|
||||||
for send_data in data.sends {
|
for send_data in data.sends {
|
||||||
let mut send = match Send::find_by_uuid(send_data.id.as_ref().unwrap(), &mut conn).await {
|
let send = match existing_sends.iter_mut().find(|s| &s.uuid == send_data.id.as_ref().unwrap()) {
|
||||||
Some(send) => send,
|
Some(send) => send,
|
||||||
None => err!("Send doesn't exist"),
|
None => err!("Send doesn't exist"),
|
||||||
};
|
};
|
||||||
|
|
||||||
update_send_from_data(&mut send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?;
|
update_send_from_data(send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update cipher data
|
// Update cipher data
|
||||||
@@ -555,20 +623,15 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn,
|
|||||||
|
|
||||||
for cipher_data in data.ciphers {
|
for cipher_data in data.ciphers {
|
||||||
if cipher_data.organization_id.is_none() {
|
if cipher_data.organization_id.is_none() {
|
||||||
let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.id.as_ref().unwrap(), &mut conn).await {
|
let saved_cipher = match existing_ciphers.iter_mut().find(|c| &c.uuid == cipher_data.id.as_ref().unwrap()) {
|
||||||
Some(cipher) => cipher,
|
Some(cipher) => cipher,
|
||||||
None => err!("Cipher doesn't exist"),
|
None => err!("Cipher doesn't exist"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if saved_cipher.user_uuid.as_ref().unwrap() != user_uuid {
|
|
||||||
err!("The cipher is not owned by the user")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prevent triggering cipher updates via WebSockets by settings UpdateType::None
|
// Prevent triggering cipher updates via WebSockets by settings UpdateType::None
|
||||||
// The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues.
|
// The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues.
|
||||||
// We force the users to logout after the user has been saved to try and prevent these issues.
|
// We force the users to logout after the user has been saved to try and prevent these issues.
|
||||||
update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None)
|
update_cipher_from_data(saved_cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?
|
||||||
.await?
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -842,7 +905,7 @@ struct PasswordHintData {
|
|||||||
|
|
||||||
#[post("/accounts/password-hint", data = "<data>")]
|
#[post("/accounts/password-hint", data = "<data>")]
|
||||||
async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
|
async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
|
||||||
if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() {
|
if !CONFIG.password_hints_allowed() || (!CONFIG.mail_enabled() && !CONFIG.show_password_hint()) {
|
||||||
err!("This server is not configured to provide password hints.");
|
err!("This server is not configured to provide password hints.");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -901,14 +964,12 @@ pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value
|
|||||||
None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None),
|
None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None),
|
||||||
};
|
};
|
||||||
|
|
||||||
let result = json!({
|
Json(json!({
|
||||||
"kdf": kdf_type,
|
"kdf": kdf_type,
|
||||||
"kdfIterations": kdf_iter,
|
"kdfIterations": kdf_iter,
|
||||||
"kdfMemory": kdf_mem,
|
"kdfMemory": kdf_mem,
|
||||||
"kdfParallelism": kdf_para,
|
"kdfParallelism": kdf_para,
|
||||||
});
|
}))
|
||||||
|
|
||||||
Json(result)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
|
// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
|
||||||
@@ -1038,7 +1099,7 @@ async fn put_device_token(uuid: &str, data: Json<PushToken>, headers: Headers, m
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
} else {
|
} else {
|
||||||
// Try to unregister already registered device
|
// Try to unregister already registered device
|
||||||
let _ = unregister_push_device(device.push_uuid).await;
|
unregister_push_device(device.push_uuid).await.ok();
|
||||||
}
|
}
|
||||||
// clear the push_uuid
|
// clear the push_uuid
|
||||||
device.push_uuid = None;
|
device.push_uuid = None;
|
||||||
@@ -1084,14 +1145,15 @@ struct AuthRequestRequest {
|
|||||||
device_identifier: String,
|
device_identifier: String,
|
||||||
email: String,
|
email: String,
|
||||||
public_key: String,
|
public_key: String,
|
||||||
#[serde(alias = "type")]
|
// Not used for now
|
||||||
_type: i32,
|
// #[serde(alias = "type")]
|
||||||
|
// _type: i32,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/auth-requests", data = "<data>")]
|
#[post("/auth-requests", data = "<data>")]
|
||||||
async fn post_auth_request(
|
async fn post_auth_request(
|
||||||
data: Json<AuthRequestRequest>,
|
data: Json<AuthRequestRequest>,
|
||||||
headers: ClientHeaders,
|
client_headers: ClientHeaders,
|
||||||
mut conn: DbConn,
|
mut conn: DbConn,
|
||||||
nt: Notify<'_>,
|
nt: Notify<'_>,
|
||||||
) -> JsonResult {
|
) -> JsonResult {
|
||||||
@@ -1099,16 +1161,20 @@ async fn post_auth_request(
|
|||||||
|
|
||||||
let user = match User::find_by_mail(&data.email, &mut conn).await {
|
let user = match User::find_by_mail(&data.email, &mut conn).await {
|
||||||
Some(user) => user,
|
Some(user) => user,
|
||||||
None => {
|
None => err!("AuthRequest doesn't exist", "User not found"),
|
||||||
err!("AuthRequest doesn't exist")
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Validate device uuid and type
|
||||||
|
match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
|
||||||
|
Some(device) if device.atype == client_headers.device_type => {}
|
||||||
|
_ => err!("AuthRequest doesn't exist", "Device verification failed"),
|
||||||
|
}
|
||||||
|
|
||||||
let mut auth_request = AuthRequest::new(
|
let mut auth_request = AuthRequest::new(
|
||||||
user.uuid.clone(),
|
user.uuid.clone(),
|
||||||
data.device_identifier.clone(),
|
data.device_identifier.clone(),
|
||||||
headers.device_type,
|
client_headers.device_type,
|
||||||
headers.ip.ip.to_string(),
|
client_headers.ip.ip.to_string(),
|
||||||
data.access_code,
|
data.access_code,
|
||||||
data.public_key,
|
data.public_key,
|
||||||
);
|
);
|
||||||
@@ -1123,7 +1189,7 @@ async fn post_auth_request(
|
|||||||
"requestIpAddress": auth_request.request_ip,
|
"requestIpAddress": auth_request.request_ip,
|
||||||
"key": null,
|
"key": null,
|
||||||
"masterPasswordHash": null,
|
"masterPasswordHash": null,
|
||||||
"creationDate": auth_request.creation_date.and_utc(),
|
"creationDate": format_date(&auth_request.creation_date),
|
||||||
"responseDate": null,
|
"responseDate": null,
|
||||||
"requestApproved": false,
|
"requestApproved": false,
|
||||||
"origin": CONFIG.domain_origin(),
|
"origin": CONFIG.domain_origin(),
|
||||||
@@ -1132,31 +1198,31 @@ async fn post_auth_request(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[get("/auth-requests/<uuid>")]
|
#[get("/auth-requests/<uuid>")]
|
||||||
async fn get_auth_request(uuid: &str, mut conn: DbConn) -> JsonResult {
|
async fn get_auth_request(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
|
||||||
let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
||||||
Some(auth_request) => auth_request,
|
Some(auth_request) => auth_request,
|
||||||
None => {
|
None => err!("AuthRequest doesn't exist", "Record not found"),
|
||||||
err!("AuthRequest doesn't exist")
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
|
if headers.user.uuid != auth_request.user_uuid {
|
||||||
|
err!("AuthRequest doesn't exist", "User uuid's do not match")
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(json!(
|
let response_date_utc = auth_request.response_date.map(|response_date| format_date(&response_date));
|
||||||
{
|
|
||||||
"id": uuid,
|
Ok(Json(json!({
|
||||||
"publicKey": auth_request.public_key,
|
"id": uuid,
|
||||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
"publicKey": auth_request.public_key,
|
||||||
"requestIpAddress": auth_request.request_ip,
|
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||||
"key": auth_request.enc_key,
|
"requestIpAddress": auth_request.request_ip,
|
||||||
"masterPasswordHash": auth_request.master_password_hash,
|
"key": auth_request.enc_key,
|
||||||
"creationDate": auth_request.creation_date.and_utc(),
|
"masterPasswordHash": auth_request.master_password_hash,
|
||||||
"responseDate": response_date_utc,
|
"creationDate": format_date(&auth_request.creation_date),
|
||||||
"requestApproved": auth_request.approved,
|
"responseDate": response_date_utc,
|
||||||
"origin": CONFIG.domain_origin(),
|
"requestApproved": auth_request.approved,
|
||||||
"object":"auth-request"
|
"origin": CONFIG.domain_origin(),
|
||||||
}
|
"object":"auth-request"
|
||||||
)))
|
})))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
@@ -1172,6 +1238,7 @@ struct AuthResponseRequest {
|
|||||||
async fn put_auth_request(
|
async fn put_auth_request(
|
||||||
uuid: &str,
|
uuid: &str,
|
||||||
data: Json<AuthResponseRequest>,
|
data: Json<AuthResponseRequest>,
|
||||||
|
headers: Headers,
|
||||||
mut conn: DbConn,
|
mut conn: DbConn,
|
||||||
ant: AnonymousNotify<'_>,
|
ant: AnonymousNotify<'_>,
|
||||||
nt: Notify<'_>,
|
nt: Notify<'_>,
|
||||||
@@ -1179,71 +1246,84 @@ async fn put_auth_request(
|
|||||||
let data = data.into_inner();
|
let data = data.into_inner();
|
||||||
let mut auth_request: AuthRequest = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
let mut auth_request: AuthRequest = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
|
||||||
Some(auth_request) => auth_request,
|
Some(auth_request) => auth_request,
|
||||||
None => {
|
None => err!("AuthRequest doesn't exist", "Record not found"),
|
||||||
err!("AuthRequest doesn't exist")
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
auth_request.approved = Some(data.request_approved);
|
if headers.user.uuid != auth_request.user_uuid {
|
||||||
auth_request.enc_key = Some(data.key);
|
err!("AuthRequest doesn't exist", "User uuid's do not match")
|
||||||
auth_request.master_password_hash = data.master_password_hash;
|
|
||||||
auth_request.response_device_id = Some(data.device_identifier.clone());
|
|
||||||
auth_request.save(&mut conn).await?;
|
|
||||||
|
|
||||||
if auth_request.approved.unwrap_or(false) {
|
|
||||||
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
|
|
||||||
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.device_identifier, &mut conn).await;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
|
if auth_request.approved.is_some() {
|
||||||
|
err!("An authentication request with the same device already exists")
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(json!(
|
let response_date = Utc::now().naive_utc();
|
||||||
{
|
let response_date_utc = format_date(&response_date);
|
||||||
"id": uuid,
|
|
||||||
"publicKey": auth_request.public_key,
|
if data.request_approved {
|
||||||
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
auth_request.approved = Some(data.request_approved);
|
||||||
"requestIpAddress": auth_request.request_ip,
|
auth_request.enc_key = Some(data.key);
|
||||||
"key": auth_request.enc_key,
|
auth_request.master_password_hash = data.master_password_hash;
|
||||||
"masterPasswordHash": auth_request.master_password_hash,
|
auth_request.response_device_id = Some(data.device_identifier.clone());
|
||||||
"creationDate": auth_request.creation_date.and_utc(),
|
auth_request.response_date = Some(response_date);
|
||||||
"responseDate": response_date_utc,
|
auth_request.save(&mut conn).await?;
|
||||||
"requestApproved": auth_request.approved,
|
|
||||||
"origin": CONFIG.domain_origin(),
|
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
|
||||||
"object":"auth-request"
|
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.device_identifier, &mut conn).await;
|
||||||
}
|
} else {
|
||||||
)))
|
// If denied, there's no reason to keep the request
|
||||||
|
auth_request.delete(&mut conn).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Json(json!({
|
||||||
|
"id": uuid,
|
||||||
|
"publicKey": auth_request.public_key,
|
||||||
|
"requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
|
||||||
|
"requestIpAddress": auth_request.request_ip,
|
||||||
|
"key": auth_request.enc_key,
|
||||||
|
"masterPasswordHash": auth_request.master_password_hash,
|
||||||
|
"creationDate": format_date(&auth_request.creation_date),
|
||||||
|
"responseDate": response_date_utc,
|
||||||
|
"requestApproved": auth_request.approved,
|
||||||
|
"origin": CONFIG.domain_origin(),
|
||||||
|
"object":"auth-request"
|
||||||
|
})))
|
||||||
}
|
}
|
||||||
|
|
 #[get("/auth-requests/<uuid>/response?<code>")]
-async fn get_auth_request_response(uuid: &str, code: &str, mut conn: DbConn) -> JsonResult {
+async fn get_auth_request_response(
+    uuid: &str,
+    code: &str,
+    client_headers: ClientHeaders,
+    mut conn: DbConn,
+) -> JsonResult {
     let auth_request = match AuthRequest::find_by_uuid(uuid, &mut conn).await {
         Some(auth_request) => auth_request,
-        None => {
-            err!("AuthRequest doesn't exist")
-        }
+        None => err!("AuthRequest doesn't exist", "User not found"),
     };

-    if !auth_request.check_access_code(code) {
-        err!("Access code invalid doesn't exist")
+    if auth_request.device_type != client_headers.device_type
+        || auth_request.request_ip != client_headers.ip.ip.to_string()
+        || !auth_request.check_access_code(code)
+    {
+        err!("AuthRequest doesn't exist", "Invalid device, IP or code")
     }

-    let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc());
+    let response_date_utc = auth_request.response_date.map(|response_date| format_date(&response_date));

-    Ok(Json(json!(
-        {
-            "id": uuid,
-            "publicKey": auth_request.public_key,
-            "requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
-            "requestIpAddress": auth_request.request_ip,
-            "key": auth_request.enc_key,
-            "masterPasswordHash": auth_request.master_password_hash,
-            "creationDate": auth_request.creation_date.and_utc(),
-            "responseDate": response_date_utc,
-            "requestApproved": auth_request.approved,
-            "origin": CONFIG.domain_origin(),
-            "object":"auth-request"
-        }
-    )))
+    Ok(Json(json!({
+        "id": uuid,
+        "publicKey": auth_request.public_key,
+        "requestDeviceType": DeviceType::from_i32(auth_request.device_type).to_string(),
+        "requestIpAddress": auth_request.request_ip,
+        "key": auth_request.enc_key,
+        "masterPasswordHash": auth_request.master_password_hash,
+        "creationDate": format_date(&auth_request.creation_date),
+        "responseDate": response_date_utc,
+        "requestApproved": auth_request.approved,
+        "origin": CONFIG.domain_origin(),
+        "object":"auth-request"
+    })))
 }

 #[get("/auth-requests")]
@@ -1255,7 +1335,7 @@ async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {
             .iter()
             .filter(|request| request.approved.is_none())
             .map(|request| {
-                let response_date_utc = request.response_date.map(|response_date| response_date.and_utc());
+                let response_date_utc = request.response_date.map(|response_date| format_date(&response_date));

                 json!({
                     "id": request.uuid,
@@ -1264,7 +1344,7 @@ async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {
                     "requestIpAddress": request.request_ip,
                     "key": request.enc_key,
                     "masterPasswordHash": request.master_password_hash,
-                    "creationDate": request.creation_date.and_utc(),
+                    "creationDate": format_date(&request.creation_date),
                     "responseDate": response_date_utc,
                     "requestApproved": request.approved,
                     "origin": CONFIG.domain_origin(),
@@ -10,6 +10,7 @@ use rocket::{
 };
 use serde_json::Value;

+use crate::auth::ClientVersion;
 use crate::util::NumberOrString;
 use crate::{
     api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType},
@@ -104,11 +105,27 @@ struct SyncData {
 }

 #[get("/sync?<data..>")]
-async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value> {
+async fn sync(
+    data: SyncData,
+    headers: Headers,
+    client_version: Option<ClientVersion>,
+    mut conn: DbConn,
+) -> Json<Value> {
     let user_json = headers.user.to_json(&mut conn).await;

     // Get all ciphers which are visible by the user
-    let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
+    let mut ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
+
+    // Filter out SSH keys if the client version is less than 2024.12.0
+    let show_ssh_keys = if let Some(client_version) = client_version {
+        let ver_match = semver::VersionReq::parse(">=2024.12.0").unwrap();
+        ver_match.matches(&client_version.0)
+    } else {
+        false
+    };
+    if !show_ssh_keys {
+        ciphers.retain(|c| c.atype != 5);
+    }

     let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, CipherSyncType::User, &mut conn).await;
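The gist of the SSH-key gating above is a plain semver comparison against the version the client advertises. A minimal, self-contained sketch of the same pattern; the 2024.12.0 cutoff and the atype == 5 marker are taken from the diff, while the Item struct and main() are only illustrative:

    use semver::{Version, VersionReq};

    struct Item {
        atype: i32,
    }

    // Returns true when the client is new enough to understand SSH-key items.
    fn client_supports_ssh_keys(client_version: Option<&str>) -> bool {
        let Some(v) = client_version else { return false };
        let Ok(version) = Version::parse(v) else { return false };
        // Same cutoff as in the diff: clients older than 2024.12.0 never see type-5 ciphers.
        VersionReq::parse(">=2024.12.0").unwrap().matches(&version)
    }

    fn filter_for_client(mut items: Vec<Item>, client_version: Option<&str>) -> Vec<Item> {
        if !client_supports_ssh_keys(client_version) {
            items.retain(|i| i.atype != 5);
        }
        items
    }

    fn main() {
        let items = vec![Item { atype: 1 }, Item { atype: 5 }];
        assert_eq!(filter_for_client(items, Some("2024.6.2")).len(), 1);
    }

Defaulting to "not supported" when no version header is present mirrors the diff, which hides SSH keys from any client that cannot be identified.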
@@ -150,7 +167,6 @@ async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json<Value>
         "ciphers": ciphers_json,
         "domains": domains_json,
         "sends": sends_json,
-        "unofficialServer": true,
         "object": "sync"
     }))
 }
@@ -206,7 +222,7 @@ pub struct CipherData {
     // Id is optional as it is included only in bulk share
     pub id: Option<String>,
     // Folder id is not included in import
-    folder_id: Option<String>,
+    pub folder_id: Option<String>,
     // TODO: Some of these might appear all the time, no need for Option
     #[serde(alias = "organizationID")]
     pub organization_id: Option<String>,
@@ -217,7 +233,8 @@ pub struct CipherData {
     Login = 1,
     SecureNote = 2,
     Card = 3,
-    Identity = 4
+    Identity = 4,
+    SshKey = 5
     */
     pub r#type: i32,
     pub name: String,
@@ -229,11 +246,12 @@ pub struct CipherData {
     secure_note: Option<Value>,
     card: Option<Value>,
     identity: Option<Value>,
+    ssh_key: Option<Value>,

     favorite: Option<bool>,
     reprompt: Option<i32>,

-    password_history: Option<Value>,
+    pub password_history: Option<Value>,

     // These are used during key rotation
     // 'Attachments' is unused, contains map of {id: filename}
@@ -287,10 +305,6 @@ async fn post_ciphers_create(
     if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() {
         err!("You must select at least one collection.");
     }
-    // reverse sanity check to prevent corruptions
-    if !data.collection_ids.is_empty() && data.cipher.organization_id.is_none() {
-        err!("The client has not provided an organization id!");
-    }

     // This check is usually only needed in update_cipher_from_data(), but we
     // need it here as well to avoid creating an empty cipher in the call to
@@ -474,6 +488,7 @@ pub async fn update_cipher_from_data(
         2 => data.secure_note,
         3 => data.card,
         4 => data.identity,
+        5 => data.ssh_key,
         _ => err!("Invalid type"),
     };

@@ -567,14 +582,14 @@ async fn post_ciphers_import(
     // Bitwarden does not process the import if there is one item invalid.
     // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
     // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
-    Cipher::validate_notes(&data.ciphers)?;
+    Cipher::validate_cipher_data(&data.ciphers)?;

     // Read and create the folders
-    let existing_folders: Vec<String> =
-        Folder::find_by_user(&headers.user.uuid, &mut conn).await.into_iter().map(|f| f.uuid).collect();
+    let existing_folders: HashSet<Option<String>> =
+        Folder::find_by_user(&headers.user.uuid, &mut conn).await.into_iter().map(|f| Some(f.uuid)).collect();
     let mut folders: Vec<String> = Vec::with_capacity(data.folders.len());
     for folder in data.folders.into_iter() {
-        let folder_uuid = if folder.id.is_some() && existing_folders.contains(folder.id.as_ref().unwrap()) {
+        let folder_uuid = if existing_folders.contains(&folder.id) {
             folder.id.unwrap()
         } else {
             let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.name);
@@ -586,8 +601,8 @@ async fn post_ciphers_import(
     }

     // Read the relations between folders and ciphers
+    // Ciphers can only be in one folder at the same time
     let mut relations_map = HashMap::with_capacity(data.folder_relationships.len());
-
     for relation in data.folder_relationships {
         relations_map.insert(relation.key, relation.value);
     }
@@ -707,6 +722,7 @@ async fn put_cipher_partial(
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
 struct CollectionsAdminData {
+    #[serde(alias = "CollectionIds")]
     collection_ids: Vec<String>,
 }

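The folder-import change above swaps a Vec<String> lookup for a HashSet<Option<String>>, which lets the old "folder.id.is_some() && contains(...)" pair collapse into a single contains(&folder.id) call. A small self-contained sketch of that pattern; the example ids are invented:

    use std::collections::HashSet;

    fn main() {
        // Folder ids that already exist for the user (normally loaded from the database).
        let existing: HashSet<Option<String>> =
            ["a1", "b2"].into_iter().map(|id| Some(id.to_string())).collect();

        // Ids coming in from an import: may be Some(existing), Some(unknown) or None.
        let incoming = vec![Some("a1".to_string()), Some("zz".to_string()), None];

        for id in incoming {
            if existing.contains(&id) {
                // Safe to unwrap: None is never inserted into `existing`, so a hit implies Some(_).
                println!("reuse folder {}", id.unwrap());
            } else {
                println!("create a new folder");
            }
        }
    }

Because only Some(uuid) values are ever inserted, a None id can never match, so the unwrap in the reuse branch cannot panic.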
@@ -135,12 +135,13 @@ async fn put_eq_domains(data: Json<EquivDomainData>, headers: Headers, conn: DbC
 }

 #[get("/hibp/breach?<username>")]
-async fn hibp_breach(username: &str) -> JsonResult {
-    let url = format!(
-        "https://haveibeenpwned.com/api/v3/breachedaccount/{username}?truncateResponse=false&includeUnverified=false"
-    );
+async fn hibp_breach(username: &str, _headers: Headers) -> JsonResult {
+    let username: String = url::form_urlencoded::byte_serialize(username.as_bytes()).collect();

     if let Some(api_key) = crate::CONFIG.hibp_api_key() {
+        let url = format!(
+            "https://haveibeenpwned.com/api/v3/breachedaccount/{username}?truncateResponse=false&includeUnverified=false"
+        );

         let res = make_http_request(Method::GET, &url)?.header("hibp-api-key", api_key).send().await?;

         // If we get a 404, return a 404, it means no breached accounts
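The hibp_breach change percent-encodes the user-supplied username before it is interpolated into the HIBP URL. A standalone sketch of just the encoding step, using the same url crate helper; the example username is arbitrary:

    fn main() {
        let raw = "user name+tag@example.com";
        // Percent-encode the raw value so it is safe to embed in a URL path or query.
        let encoded: String = url::form_urlencoded::byte_serialize(raw.as_bytes()).collect();
        let url = format!(
            "https://haveibeenpwned.com/api/v3/breachedaccount/{encoded}?truncateResponse=false&includeUnverified=false"
        );
        println!("{url}");
        // Spaces become '+', '@' becomes %40, and so on.
        assert!(encoded.contains("%40"));
    }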
@@ -202,8 +203,10 @@ fn config() -> Json<Value> {
         "gitHash": option_env!("GIT_REV"),
         "server": {
             "name": "Vaultwarden",
-            "url": "https://github.com/dani-garcia/vaultwarden",
-            "version": crate::VERSION
+            "url": "https://github.com/dani-garcia/vaultwarden"
+        },
+        "settings": {
+            "disableUserRegistration": !crate::CONFIG.signups_allowed() && crate::CONFIG.signups_domains_whitelist().is_empty(),
         },
         "environment": {
             "vault": domain,
@@ -9,9 +9,8 @@ use crate::{
         core::{log_event, two_factor, CipherSyncData, CipherSyncType},
         EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType,
     },
-    auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
+    auth::{decode_invite, AdminHeaders, ClientVersion, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
     db::{models::*, DbConn},
-    error::Error,
     mail,
     util::{convert_json_key_lcase_first, NumberOrString},
     CONFIG,
@@ -127,6 +126,7 @@ struct NewCollectionData {
     name: String,
     groups: Vec<NewCollectionObjectData>,
     users: Vec<NewCollectionObjectData>,
+    id: Option<String>,
     external_id: Option<String>,
 }

@@ -358,11 +358,12 @@ async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose,
             Vec::with_capacity(0)
         };

-        let mut json_object = col.to_json();
+        let mut json_object = col.to_json_details(&headers.user.uuid, None, &mut conn).await;
         json_object["assigned"] = json!(assigned);
         json_object["users"] = json!(users);
         json_object["groups"] = json!(groups);
         json_object["object"] = json!("collectionAccessDetails");
+        json_object["unmanaged"] = json!(false);
         data.push(json_object)
     }

@@ -509,7 +510,7 @@ async fn post_organization_collection_update(
         CollectionUser::save(&org_user.user_uuid, col_id, user.read_only, user.hide_passwords, &mut conn).await?;
     }

-    Ok(Json(collection.to_json()))
+    Ok(Json(collection.to_json_details(&headers.user.uuid, None, &mut conn).await))
 }

 #[delete("/organizations/<org_id>/collections/<col_id>/user/<org_user_id>")]
@@ -680,7 +681,7 @@ async fn get_org_collection_detail(

         let assigned = Collection::can_access_collection(&user_org, &collection.uuid, &mut conn).await;

-        let mut json_object = collection.to_json();
+        let mut json_object = collection.to_json_details(&headers.user.uuid, None, &mut conn).await;
         json_object["assigned"] = json!(assigned);
         json_object["users"] = json!(users);
         json_object["groups"] = json!(groups);
@@ -872,20 +873,19 @@ async fn send_invite(org_id: &str, data: Json<InviteData>, headers: AdminHeaders
     }

     for email in data.emails.iter() {
-        let email = email.to_lowercase();
         let mut user_org_status = UserOrgStatus::Invited as i32;
-        let user = match User::find_by_mail(&email, &mut conn).await {
+        let user = match User::find_by_mail(email, &mut conn).await {
             None => {
                 if !CONFIG.invitations_allowed() {
                     err!(format!("User does not exist: {email}"))
                 }

-                if !CONFIG.is_email_domain_allowed(&email) {
+                if !CONFIG.is_email_domain_allowed(email) {
                     err!("Email domain not eligible for invitations")
                 }

                 if !CONFIG.mail_enabled() {
-                    let invitation = Invitation::new(&email);
+                    let invitation = Invitation::new(email);
                     invitation.save(&mut conn).await?;
                 }

@@ -956,8 +956,7 @@ async fn send_invite(org_id: &str, data: Json<InviteData>, headers: AdminHeaders
         };

         mail::send_invite(
-            &email,
-            &user.uuid,
+            &user,
             Some(String::from(org_id)),
             Some(new_user.uuid),
             &org_name,
@@ -1033,8 +1032,7 @@ async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, co

     if CONFIG.mail_enabled() {
         mail::send_invite(
-            &user.email,
-            &user.uuid,
+            &user,
             Some(org_id.to_string()),
             Some(user_org.uuid),
             &org_name,
@@ -1598,42 +1596,45 @@ async fn post_org_import(
     // Bitwarden does not process the import if there is one item invalid.
     // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
     // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
-    Cipher::validate_notes(&data.ciphers)?;
+    Cipher::validate_cipher_data(&data.ciphers)?;

-    let mut collections = Vec::new();
+    let existing_collections: HashSet<Option<String>> =
+        Collection::find_by_organization(&org_id, &mut conn).await.into_iter().map(|c| (Some(c.uuid))).collect();
+    let mut collections: Vec<String> = Vec::with_capacity(data.collections.len());
     for coll in data.collections {
-        let collection = Collection::new(org_id.clone(), coll.name, coll.external_id);
-        if collection.save(&mut conn).await.is_err() {
-            collections.push(Err(Error::new("Failed to create Collection", "Failed to create Collection")));
-        } else {
-            collections.push(Ok(collection));
-        }
+        let collection_uuid = if existing_collections.contains(&coll.id) {
+            coll.id.unwrap()
+        } else {
+            let new_collection = Collection::new(org_id.clone(), coll.name, coll.external_id);
+            new_collection.save(&mut conn).await?;
+            new_collection.uuid
+        };
+
+        collections.push(collection_uuid);
     }

     // Read the relations between collections and ciphers
-    let mut relations = Vec::new();
+    // Ciphers can be in multiple collections at the same time
+    let mut relations = Vec::with_capacity(data.collection_relationships.len());
     for relation in data.collection_relationships {
         relations.push((relation.key, relation.value));
     }

     let headers: Headers = headers.into();

-    let mut ciphers = Vec::new();
-    for cipher_data in data.ciphers {
+    let mut ciphers: Vec<String> = Vec::with_capacity(data.ciphers.len());
+    for mut cipher_data in data.ciphers {
+        // Always clear folder_id's via an organization import
+        cipher_data.folder_id = None;
         let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
         update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok();
-        ciphers.push(cipher);
+        ciphers.push(cipher.uuid);
     }

     // Assign the collections
     for (cipher_index, coll_index) in relations {
-        let cipher_id = &ciphers[cipher_index].uuid;
-        let coll = &collections[coll_index];
-        let coll_id = match coll {
-            Ok(coll) => coll.uuid.as_str(),
-            Err(_) => err!("Failed to assign to collection"),
-        };
+        let cipher_id = &ciphers[cipher_index];
+        let coll_id = &collections[coll_index];

         CollectionCipher::save(cipher_id, coll_id, &mut conn).await?;
     }

@@ -1722,7 +1723,7 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso
|
|||||||
return Ok(Json(json!({})));
|
return Ok(Json(json!({})));
|
||||||
}
|
}
|
||||||
|
|
||||||
let invite = crate::auth::decode_invite(token)?;
|
let invite = decode_invite(token)?;
|
||||||
|
|
||||||
let invite_org_id = match invite.org_id {
|
let invite_org_id = match invite.org_id {
|
||||||
Some(invite_org_id) => invite_org_id,
|
Some(invite_org_id) => invite_org_id,
|
||||||
@@ -1782,6 +1783,38 @@ async fn put_policy(
         None => err!("Invalid or unsupported policy type"),
     };

+    // Bitwarden only allows the Reset Password policy when Single Org policy is enabled
+    // Vaultwarden encouraged to use multiple orgs instead of groups because groups were not available in the past
+    // Now that groups are available we can enforce this option when wanted.
+    // We put this behind a config option to prevent breaking current installation.
+    // Maybe we want to enable this by default in the future, but currently it is disabled by default.
+    if CONFIG.enforce_single_org_with_reset_pw_policy() {
+        if pol_type_enum == OrgPolicyType::ResetPassword && data.enabled {
+            let single_org_policy_enabled =
+                match OrgPolicy::find_by_org_and_type(org_id, OrgPolicyType::SingleOrg, &mut conn).await {
+                    Some(p) => p.enabled,
+                    None => false,
+                };
+
+            if !single_org_policy_enabled {
+                err!("Single Organization policy is not enabled. It is mandatory for this policy to be enabled.")
+            }
+        }
+
+        // Also prevent the Single Org Policy to be disabled if the Reset Password policy is enabled
+        if pol_type_enum == OrgPolicyType::SingleOrg && !data.enabled {
+            let reset_pw_policy_enabled =
+                match OrgPolicy::find_by_org_and_type(org_id, OrgPolicyType::ResetPassword, &mut conn).await {
+                    Some(p) => p.enabled,
+                    None => false,
+                };
+
+            if reset_pw_policy_enabled {
+                err!("Account recovery policy is enabled. It is not allowed to disable this policy.")
+            }
+        }
+    }
+
     // When enabling the TwoFactorAuthentication policy, revoke all members that do not have 2FA
     if pol_type_enum == OrgPolicyType::TwoFactorAuthentication && data.enabled {
         two_factor::enforce_2fa_policy_for_org(
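The two guards added to put_policy form a mutual dependency: Reset Password may only be switched on while Single Org is already on, and Single Org may not be switched off while Reset Password is still on. The rule can be stated as a small pure function; the names here are invented and stand in for the database lookups in the diff:

    #[derive(PartialEq, Clone, Copy)]
    enum PolicyType {
        SingleOrg,
        ResetPassword,
    }

    // Validates a policy toggle against the current state of the other policy.
    // `other_enabled` is what the OrgPolicy lookup in the diff would return.
    fn check_policy_toggle(kind: PolicyType, enable: bool, other_enabled: bool) -> Result<(), &'static str> {
        match (kind, enable) {
            // Enabling Reset Password requires Single Org to already be enabled.
            (PolicyType::ResetPassword, true) if !other_enabled => {
                Err("Single Organization policy is not enabled. It is mandatory for this policy to be enabled.")
            }
            // Disabling Single Org is rejected while Reset Password is still enabled.
            (PolicyType::SingleOrg, false) if other_enabled => {
                Err("Account recovery policy is enabled. It is not allowed to disable this policy.")
            }
            _ => Ok(()),
        }
    }

    fn main() {
        assert!(check_policy_toggle(PolicyType::ResetPassword, true, false).is_err());
        assert!(check_policy_toggle(PolicyType::SingleOrg, false, true).is_err());
        assert!(check_policy_toggle(PolicyType::ResetPassword, true, true).is_ok());
    }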
@@ -2005,8 +2038,7 @@ async fn import(org_id: &str, data: Json<OrgImportData>, headers: Headers, mut c
         };

         mail::send_invite(
-            &user_data.email,
-            &user.uuid,
+            &user,
             Some(String::from(org_id)),
             Some(new_org_user.uuid),
             &org_name,
@@ -2281,6 +2313,7 @@ async fn get_groups(org_id: &str, _headers: ManagerHeadersLoose, mut conn: DbCon
     // Group::find_by_organization(&org_id, &mut conn).await.iter().map(Group::to_json).collect::<Value>()
     let groups = Group::find_by_organization(org_id, &mut conn).await;
     let mut groups_json = Vec::with_capacity(groups.len());

     for g in groups {
         groups_json.push(g.to_json_details(&mut conn).await)
     }
@@ -2969,18 +3002,20 @@ async fn put_reset_password_enrollment(
 // We need to convert all keys so they have the first character to be a lowercase.
 // Else the export will be just an empty JSON file.
 #[get("/organizations/<org_id>/export")]
-async fn get_org_export(org_id: &str, headers: AdminHeaders, mut conn: DbConn) -> Json<Value> {
-    use semver::{Version, VersionReq};
-
+async fn get_org_export(
+    org_id: &str,
+    headers: AdminHeaders,
+    client_version: Option<ClientVersion>,
+    mut conn: DbConn,
+) -> Json<Value> {
     // Since version v2023.1.0 the format of the export is different.
     // Also, this endpoint was created since v2022.9.0.
     // Therefore, we will check for any version smaller then v2023.1.0 and return a different response.
     // If we can't determine the version, we will use the latest default v2023.1.0 and higher.
     // https://github.com/bitwarden/server/blob/9ca93381ce416454734418c3a9f99ab49747f1b6/src/Api/Controllers/OrganizationExportController.cs#L44
-    let use_list_response_model = if let Some(client_version) = headers.client_version {
-        let ver_match = VersionReq::parse("<2023.1.0").unwrap();
-        let client_version = Version::parse(&client_version).unwrap();
-        ver_match.matches(&client_version)
+    let use_list_response_model = if let Some(client_version) = client_version {
+        let ver_match = semver::VersionReq::parse("<2023.1.0").unwrap();
+        ver_match.matches(&client_version.0)
     } else {
         false
     };

@@ -1,6 +1,6 @@
 use chrono::Utc;
 use rocket::{
-    request::{self, FromRequest, Outcome},
+    request::{FromRequest, Outcome},
     serde::json::Json,
     Request, Route,
 };
@@ -123,15 +123,8 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db
                 None => err!("Error looking up organization"),
             };

-            mail::send_invite(
-                &user_data.email,
-                &user.uuid,
-                Some(org_id.clone()),
-                Some(new_org_user.uuid),
-                &org_name,
-                Some(org_email),
-            )
-            .await?;
+            mail::send_invite(&user, Some(org_id.clone()), Some(new_org_user.uuid), &org_name, Some(org_email))
+                .await?;
         }
     }
 }
@@ -199,7 +192,7 @@ pub struct PublicToken(String);
 impl<'r> FromRequest<'r> for PublicToken {
     type Error = &'static str;

-    async fn from_request(request: &'r Request<'_>) -> request::Outcome<Self, Self::Error> {
+    async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
         let headers = request.headers();
         // Get access_token
         let access_token: &str = match headers.get_one("Authorization") {
@@ -281,10 +281,6 @@ fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64
 }

 pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult {
-    // email is as entered by the user, so it needs to be normalized before
-    // comparison with auth_user below.
-    let email = &email.to_lowercase();
-
     let split: Vec<&str> = response.split(':').collect();
     if split.len() != 2 {
         err!(
@@ -211,10 +211,7 @@ impl DuoClient {
             nonce,
         };

-        let token = match self.encode_duo_jwt(jwt_payload) {
-            Ok(token) => token,
-            Err(e) => return Err(e),
-        };
+        let token = self.encode_duo_jwt(jwt_payload)?;

         let authz_endpoint = format!("https://{}/oauth/v1/authorize", self.api_host);
         let mut auth_url = match Url::parse(authz_endpoint.as_str()) {
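The DuoClient change is pure shorthand: a match that only forwards the error is exactly what the ? operator does. A tiny illustration with a made-up fallible function:

    fn encode(payload: &str) -> Result<String, String> {
        if payload.is_empty() {
            Err("empty payload".to_string())
        } else {
            Ok(format!("jwt:{payload}"))
        }
    }

    fn build_token(payload: &str) -> Result<String, String> {
        // Equivalent to:
        //     let token = match encode(payload) {
        //         Ok(token) => token,
        //         Err(e) => return Err(e),
        //     };
        let token = encode(payload)?;
        Ok(token)
    }

    fn main() {
        assert_eq!(build_token("abc").unwrap(), "jwt:abc");
        assert!(build_token("").is_err());
    }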
@@ -357,7 +354,7 @@ pub async fn purge_duo_contexts(pool: DbPool) {
 // Construct the url that Duo should redirect users to.
 fn make_callback_url(client_name: &str) -> Result<String, Error> {
     // Get the location of this application as defined in the config.
-    let base = match Url::parse(CONFIG.domain().as_str()) {
+    let base = match Url::parse(&format!("{}/", CONFIG.domain())) {
         Ok(url) => url,
         Err(e) => err!(format!("Error parsing configured domain URL (check your domain configuration): {e:?}")),
     };
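The make_callback_url fix appends a trailing slash before parsing the configured domain. With the url crate this matters because Url::join treats the last path segment of a base URL without a trailing slash as a file and replaces it. A short demonstration; example.com/vw is an arbitrary stand-in for a domain configured with a sub-path:

    use url::Url;

    fn main() {
        // Without the trailing slash the final segment is dropped on join.
        let without = Url::parse("https://example.com/vw").unwrap();
        assert_eq!(without.join("duo_redirect").unwrap().as_str(), "https://example.com/duo_redirect");

        // With the trailing slash the callback lands under the configured sub-path.
        let with = Url::parse(&format!("{}/", "https://example.com/vw")).unwrap();
        assert_eq!(with.join("duo_redirect").unwrap().as_str(), "https://example.com/vw/duo_redirect");
    }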
@@ -292,7 +292,7 @@ impl EmailTokenData {
     }

     pub fn from_json(string: &str) -> Result<EmailTokenData, Error> {
-        let res: Result<EmailTokenData, crate::serde_json::Error> = serde_json::from_str(string);
+        let res: Result<EmailTokenData, serde_json::Error> = serde_json::from_str(string);
         match res {
             Ok(x) => Ok(x),
             Err(_) => err!("Could not decode EmailTokenData from string"),
@@ -269,8 +269,14 @@ pub async fn send_incomplete_2fa_notifications(pool: DbPool) {
             "User {} did not complete a 2FA login within the configured time limit. IP: {}",
             user.email, login.ip_address
         );
-        match mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name)
-            .await
+        match mail::send_incomplete_2fa_login(
+            &user.email,
+            &login.ip_address,
+            &login.login_time,
+            &login.device_name,
+            &DeviceType::from_i32(login.device_type).to_string(),
+        )
+        .await
         {
             Ok(_) => {
                 if let Err(e) = login.delete(&mut conn).await {
@@ -42,7 +42,7 @@ impl ProtectedActionData {
     }

     pub fn from_json(string: &str) -> Result<Self, Error> {
-        let res: Result<Self, crate::serde_json::Error> = serde_json::from_str(string);
+        let res: Result<Self, serde_json::Error> = serde_json::from_str(string);
         match res {
             Ok(x) => Ok(x),
             Err(_) => err!("Could not decode ProtectedActionData from string"),
@@ -49,7 +49,7 @@ fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
     data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect()
 }

-fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value {
+fn jsonify_yubikeys(yubikeys: Vec<String>) -> Value {
     let mut result = Value::Object(serde_json::Map::new());

     for (i, key) in yubikeys.into_iter().enumerate() {
@@ -1,4 +1,5 @@
 use std::{
+    collections::HashMap,
     net::IpAddr,
     sync::Arc,
     time::{Duration, SystemTime},
@@ -446,6 +447,9 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err
 /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "");
 /// ```
 fn get_icon_priority(href: &str, sizes: &str) -> u8 {
+    static PRIORITY_MAP: Lazy<HashMap<&'static str, u8>> =
+        Lazy::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());
+
     // Check if there is a dimension set
     let (width, height) = parse_sizes(sizes);

@@ -470,13 +474,9 @@ fn get_icon_priority(href: &str, sizes: &str) -> u8 {
             200
         }
     } else {
-        // Change priority by file extension
-        if href.ends_with(".png") {
-            10
-        } else if href.ends_with(".jpg") || href.ends_with(".jpeg") {
-            20
-        } else {
-            30
-        }
+        match href.rsplit_once('.') {
+            Some((_, extension)) => PRIORITY_MAP.get(&*extension.to_ascii_lowercase()).copied().unwrap_or(30),
+            None => 30,
+        }
     }
 }
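The icon-priority refactor replaces a chain of ends_with checks with a static extension-to-priority map keyed off the lowercased result of rsplit_once. A self-contained sketch of the same lookup; the priorities mirror the diff, the keys are stored without the leading dot so the rsplit_once result can be looked up directly, and once_cell is assumed to be available:

    use once_cell::sync::Lazy;
    use std::collections::HashMap;

    static PRIORITY_MAP: Lazy<HashMap<&'static str, u8>> =
        Lazy::new(|| [("png", 10), ("jpg", 20), ("jpeg", 20)].into_iter().collect());

    // Lower number means higher priority; unknown or missing extensions fall back to 30.
    fn priority_for(href: &str) -> u8 {
        match href.rsplit_once('.') {
            Some((_, extension)) => PRIORITY_MAP.get(&*extension.to_ascii_lowercase()).copied().unwrap_or(30),
            None => 30,
        }
    }

    fn main() {
        assert_eq!(priority_for("https://example.com/favicon.PNG"), 10);
        assert_eq!(priority_for("https://example.com/logo.svg"), 30);
        assert_eq!(priority_for("no-extension"), 30);
    }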
@@ -623,7 +623,7 @@ use cookie_store::CookieStore;
 pub struct Jar(std::sync::RwLock<CookieStore>);

 impl reqwest::cookie::CookieStore for Jar {
-    fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &header::HeaderValue>, url: &url::Url) {
+    fn set_cookies(&self, cookie_headers: &mut dyn Iterator<Item = &HeaderValue>, url: &url::Url) {
         use cookie::{Cookie as RawCookie, ParseError as RawCookieParseError};
         use time::Duration;

@@ -642,7 +642,7 @@ impl reqwest::cookie::CookieStore for Jar {
         cookie_store.store_response_cookies(cookies, url);
     }

-    fn cookies(&self, url: &url::Url) -> Option<header::HeaderValue> {
+    fn cookies(&self, url: &url::Url) -> Option<HeaderValue> {
         let cookie_store = self.0.read().unwrap();
         let s = cookie_store
             .get_request_values(url)
@@ -654,7 +654,7 @@ impl reqwest::cookie::CookieStore for Jar {
             return None;
         }

-        header::HeaderValue::from_maybe_shared(Bytes::from(s)).ok()
+        HeaderValue::from_maybe_shared(Bytes::from(s)).ok()
     }
 }

@@ -120,21 +120,25 @@ async fn _refresh_login(data: ConnectData, conn: &mut DbConn) -> JsonResult {
         "expires_in": expires_in,
         "token_type": "Bearer",
         "refresh_token": device.refresh_token,
-        "Key": user.akey,
-        "PrivateKey": user.private_key,
-
-        "Kdf": user.client_kdf_type,
-        "KdfIterations": user.client_kdf_iter,
-        "KdfMemory": user.client_kdf_memory,
-        "KdfParallelism": user.client_kdf_parallelism,
-        "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
         "scope": scope,
-        "unofficialServer": true,
     });

     Ok(Json(result))
 }

+#[derive(Default, Deserialize, Serialize)]
+#[serde(rename_all = "camelCase")]
+struct MasterPasswordPolicy {
+    min_complexity: u8,
+    min_length: u32,
+    require_lower: bool,
+    require_upper: bool,
+    require_numbers: bool,
+    require_special: bool,
+    enforce_on_login: bool,
+}
+
 async fn _password_login(
     data: ConnectData,
     user_uuid: &mut Option<String>,
@@ -161,20 +165,22 @@ async fn _password_login(
     // Set the user_uuid here to be passed back used for event logging.
     *user_uuid = Some(user.uuid.clone());

-    // Check password
-    let password = data.password.as_ref().unwrap();
-    if let Some(auth_request_uuid) = data.auth_request.clone() {
-        if let Some(auth_request) = AuthRequest::find_by_uuid(auth_request_uuid.as_str(), conn).await {
-            if !auth_request.check_access_code(password) {
-                err!(
-                    "Username or access code is incorrect. Try again",
-                    format!("IP: {}. Username: {}.", ip.ip, username),
-                    ErrorEvent {
-                        event: EventType::UserFailedLogIn,
-                    }
-                )
-            }
-        } else {
+    // Check if the user is disabled
+    if !user.enabled {
+        err!(
+            "This user has been disabled",
+            format!("IP: {}. Username: {}.", ip.ip, username),
+            ErrorEvent {
+                event: EventType::UserFailedLogIn
+            }
+        )
+    }
+
+    let password = data.password.as_ref().unwrap();
+
+    // If we get an auth request, we don't check the user's password, but the access code of the auth request
+    if let Some(ref auth_request_uuid) = data.auth_request {
+        let Some(auth_request) = AuthRequest::find_by_uuid(auth_request_uuid.as_str(), conn).await else {
             err!(
                 "Auth request not found. Try again.",
                 format!("IP: {}. Username: {}.", ip.ip, username),
@@ -182,6 +188,24 @@ async fn _password_login(
                 ErrorEvent {
                     event: EventType::UserFailedLogIn,
                 }
             )
-        }
+        };
+
+        let expiration_time = auth_request.creation_date + chrono::Duration::minutes(5);
+        let request_expired = Utc::now().naive_utc() >= expiration_time;
+
+        if auth_request.user_uuid != user.uuid
+            || !auth_request.approved.unwrap_or(false)
+            || request_expired
+            || ip.ip.to_string() != auth_request.request_ip
+            || !auth_request.check_access_code(password)
+        {
+            err!(
+                "Username or access code is incorrect. Try again",
+                format!("IP: {}. Username: {}.", ip.ip, username),
+                ErrorEvent {
+                    event: EventType::UserFailedLogIn,
+                }
+            )
+        }
     } else if !user.check_valid_password(password) {
         err!(
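The rewritten login-with-device branch rejects the request unless every condition holds: same user, approved, not older than five minutes, same requesting IP, and a matching access code. A compact sketch of just the expiry arithmetic with chrono; the five-minute window is the value used in the diff:

    use chrono::{Duration, NaiveDateTime, Utc};

    // Returns true when an auth request created at `creation_date` is still inside the 5-minute window.
    fn auth_request_still_valid(creation_date: NaiveDateTime) -> bool {
        let expiration_time = creation_date + Duration::minutes(5);
        Utc::now().naive_utc() < expiration_time
    }

    fn main() {
        let fresh = Utc::now().naive_utc();
        let stale = fresh - Duration::minutes(10);
        assert!(auth_request_still_valid(fresh));
        assert!(!auth_request_still_valid(stale));
    }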
@@ -193,8 +217,8 @@ async fn _password_login(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Change the KDF Iterations
|
// Change the KDF Iterations (only when not logging in with an auth request)
|
||||||
if user.password_iterations != CONFIG.password_iterations() {
|
if data.auth_request.is_none() && user.password_iterations != CONFIG.password_iterations() {
|
||||||
user.password_iterations = CONFIG.password_iterations();
|
user.password_iterations = CONFIG.password_iterations();
|
||||||
user.set_password(password, None, false, None);
|
user.set_password(password, None, false, None);
|
||||||
|
|
||||||
@@ -203,17 +227,6 @@ async fn _password_login(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if the user is disabled
|
|
||||||
if !user.enabled {
|
|
||||||
err!(
|
|
||||||
"This user has been disabled",
|
|
||||||
format!("IP: {}. Username: {}.", ip.ip, username),
|
|
||||||
ErrorEvent {
|
|
||||||
event: EventType::UserFailedLogIn
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let now = Utc::now().naive_utc();
|
let now = Utc::now().naive_utc();
|
||||||
|
|
||||||
if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
|
if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
|
||||||
@@ -253,7 +266,7 @@ async fn _password_login(
     let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?;

     if CONFIG.mail_enabled() && new_device {
-        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await {
+        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
             error!("Error sending new device email: {:#?}", e);

             if CONFIG.require_device_email() {
@@ -282,6 +295,36 @@ async fn _password_login(
     let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec);
     device.save(conn).await?;

+    // Fetch all valid Master Password Policies and merge them into one with all true's and larges numbers as one policy
+    let master_password_policies: Vec<MasterPasswordPolicy> =
+        OrgPolicy::find_accepted_and_confirmed_by_user_and_active_policy(
+            &user.uuid,
+            OrgPolicyType::MasterPassword,
+            conn,
+        )
+        .await
+        .into_iter()
+        .filter_map(|p| serde_json::from_str(&p.data).ok())
+        .collect();
+
+    let master_password_policy = if !master_password_policies.is_empty() {
+        let mut mpp_json = json!(master_password_policies.into_iter().reduce(|acc, policy| {
+            MasterPasswordPolicy {
+                min_complexity: acc.min_complexity.max(policy.min_complexity),
+                min_length: acc.min_length.max(policy.min_length),
+                require_lower: acc.require_lower || policy.require_lower,
+                require_upper: acc.require_upper || policy.require_upper,
+                require_numbers: acc.require_numbers || policy.require_numbers,
+                require_special: acc.require_special || policy.require_special,
+                enforce_on_login: acc.enforce_on_login || policy.enforce_on_login,
+            }
+        }));
+        mpp_json["object"] = json!("masterPasswordPolicy");
+        mpp_json
+    } else {
+        json!({"object": "masterPasswordPolicy"})
+    };
+
     let mut result = json!({
         "access_token": access_token,
         "expires_in": expires_in,
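The policy merge above folds every applicable MasterPasswordPolicy into a single strictest policy: numeric fields take the maximum, boolean fields are OR-ed. The same reduce pattern in isolation, with a reduced struct and invented values purely for the example:

    #[derive(Debug, PartialEq)]
    struct MasterPasswordPolicy {
        min_complexity: u8,
        min_length: u32,
        require_special: bool,
    }

    // Merge all policies into the strictest one: max for numbers, OR for booleans.
    fn merge(policies: Vec<MasterPasswordPolicy>) -> Option<MasterPasswordPolicy> {
        policies.into_iter().reduce(|acc, policy| MasterPasswordPolicy {
            min_complexity: acc.min_complexity.max(policy.min_complexity),
            min_length: acc.min_length.max(policy.min_length),
            require_special: acc.require_special || policy.require_special,
        })
    }

    fn main() {
        let merged = merge(vec![
            MasterPasswordPolicy { min_complexity: 2, min_length: 8, require_special: false },
            MasterPasswordPolicy { min_complexity: 1, min_length: 12, require_special: true },
        ]);
        assert_eq!(
            merged,
            Some(MasterPasswordPolicy { min_complexity: 2, min_length: 12, require_special: true })
        );
    }

Iterator::reduce returns None for an empty input, which is why the diff falls back to a bare {"object": "masterPasswordPolicy"} object when no policies apply.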
@@ -297,12 +340,9 @@ async fn _password_login(
         "KdfParallelism": user.client_kdf_parallelism,
         "ResetMasterPassword": false, // TODO: Same as above
         "ForcePasswordReset": false,
-        "MasterPasswordPolicy": {
-            "object": "masterPasswordPolicy",
-        },
+        "MasterPasswordPolicy": master_password_policy,

         "scope": scope,
-        "unofficialServer": true,
         "UserDecryptionOptions": {
             "HasMasterPassword": !user.password_hash.is_empty(),
             "Object": "userDecryptionOptions"
@@ -381,7 +421,7 @@ async fn _user_api_key_login(

     if CONFIG.mail_enabled() && new_device {
         let now = Utc::now().naive_utc();
-        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await {
+        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device).await {
             error!("Error sending new device email: {:#?}", e);

             if CONFIG.require_device_email() {
@@ -421,9 +461,8 @@ async fn _user_api_key_login(
         "KdfIterations": user.client_kdf_iter,
         "KdfMemory": user.client_kdf_memory,
         "KdfParallelism": user.client_kdf_parallelism,
-        "ResetMasterPassword": false, // TODO: Same as above
+        "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
         "scope": "api",
-        "unofficialServer": true,
     });

     Ok(Json(result))
@@ -455,7 +494,6 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: &
         "expires_in": 3600,
         "token_type": "Bearer",
         "scope": "api.organization",
-        "unofficialServer": true,
     })))
 }

@@ -495,7 +533,7 @@ async fn twofactor_auth(
         return Ok(None);
     }

-    TwoFactorIncomplete::mark_incomplete(&user.uuid, &device.uuid, &device.name, ip, conn).await?;
+    TwoFactorIncomplete::mark_incomplete(&user.uuid, &device.uuid, &device.name, device.atype, ip, conn).await?;

     let twofactor_ids: Vec<_> = twofactors.iter().map(|tf| tf.atype).collect();
     let selected_id = data.two_factor_provider.unwrap_or(twofactor_ids[0]); // If we aren't given a two factor provider, assume the first one
@@ -428,7 +428,7 @@ impl WebSocketUsers {
         let (user_uuid, collection_uuids, revision_date) = if let Some(collection_uuids) = collection_uuids {
             (
                 Value::Nil,
-                Value::Array(collection_uuids.into_iter().map(|v| v.into()).collect::<Vec<rmpv::Value>>()),
+                Value::Array(collection_uuids.into_iter().map(|v| v.into()).collect::<Vec<Value>>()),
                 serialize_date(Utc::now().naive_utc()),
             )
         } else {

src/api/web.rs
@@ -1,13 +1,20 @@
+use once_cell::sync::Lazy;
 use std::path::{Path, PathBuf};

-use rocket::{fs::NamedFile, http::ContentType, response::content::RawHtml as Html, serde::json::Json, Catcher, Route};
+use rocket::{
+    fs::NamedFile,
+    http::ContentType,
+    response::{content::RawCss as Css, content::RawHtml as Html, Redirect},
+    serde::json::Json,
+    Catcher, Route,
+};
 use serde_json::Value;

 use crate::{
     api::{core::now, ApiResult, EmptyResult},
     auth::decode_file_download,
     error::Error,
-    util::{Cached, SafeString},
+    util::{get_web_vault_version, Cached, SafeString},
     CONFIG,
 };

@@ -16,7 +23,7 @@ pub fn routes() -> Vec<Route> {
     // crate::utils::LOGGED_ROUTES to make sure they appear in the log
     let mut routes = routes![attachments, alive, alive_head, static_files];
     if CONFIG.web_vault_enabled() {
-        routes.append(&mut routes![web_index, web_index_head, app_id, web_files]);
+        routes.append(&mut routes![web_index, web_index_direct, web_index_head, app_id, web_files, vaultwarden_css]);
     }

     #[cfg(debug_assertions)]
@@ -45,11 +52,101 @@ fn not_found() -> ApiResult<Html<String>> {
     Ok(Html(text))
 }

+#[get("/css/vaultwarden.css")]
+fn vaultwarden_css() -> Cached<Css<String>> {
+    // Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then.
+    // The default is based upon the version since this feature is added.
+    static WEB_VAULT_VERSION: Lazy<u32> = Lazy::new(|| {
+        let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
+        let vault_version = get_web_vault_version();
+
+        let (major, minor, patch) = match re.captures(&vault_version) {
+            Some(c) if c.len() == 4 => (
+                c.get(1).unwrap().as_str().parse().unwrap(),
+                c.get(2).unwrap().as_str().parse().unwrap(),
+                c.get(3).unwrap().as_str().parse().unwrap(),
+            ),
+            _ => (2024, 6, 2),
+        };
+        format!("{major}{minor:02}{patch:02}").parse::<u32>().unwrap()
+    });
+
+    // Configure the Vaultwarden version as an integer so it can be used as a comparison smaller or greater then.
+    // The default is based upon the version since this feature is added.
+    static VW_VERSION: Lazy<u32> = Lazy::new(|| {
+        let re = regex::Regex::new(r"(\d{1})\.(\d{1,2})\.(\d{1,2})").unwrap();
+        let vw_version = crate::VERSION.unwrap_or("1.32.1");
+
+        let (major, minor, patch) = match re.captures(vw_version) {
+            Some(c) if c.len() == 4 => (
+                c.get(1).unwrap().as_str().parse().unwrap(),
+                c.get(2).unwrap().as_str().parse().unwrap(),
+                c.get(3).unwrap().as_str().parse().unwrap(),
+            ),
+            _ => (1, 32, 1),
+        };
+        format!("{major}{minor:02}{patch:02}").parse::<u32>().unwrap()
+    });
+
+    let css_options = json!({
+        "web_vault_version": *WEB_VAULT_VERSION,
+        "vw_version": *VW_VERSION,
+        "signup_disabled": !CONFIG.signups_allowed() && CONFIG.signups_domains_whitelist().is_empty(),
+        "mail_enabled": CONFIG.mail_enabled(),
+        "yubico_enabled": CONFIG._enable_yubico() && (CONFIG.yubico_client_id().is_some() == CONFIG.yubico_secret_key().is_some()),
+        "emergency_access_allowed": CONFIG.emergency_access_allowed(),
+        "sends_allowed": CONFIG.sends_allowed(),
+        "load_user_scss": true,
+    });
+
+    let scss = match CONFIG.render_template("scss/vaultwarden.scss", &css_options) {
+        Ok(t) => t,
+        Err(e) => {
+            // Something went wrong loading the template. Use the fallback
+            warn!("Loading scss/vaultwarden.scss.hbs or scss/user.vaultwarden.scss.hbs failed. {e}");
+            CONFIG
+                .render_fallback_template("scss/vaultwarden.scss", &css_options)
+                .expect("Fallback scss/vaultwarden.scss.hbs to render")
+        }
+    };
+
+    let css = match grass_compiler::from_string(
+        scss,
+        &grass_compiler::Options::default().style(grass_compiler::OutputStyle::Compressed),
+    ) {
+        Ok(css) => css,
+        Err(e) => {
+            // Something went wrong compiling the scss. Use the fallback
+            warn!("Compiling the Vaultwarden SCSS styles failed. {e}");
+            let mut css_options = css_options;
+            css_options["load_user_scss"] = json!(false);
+            let scss = CONFIG
+                .render_fallback_template("scss/vaultwarden.scss", &css_options)
+                .expect("Fallback scss/vaultwarden.scss.hbs to render");
+            grass_compiler::from_string(
+                scss,
+                &grass_compiler::Options::default().style(grass_compiler::OutputStyle::Compressed),
+            )
+            .expect("SCSS to compile")
+        }
+    };
+
+    // Cache for one day should be enough and not too much
+    Cached::ttl(Css(css), 86_400, false)
+}
+
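vaultwarden_css turns both version strings into integers of the form MAJOR, then two-digit MINOR, then two-digit PATCH, so the SCSS template can do plain numeric comparisons. The conversion in isolation; the fallback tuple is the web-vault default used in the diff:

    use regex::Regex;

    // Convert a "YYYY.M.P"-style version into a single comparable integer, e.g. "2024.6.2" -> 20240602.
    fn version_as_u32(version: &str, fallback: (u32, u32, u32)) -> u32 {
        let re = Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
        let (major, minor, patch) = match re.captures(version) {
            Some(c) if c.len() == 4 => (
                c.get(1).unwrap().as_str().parse().unwrap(),
                c.get(2).unwrap().as_str().parse().unwrap(),
                c.get(3).unwrap().as_str().parse().unwrap(),
            ),
            _ => fallback,
        };
        format!("{major}{minor:02}{patch:02}").parse::<u32>().unwrap()
    }

    fn main() {
        assert_eq!(version_as_u32("2024.6.2", (2024, 6, 2)), 20240602);
        // Anything that doesn't match the pattern falls back to the default.
        assert_eq!(version_as_u32("unknown", (2024, 6, 2)), 20240602);
    }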
 #[get("/")]
 async fn web_index() -> Cached<Option<NamedFile>> {
     Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).await.ok(), false)
 }

+// Make sure that `/index.html` redirect to actual domain path.
+// If not, this might cause issues with the web-vault
+#[get("/index.html")]
+fn web_index_direct() -> Redirect {
+    Redirect::to(format!("{}/", CONFIG.domain_path()))
+}
+
 #[head("/")]
 fn web_index_head() -> EmptyResult {
     // Add an explicit HEAD route to prevent uptime monitoring services from

src/auth.rs
@@ -35,8 +35,8 @@ static JWT_FILE_DOWNLOAD_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|file_do
static PRIVATE_RSA_KEY: OnceCell<EncodingKey> = OnceCell::new();
static PUBLIC_RSA_KEY: OnceCell<DecodingKey> = OnceCell::new();

-pub fn initialize_keys() -> Result<(), crate::error::Error> {
-    fn read_key(create_if_missing: bool) -> Result<(Rsa<openssl::pkey::Private>, Vec<u8>), crate::error::Error> {
+pub fn initialize_keys() -> Result<(), Error> {
+    fn read_key(create_if_missing: bool) -> Result<(Rsa<openssl::pkey::Private>, Vec<u8>), Error> {
        let mut priv_key_buffer = Vec::with_capacity(2048);

        let mut priv_key_file = File::options()
@@ -53,7 +53,7 @@ pub fn initialize_keys() -> Result<(), crate::error::Error> {
            Rsa::private_key_from_pem(&priv_key_buffer[..bytes_read])?
        } else if create_if_missing {
            // Only create the key if the file doesn't exist or is empty
-            let rsa_key = openssl::rsa::Rsa::generate(2048)?;
+            let rsa_key = Rsa::generate(2048)?;
            priv_key_buffer = rsa_key.private_key_to_pem()?;
            priv_key_file.write_all(&priv_key_buffer)?;
            info!("Private key '{}' created correctly", CONFIG.private_rsa_key());
@@ -615,7 +615,6 @@ pub struct AdminHeaders {
    pub device: Device,
    pub user: User,
    pub org_user_type: UserOrgType,
-    pub client_version: Option<String>,
    pub ip: ClientIp,
}

@@ -625,14 +624,12 @@ impl<'r> FromRequest<'r> for AdminHeaders {

    async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
        let headers = try_outcome!(OrgHeaders::from_request(request).await);
-        let client_version = request.headers().get_one("Bitwarden-Client-Version").map(String::from);
        if headers.org_user_type >= UserOrgType::Admin {
            Outcome::Success(Self {
                host: headers.host,
                device: headers.device,
                user: headers.user,
                org_user_type: headers.org_user_type,
-                client_version,
                ip: headers.ip,
            })
        } else {
@@ -900,3 +897,24 @@ impl<'r> FromRequest<'r> for WsAccessTokenHeader {
        })
    }
}
+
+pub struct ClientVersion(pub semver::Version);
+
+#[rocket::async_trait]
+impl<'r> FromRequest<'r> for ClientVersion {
+    type Error = &'static str;
+
+    async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
+        let headers = request.headers();
+
+        let Some(version) = headers.get_one("Bitwarden-Client-Version") else {
+            err_handler!("No Bitwarden-Client-Version header provided")
+        };
+
+        let Ok(version) = semver::Version::parse(version) else {
+            err_handler!("Invalid Bitwarden-Client-Version header provided")
+        };
+
+        Outcome::Success(ClientVersion(version))
+    }
+}
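The new ClientVersion guard can be taken as an optional parameter by any route that wants to branch on the connecting client's version. A hedged usage sketch (the route path and the version threshold are invented for illustration):

    #[get("/example")]
    async fn example_route(client_version: Option<ClientVersion>) -> &'static str {
        // The guard only succeeds when the Bitwarden-Client-Version header parses as semver;
        // clients without the header (or with a malformed one) fall back to the legacy branch.
        match client_version {
            Some(ClientVersion(v)) if v >= semver::Version::new(2024, 2, 0) => "new payload",
            _ => "legacy payload",
        }
    }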
@@ -1,6 +1,9 @@
use std::env::consts::EXE_SUFFIX;
use std::process::exit;
-use std::sync::RwLock;
+use std::sync::{
+    atomic::{AtomicBool, Ordering},
+    RwLock,
+};

use job_scheduler_ng::Schedule;
use once_cell::sync::Lazy;
@@ -17,6 +20,8 @@ static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
    get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
});

+pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
+
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    Config::load().unwrap_or_else(|e| {
        println!("Error loading config:\n {e:?}\n");
@@ -331,7 +336,7 @@ macro_rules! make_config {
            }
        }
    }};
-    ( @build $value:expr, $config:expr, gen, $default_fn:expr ) => {{
+    ( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
        let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
        f($config)
    }};
@@ -349,10 +354,10 @@ macro_rules! make_config {
// }
//
// Where action applied when the value wasn't provided and can be:
// def: Use a default value
// auto: Value is auto generated based on other values
// option: Value is optional
-// gen: Value is always autogenerated and it's original value ignored
+// generated: Value is always autogenerated and it's original value ignored
make_config! {
    folders {
        /// Data folder |> Main data folder
@@ -492,11 +497,11 @@ make_config! {
        /// Password iterations |> Number of server-side passwords hashing iterations for the password hash.
        /// The default for new users. If changed, it will be updated during login for existing users.
        password_iterations: i32, true, def, 600_000;
-        /// Allow password hints |> Controls whether users can set password hints. This setting applies globally to all users.
+        /// Allow password hints |> Controls whether users can set or show password hints. This setting applies globally to all users.
        password_hints_allowed: bool, true, def, true;
-        /// Show password hint |> Controls whether a password hint should be shown directly in the web page
-        /// if SMTP service is not configured. Not recommended for publicly-accessible instances as this
-        /// provides unauthenticated access to potentially sensitive data.
+        /// Show password hint (Know the risks!) |> Controls whether a password hint should be shown directly in the web page
+        /// if SMTP service is not configured and password hints are allowed. Not recommended for publicly-accessible instances
+        /// because this provides unauthenticated access to potentially sensitive data.
        show_password_hint: bool, true, def, false;

        /// Admin token/Argon2 PHC |> The plain text token or Argon2 PHC string used to authenticate in this very same page. Changing it here will not deauthorize the current session!
@@ -515,7 +520,7 @@ make_config! {
        /// Set to the string "none" (without quotes), to disable any headers and just use the remote IP
        ip_header: String, true, def, "X-Real-IP".to_string();
        /// Internal IP header property, used to avoid recomputing each time
-        _ip_header_enabled: bool, false, gen, |c| &c.ip_header.trim().to_lowercase() != "none";
+        _ip_header_enabled: bool, false, generated, |c| &c.ip_header.trim().to_lowercase() != "none";
        /// Icon service |> The predefined icon services are: internal, bitwarden, duckduckgo, google.
        /// To specify a custom icon service, set a URL template with exactly one instance of `{}`,
        /// which is replaced with the domain. For example: `https://icon.example.com/domain/{}`.
@@ -524,9 +529,9 @@ make_config! {
        /// corresponding icon at the external service.
        icon_service: String, false, def, "internal".to_string();
        /// _icon_service_url
-        _icon_service_url: String, false, gen, |c| generate_icon_service_url(&c.icon_service);
+        _icon_service_url: String, false, generated, |c| generate_icon_service_url(&c.icon_service);
        /// _icon_service_csp
-        _icon_service_csp: String, false, gen, |c| generate_icon_service_csp(&c.icon_service, &c._icon_service_url);
+        _icon_service_csp: String, false, generated, |c| generate_icon_service_csp(&c.icon_service, &c._icon_service_url);
        /// Icon redirect code |> The HTTP status code to use for redirects to an external icon service.
        /// The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent).
        /// Temporary redirects are useful while testing different icon services, but once a service
@@ -624,7 +629,12 @@ make_config! {
        /// WARNING: This could cause issues with clients. Also exports will not work on Bitwarden servers!
        increase_note_size_limit: bool, true, def, false;
        /// Generated max_note_size value to prevent if..else matching during every check
-        _max_note_size: usize, false, gen, |c| if c.increase_note_size_limit {100_000} else {10_000};
+        _max_note_size: usize, false, generated, |c| if c.increase_note_size_limit {100_000} else {10_000};
+
+        /// Enforce Single Org with Reset Password Policy |> Enforce that the Single Org policy is enabled before setting the Reset Password policy
+        /// Bitwarden enforces this by default. In Vaultwarden we encouraged to use multiple organizations because groups were not available.
+        /// Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
+        enforce_single_org_with_reset_pw_policy: bool, false, def, false;
    },

    /// Yubikey settings
@@ -690,7 +700,7 @@ make_config! {
        /// Embed images as email attachments.
        smtp_embed_images: bool, true, def, true;
        /// _smtp_img_src
-        _smtp_img_src: String, false, gen, |c| generate_smtp_img_src(c.smtp_embed_images, &c.domain);
+        _smtp_img_src: String, false, generated, |c| generate_smtp_img_src(c.smtp_embed_images, &c.domain);
        /// Enable SMTP debugging (Know the risks!) |> DANGEROUS: Enabling this will output very detailed SMTP messages. This could contain sensitive information like passwords and usernames! Only enable this during troubleshooting!
        smtp_debug: bool, false, def, false;
        /// Accept Invalid Certs (Know the risks!) |> DANGEROUS: Allow invalid certificates. This option introduces significant vulnerabilities to man-in-the-middle attacks!
@@ -801,8 +811,15 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
    }

    // TODO: deal with deprecated flags so they can be removed from this list, cf. #4263
-    const KNOWN_FLAGS: &[&str] =
-        &["autofill-overlay", "autofill-v2", "browser-fileless-import", "fido2-vault-credentials"];
+    const KNOWN_FLAGS: &[&str] = &[
+        "autofill-overlay",
+        "autofill-v2",
+        "browser-fileless-import",
+        "extension-refresh",
+        "fido2-vault-credentials",
+        "ssh-key-vault-item",
+        "ssh-agent",
+    ];
    let configured_flags = parse_experimental_client_feature_flags(&cfg.experimental_client_feature_flags);
    let invalid_flags: Vec<_> = configured_flags.keys().filter(|flag| !KNOWN_FLAGS.contains(&flag.as_str())).collect();
    if !invalid_flags.is_empty() {
@@ -1100,7 +1117,9 @@ impl Config {

        // Fill any missing with defaults
        let config = builder.build();
-        validate_config(&config)?;
+        if !SKIP_CONFIG_VALIDATION.load(Ordering::Relaxed) {
+            validate_config(&config)?;
+        }

        Ok(Config {
            inner: RwLock::new(Inner {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn private_rsa_key(&self) -> String {
|
pub fn private_rsa_key(&self) -> String {
|
||||||
format!("{}.pem", CONFIG.rsa_key_filename())
|
format!("{}.pem", self.rsa_key_filename())
|
||||||
}
|
}
|
||||||
pub fn mail_enabled(&self) -> bool {
|
pub fn mail_enabled(&self) -> bool {
|
||||||
let inner = &self.inner.read().unwrap().config;
|
let inner = &self.inner.read().unwrap().config;
|
||||||
@@ -1251,21 +1270,22 @@ impl Config {
|
|||||||
token.is_some() && !token.unwrap().trim().is_empty()
|
token.is_some() && !token.unwrap().trim().is_empty()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn render_template<T: serde::ser::Serialize>(
|
pub fn render_template<T: serde::ser::Serialize>(&self, name: &str, data: &T) -> Result<String, Error> {
|
||||||
&self,
|
if self.reload_templates() {
|
||||||
name: &str,
|
|
||||||
data: &T,
|
|
||||||
) -> Result<String, crate::error::Error> {
|
|
||||||
if CONFIG.reload_templates() {
|
|
||||||
warn!("RELOADING TEMPLATES");
|
warn!("RELOADING TEMPLATES");
|
||||||
let hb = load_templates(CONFIG.templates_folder());
|
let hb = load_templates(CONFIG.templates_folder());
|
||||||
hb.render(name, data).map_err(Into::into)
|
hb.render(name, data).map_err(Into::into)
|
||||||
} else {
|
} else {
|
||||||
let hb = &CONFIG.inner.read().unwrap().templates;
|
let hb = &self.inner.read().unwrap().templates;
|
||||||
hb.render(name, data).map_err(Into::into)
|
hb.render(name, data).map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn render_fallback_template<T: serde::ser::Serialize>(&self, name: &str, data: &T) -> Result<String, Error> {
|
||||||
|
let hb = &self.inner.read().unwrap().templates;
|
||||||
|
hb.render(&format!("fallback_{name}"), data).map_err(Into::into)
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_rocket_shutdown_handle(&self, handle: rocket::Shutdown) {
|
pub fn set_rocket_shutdown_handle(&self, handle: rocket::Shutdown) {
|
||||||
self.inner.write().unwrap().rocket_shutdown_handle = Some(handle);
|
self.inner.write().unwrap().rocket_shutdown_handle = Some(handle);
|
||||||
}
|
}
|
||||||
@@ -1304,6 +1324,11 @@ where
|
|||||||
reg!($name);
|
reg!($name);
|
||||||
reg!(concat!($name, $ext));
|
reg!(concat!($name, $ext));
|
||||||
}};
|
}};
|
||||||
|
(@withfallback $name:expr) => {{
|
||||||
|
let template = include_str!(concat!("static/templates/", $name, ".hbs"));
|
||||||
|
hb.register_template_string($name, template).unwrap();
|
||||||
|
hb.register_template_string(concat!("fallback_", $name), template).unwrap();
|
||||||
|
}};
|
||||||
}
|
}
|
||||||
|
|
||||||
// First register default templates here
|
// First register default templates here
|
||||||
@@ -1347,6 +1372,9 @@ where
|
|||||||
|
|
||||||
reg!("404");
|
reg!("404");
|
||||||
|
|
||||||
|
reg!(@withfallback "scss/vaultwarden.scss");
|
||||||
|
reg!("scss/user.vaultwarden.scss");
|
||||||
|
|
||||||
// And then load user templates to overwrite the defaults
|
// And then load user templates to overwrite the defaults
|
||||||
// Use .hbs extension for the files
|
// Use .hbs extension for the files
|
||||||
// Templates get registered with their relative name
|
// Templates get registered with their relative name
|
||||||
|
@@ -300,19 +300,17 @@ pub trait FromDb {
|
|||||||
|
|
||||||
impl<T: FromDb> FromDb for Vec<T> {
|
impl<T: FromDb> FromDb for Vec<T> {
|
||||||
type Output = Vec<T::Output>;
|
type Output = Vec<T::Output>;
|
||||||
#[allow(clippy::wrong_self_convention)]
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn from_db(self) -> Self::Output {
|
fn from_db(self) -> Self::Output {
|
||||||
self.into_iter().map(crate::db::FromDb::from_db).collect()
|
self.into_iter().map(FromDb::from_db).collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: FromDb> FromDb for Option<T> {
|
impl<T: FromDb> FromDb for Option<T> {
|
||||||
type Output = Option<T::Output>;
|
type Output = Option<T::Output>;
|
||||||
#[allow(clippy::wrong_self_convention)]
|
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
fn from_db(self) -> Self::Output {
|
fn from_db(self) -> Self::Output {
|
||||||
self.map(crate::db::FromDb::from_db)
|
self.map(FromDb::from_db)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -368,23 +366,31 @@ pub mod models;
|
|||||||
|
|
||||||
/// Creates a back-up of the sqlite database
|
/// Creates a back-up of the sqlite database
|
||||||
/// MySQL/MariaDB and PostgreSQL are not supported.
|
/// MySQL/MariaDB and PostgreSQL are not supported.
|
||||||
pub async fn backup_database(conn: &mut DbConn) -> Result<(), Error> {
|
pub async fn backup_database(conn: &mut DbConn) -> Result<String, Error> {
|
||||||
db_run! {@raw conn:
|
db_run! {@raw conn:
|
||||||
postgresql, mysql {
|
postgresql, mysql {
|
||||||
let _ = conn;
|
let _ = conn;
|
||||||
err!("PostgreSQL and MySQL/MariaDB do not support this backup feature");
|
err!("PostgreSQL and MySQL/MariaDB do not support this backup feature");
|
||||||
}
|
}
|
||||||
sqlite {
|
sqlite {
|
||||||
use std::path::Path;
|
backup_sqlite_database(conn)
|
||||||
let db_url = CONFIG.database_url();
|
|
||||||
let db_path = Path::new(&db_url).parent().unwrap().to_string_lossy();
|
|
||||||
let file_date = chrono::Utc::now().format("%Y%m%d_%H%M%S").to_string();
|
|
||||||
diesel::sql_query(format!("VACUUM INTO '{db_path}/db_{file_date}.sqlite3'")).execute(conn)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(sqlite)]
|
||||||
|
pub fn backup_sqlite_database(conn: &mut diesel::sqlite::SqliteConnection) -> Result<String, Error> {
|
||||||
|
use diesel::RunQueryDsl;
|
||||||
|
let db_url = CONFIG.database_url();
|
||||||
|
let db_path = std::path::Path::new(&db_url).parent().unwrap();
|
||||||
|
let backup_file = db_path
|
||||||
|
.join(format!("db_{}.sqlite3", chrono::Utc::now().format("%Y%m%d_%H%M%S")))
|
||||||
|
.to_string_lossy()
|
||||||
|
.into_owned();
|
||||||
|
diesel::sql_query(format!("VACUUM INTO '{backup_file}'")).execute(conn)?;
|
||||||
|
Ok(backup_file)
|
||||||
|
}
|
||||||
|
|
||||||
/// Get the SQL Server version
|
/// Get the SQL Server version
|
||||||
pub async fn get_sql_server_version(conn: &mut DbConn) -> String {
|
pub async fn get_sql_server_version(conn: &mut DbConn) -> String {
|
||||||
db_run! {@raw conn:
|
db_run! {@raw conn:
|
||||||
|
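backup_database now returns the path of the backup it wrote, so callers can report it. A rough usage sketch (the calling context and log message are assumptions):

    async fn run_backup(conn: &mut DbConn) -> Result<(), Error> {
        // SQLite only; MySQL/MariaDB and PostgreSQL return an error instead of a path.
        let backup_file = backup_database(conn).await?;
        info!("Backup written to {backup_file}");
        Ok(())
    }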
@@ -30,7 +30,8 @@ db_object! {
    Login = 1,
    SecureNote = 2,
    Card = 3,
-    Identity = 4
+    Identity = 4,
+    SshKey = 5
    */
    pub atype: i32,
    pub name: String,
@@ -79,19 +80,39 @@ impl Cipher {
        }
    }

-    pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
+    pub fn validate_cipher_data(cipher_data: &[CipherData]) -> EmptyResult {
        let mut validation_errors = serde_json::Map::new();
        let max_note_size = CONFIG._max_note_size();
        let max_note_size_msg =
            format!("The field Notes exceeds the maximum encrypted value length of {} characters.", &max_note_size);
        for (index, cipher) in cipher_data.iter().enumerate() {
+            // Validate the note size and if it is exceeded return a warning
            if let Some(note) = &cipher.notes {
                if note.len() > max_note_size {
                    validation_errors
                        .insert(format!("Ciphers[{index}].Notes"), serde_json::to_value([&max_note_size_msg]).unwrap());
                }
            }
+
+            // Validate the password history if it contains `null` values and if so, return a warning
+            if let Some(Value::Array(password_history)) = &cipher.password_history {
+                for pwh in password_history {
+                    if let Value::Object(pwo) = pwh {
+                        if pwo.get("password").is_some_and(|p| !p.is_string()) {
+                            validation_errors.insert(
+                                format!("Ciphers[{index}].Notes"),
+                                serde_json::to_value([
+                                    "The password history contains a `null` value. Only strings are allowed.",
+                                ])
+                                .unwrap(),
+                            );
+                            break;
+                        }
+                    }
+                }
+            }
        }

        if !validation_errors.is_empty() {
            let err_json = json!({
                "message": "The model state is invalid.",
@@ -153,27 +174,70 @@ impl Cipher {
            .as_ref()
            .and_then(|s| {
                serde_json::from_str::<Vec<LowerCase<Value>>>(s)
-                    .inspect_err(|e| warn!("Error parsing fields {:?}", e))
+                    .inspect_err(|e| warn!("Error parsing fields {e:?} for {}", self.uuid))
                    .ok()
            })
-            .map(|d| d.into_iter().map(|d| d.data).collect())
+            .map(|d| {
+                d.into_iter()
+                    .map(|mut f| {
+                        // Check if the `type` key is a number, strings break some clients
+                        // The fallback type is the hidden type `1`. this should prevent accidental data disclosure
+                        // If not try to convert the string value to a number and fallback to `1`
+                        // If it is both not a number and not a string, fallback to `1`
+                        match f.data.get("type") {
+                            Some(t) if t.is_number() => {}
+                            Some(t) if t.is_string() => {
+                                let type_num = &t.as_str().unwrap_or("1").parse::<u8>().unwrap_or(1);
+                                f.data["type"] = json!(type_num);
+                            }
+                            _ => {
+                                f.data["type"] = json!(1);
+                            }
+                        }
+                        f.data
+                    })
+                    .collect()
+            })
            .unwrap_or_default();

        let password_history_json: Vec<_> = self
            .password_history
            .as_ref()
            .and_then(|s| {
                serde_json::from_str::<Vec<LowerCase<Value>>>(s)
-                    .inspect_err(|e| warn!("Error parsing password history {:?}", e))
+                    .inspect_err(|e| warn!("Error parsing password history {e:?} for {}", self.uuid))
                    .ok()
            })
-            .map(|d| d.into_iter().map(|d| d.data).collect())
+            .map(|d| {
+                // Check every password history item if they are valid and return it.
+                // If a password field has the type `null` skip it, it breaks newer Bitwarden clients
+                // A second check is done to verify the lastUsedDate exists and is a valid DateTime string, if not the epoch start time will be used
+                d.into_iter()
+                    .filter_map(|d| match d.data.get("password") {
+                        Some(p) if p.is_string() => Some(d.data),
+                        _ => None,
+                    })
+                    .map(|mut d| match d.get("lastUsedDate").and_then(|l| l.as_str()) {
+                        Some(l) => {
+                            d["lastUsedDate"] = json!(crate::util::validate_and_format_date(l));
+                            d
+                        }
+                        _ => {
+                            d["lastUsedDate"] = json!("1970-01-01T00:00:00.000000Z");
+                            d
+                        }
+                    })
+                    .collect()
+            })
            .unwrap_or_default();

        // Get the type_data or a default to an empty json object '{}'.
        // If not passing an empty object, mobile clients will crash.
-        let mut type_data_json = serde_json::from_str::<LowerCase<Value>>(&self.data)
-            .map(|d| d.data)
-            .unwrap_or_else(|_| Value::Object(serde_json::Map::new()));
+        let mut type_data_json =
+            serde_json::from_str::<LowerCase<Value>>(&self.data).map(|d| d.data).unwrap_or_else(|_| {
+                warn!("Error parsing data field for {}", self.uuid);
+                Value::Object(serde_json::Map::new())
+            });

        // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
        // Set the first element of the Uris array as Uri, this is needed several (mobile) clients.
@@ -189,10 +253,13 @@ impl Cipher {

        // Fix secure note issues when data is invalid
        // This breaks at least the native mobile clients
-        if self.atype == 2
-            && (self.data.is_empty() || self.data.eq("{}") || self.data.to_ascii_lowercase().eq("{\"type\":null}"))
-        {
-            type_data_json = json!({"type": 0});
+        if self.atype == 2 {
+            match type_data_json {
+                Value::Object(ref t) if t.get("type").is_some_and(|t| t.is_number()) => {}
+                _ => {
+                    type_data_json = json!({"type": 0});
+                }
+            }
        }

        // Clone the type_data and add some default value.
@@ -200,7 +267,7 @@ impl Cipher {

        // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
        // data_json should always contain the following keys with every atype
-        data_json["fields"] = Value::Array(fields_json.clone());
+        data_json["fields"] = json!(fields_json);
        data_json["name"] = json!(self.name);
        data_json["notes"] = json!(self.notes);
        data_json["passwordHistory"] = Value::Array(password_history_json.clone());
@@ -253,6 +320,7 @@ impl Cipher {
            "secureNote": null,
            "card": null,
            "identity": null,
+            "sshKey": null,
        });

        // These values are only needed for user/default syncs
@@ -281,6 +349,7 @@ impl Cipher {
            2 => "secureNote",
            3 => "card",
            4 => "identity",
+            5 => "sshKey",
            _ => panic!("Wrong type"),
        };

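The field normalization above reduces to one rule: keep numeric type values, parse string values, and fall back to the hidden type 1 in every other case. A stand-alone sketch of that rule (names are illustrative, not from the diff):

    fn normalize_field_type(t: Option<&serde_json::Value>) -> serde_json::Value {
        match t {
            Some(v) if v.is_number() => v.clone(),
            // Strings such as "0" are parsed; anything unparsable becomes the hidden type 1
            Some(v) if v.is_string() => serde_json::json!(v.as_str().unwrap_or("1").parse::<u8>().unwrap_or(1)),
            _ => serde_json::json!(1),
        }
    }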
@@ -78,28 +78,46 @@ impl Collection {
        cipher_sync_data: Option<&crate::api::core::CipherSyncData>,
        conn: &mut DbConn,
    ) -> Value {
-        let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data {
+        let (read_only, hide_passwords, can_manage) = if let Some(cipher_sync_data) = cipher_sync_data {
            match cipher_sync_data.user_organizations.get(&self.org_uuid) {
-                Some(uo) if uo.has_full_access() => (false, false),
-                Some(_) => {
+                // Only for Manager types Bitwarden returns true for the can_manage option
+                // Owners and Admins always have true
+                Some(uo) if uo.has_full_access() => (false, false, uo.atype >= UserOrgType::Manager),
+                Some(uo) => {
+                    // Only let a manager manage collections when the have full read/write access
+                    let is_manager = uo.atype == UserOrgType::Manager;
                    if let Some(uc) = cipher_sync_data.user_collections.get(&self.uuid) {
-                        (uc.read_only, uc.hide_passwords)
+                        (uc.read_only, uc.hide_passwords, is_manager && !uc.read_only && !uc.hide_passwords)
                    } else if let Some(cg) = cipher_sync_data.user_collections_groups.get(&self.uuid) {
-                        (cg.read_only, cg.hide_passwords)
+                        (cg.read_only, cg.hide_passwords, is_manager && !cg.read_only && !cg.hide_passwords)
                    } else {
-                        (false, false)
+                        (false, false, false)
                    }
                }
-                _ => (true, true),
+                _ => (true, true, false),
            }
        } else {
-            (!self.is_writable_by_user(user_uuid, conn).await, self.hide_passwords_for_user(user_uuid, conn).await)
+            match UserOrganization::find_confirmed_by_user_and_org(user_uuid, &self.org_uuid, conn).await {
+                Some(ou) if ou.has_full_access() => (false, false, ou.atype >= UserOrgType::Manager),
+                Some(ou) => {
+                    let is_manager = ou.atype == UserOrgType::Manager;
+                    let read_only = !self.is_writable_by_user(user_uuid, conn).await;
+                    let hide_passwords = self.hide_passwords_for_user(user_uuid, conn).await;
+                    (read_only, hide_passwords, is_manager && !read_only && !hide_passwords)
+                }
+                _ => (
+                    !self.is_writable_by_user(user_uuid, conn).await,
+                    self.hide_passwords_for_user(user_uuid, conn).await,
+                    false,
+                ),
+            }
        };

        let mut json_object = self.to_json();
        json_object["object"] = json!("collectionDetails");
        json_object["readOnly"] = json!(read_only);
        json_object["hidePasswords"] = json!(hide_passwords);
+        json_object["manage"] = json!(can_manage);
        json_object
    }

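Both branches above compute the new manage flag with the same predicate: only a Manager (full-access owners and admins are handled separately) and only when the collection is neither read-only nor hiding passwords for that user. A reduced sketch of the predicate (function name is illustrative):

    fn can_manage_collection(is_manager: bool, read_only: bool, hide_passwords: bool) -> bool {
        // Mirrors the tuple logic above
        is_manager && !read_only && !hide_passwords
    }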
@@ -16,7 +16,7 @@ db_object! {
        pub user_uuid: String,

        pub name: String,
-        pub atype: i32, // https://github.com/bitwarden/server/blob/master/src/Core/Enums/DeviceType.cs
+        pub atype: i32, // https://github.com/bitwarden/server/blob/dcc199bcce4aa2d5621f6fab80f1b49d8b143418/src/Core/Enums/DeviceType.cs
        pub push_uuid: Option<String>,
        pub push_token: Option<String>,

@@ -267,6 +267,9 @@ pub enum DeviceType {
    SafariExtension = 20,
    Sdk = 21,
    Server = 22,
+    WindowsCLI = 23,
+    MacOsCLI = 24,
+    LinuxCLI = 25,
}

impl fmt::Display for DeviceType {
@@ -278,23 +281,26 @@ impl fmt::Display for DeviceType {
            DeviceType::FirefoxExtension => write!(f, "Firefox Extension"),
            DeviceType::OperaExtension => write!(f, "Opera Extension"),
            DeviceType::EdgeExtension => write!(f, "Edge Extension"),
-            DeviceType::WindowsDesktop => write!(f, "Windows Desktop"),
-            DeviceType::MacOsDesktop => write!(f, "MacOS Desktop"),
-            DeviceType::LinuxDesktop => write!(f, "Linux Desktop"),
-            DeviceType::ChromeBrowser => write!(f, "Chrome Browser"),
-            DeviceType::FirefoxBrowser => write!(f, "Firefox Browser"),
-            DeviceType::OperaBrowser => write!(f, "Opera Browser"),
-            DeviceType::EdgeBrowser => write!(f, "Edge Browser"),
+            DeviceType::WindowsDesktop => write!(f, "Windows"),
+            DeviceType::MacOsDesktop => write!(f, "macOS"),
+            DeviceType::LinuxDesktop => write!(f, "Linux"),
+            DeviceType::ChromeBrowser => write!(f, "Chrome"),
+            DeviceType::FirefoxBrowser => write!(f, "Firefox"),
+            DeviceType::OperaBrowser => write!(f, "Opera"),
+            DeviceType::EdgeBrowser => write!(f, "Edge"),
            DeviceType::IEBrowser => write!(f, "Internet Explorer"),
            DeviceType::UnknownBrowser => write!(f, "Unknown Browser"),
-            DeviceType::AndroidAmazon => write!(f, "Android Amazon"),
+            DeviceType::AndroidAmazon => write!(f, "Android"),
            DeviceType::Uwp => write!(f, "UWP"),
-            DeviceType::SafariBrowser => write!(f, "Safari Browser"),
-            DeviceType::VivaldiBrowser => write!(f, "Vivaldi Browser"),
+            DeviceType::SafariBrowser => write!(f, "Safari"),
+            DeviceType::VivaldiBrowser => write!(f, "Vivaldi"),
            DeviceType::VivaldiExtension => write!(f, "Vivaldi Extension"),
            DeviceType::SafariExtension => write!(f, "Safari Extension"),
            DeviceType::Sdk => write!(f, "SDK"),
            DeviceType::Server => write!(f, "Server"),
+            DeviceType::WindowsCLI => write!(f, "Windows CLI"),
+            DeviceType::MacOsCLI => write!(f, "macOS CLI"),
+            DeviceType::LinuxCLI => write!(f, "Linux CLI"),
        }
    }
}
@@ -325,6 +331,9 @@ impl DeviceType {
            20 => DeviceType::SafariExtension,
            21 => DeviceType::Sdk,
            22 => DeviceType::Server,
+            23 => DeviceType::WindowsCLI,
+            24 => DeviceType::MacOsCLI,
+            25 => DeviceType::LinuxCLI,
            _ => DeviceType::UnknownBrowser,
        }
    }
@@ -26,7 +26,7 @@ db_object! {
    }
}

-/// Local methods
+// Local methods

impl EmergencyAccess {
    pub fn new(grantor_uuid: String, email: String, status: i32, atype: i32, wait_time_days: i32) -> Self {
@@ -89,7 +89,7 @@ impl EmergencyAccess {
            Some(user) => user,
            None => {
                // remove outstanding invitations which should not exist
-                let _ = Self::delete_all_by_grantee_email(email, conn).await;
+                Self::delete_all_by_grantee_email(email, conn).await.ok();
                return None;
            }
        }
@@ -1,3 +1,7 @@
+use super::{User, UserOrganization};
+use crate::api::EmptyResult;
+use crate::db::DbConn;
+use crate::error::MapResult;
use chrono::{NaiveDateTime, Utc};
use serde_json::Value;

@@ -77,7 +81,8 @@ impl Group {
                json!({
                    "id": entry.collections_uuid,
                    "readOnly": entry.read_only,
-                    "hidePasswords": entry.hide_passwords
+                    "hidePasswords": entry.hide_passwords,
+                    "manage": false
                })
            })
            .collect();
@@ -122,13 +127,6 @@ impl GroupUser {
    }
}

-use crate::db::DbConn;
-
-use crate::api::EmptyResult;
-use crate::error::MapResult;
-
-use super::{User, UserOrganization};
-
/// Database methods
impl Group {
    pub async fn save(&mut self, conn: &mut DbConn) -> EmptyResult {
@@ -342,9 +342,11 @@ impl OrgPolicy {
        false
    }

-    pub async fn is_enabled_by_org(org_uuid: &str, policy_type: OrgPolicyType, conn: &mut DbConn) -> bool {
-        if let Some(policy) = OrgPolicy::find_by_org_and_type(org_uuid, policy_type, conn).await {
-            return policy.enabled;
+    pub async fn is_enabled_for_member(org_user_uuid: &str, policy_type: OrgPolicyType, conn: &mut DbConn) -> bool {
+        if let Some(membership) = UserOrganization::find_by_uuid(org_user_uuid, conn).await {
+            if let Some(policy) = OrgPolicy::find_by_org_and_type(&membership.org_uuid, policy_type, conn).await {
+                return policy.enabled;
+            }
        }
        false
    }
@@ -1,9 +1,13 @@
use chrono::{NaiveDateTime, Utc};
use num_traits::FromPrimitive;
use serde_json::Value;
-use std::cmp::Ordering;
+use std::{
+    cmp::Ordering,
+    collections::{HashMap, HashSet},
+};

use super::{CollectionUser, Group, GroupUser, OrgPolicy, OrgPolicyType, TwoFactor, User};
+use crate::db::models::{Collection, CollectionGroup};
use crate::CONFIG;

db_object! {
@@ -112,7 +116,7 @@ impl PartialOrd<i32> for UserOrgType {
    }

    fn ge(&self, other: &i32) -> bool {
-        matches!(self.partial_cmp(other), Some(Ordering::Greater) | Some(Ordering::Equal))
+        matches!(self.partial_cmp(other), Some(Ordering::Greater | Ordering::Equal))
    }
}

@@ -135,7 +139,7 @@ impl PartialOrd<UserOrgType> for i32 {
    }

    fn le(&self, other: &UserOrgType) -> bool {
-        matches!(self.partial_cmp(other), Some(Ordering::Less) | Some(Ordering::Equal) | None)
+        matches!(self.partial_cmp(other), Some(Ordering::Less | Ordering::Equal) | None)
    }
}

@@ -157,7 +161,6 @@ impl Organization {
            "identifier": null, // not supported by us
            "name": self.name,
            "seats": null,
-            "maxAutoscaleSeats": null,
            "maxCollections": null,
            "maxStorageGb": i16::MAX, // The value doesn't matter, we don't check server-side
            "use2fa": true,
@@ -229,6 +232,14 @@ impl UserOrganization {
        false
    }

+    /// Return the status of the user in an unrevoked state
+    pub fn get_unrevoked_status(&self) -> i32 {
+        if self.status <= UserOrgStatus::Revoked as i32 {
+            return self.status + ACTIVATE_REVOKE_DIFF;
+        }
+        self.status
+    }
+
    pub fn set_external_id(&mut self, external_id: Option<String>) -> bool {
        //Check if external id is empty. We don't want to have
        //empty strings in the database
@@ -370,7 +381,6 @@ impl UserOrganization {
            "identifier": null, // Not supported
            "name": org.name,
            "seats": null,
-            "maxAutoscaleSeats": null,
            "maxCollections": null,
            "usersGetPremium": true,
            "use2fa": true,
@@ -407,7 +417,7 @@ impl UserOrganization {
            "familySponsorshipValidUntil": null,
            "familySponsorshipToDelete": null,
            "accessSecretsManager": false,
-            "limitCollectionCreationDeletion": true,
+            "limitCollectionCreationDeletion": false, // This should be set to true only when we can handle roles like createNewCollections
            "allowAdminAccessToAllCollectionItems": true,
            "flexibleCollections": false,

@@ -453,27 +463,79 @@ impl UserOrganization {
        };

        let collections: Vec<Value> = if include_collections {
-            CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)
-                .await
-                .iter()
-                .map(|cu| {
-                    json!({
-                        "id": cu.collection_uuid,
-                        "readOnly": cu.read_only,
-                        "hidePasswords": cu.hide_passwords,
-                    })
-                })
+            // Get all collections for the user here already to prevent more queries
+            let cu: HashMap<String, CollectionUser> =
+                CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)
+                    .await
+                    .into_iter()
+                    .map(|cu| (cu.collection_uuid.clone(), cu))
+                    .collect();
+
+            // Get all collection groups for this user to prevent there inclusion
+            let cg: HashSet<String> = CollectionGroup::find_by_user(&self.user_uuid, conn)
+                .await
+                .into_iter()
+                .map(|cg| cg.collections_uuid)
+                .collect();
+
+            Collection::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn)
+                .await
+                .into_iter()
+                .filter_map(|c| {
+                    let (read_only, hide_passwords, can_manage) = if self.has_full_access() {
+                        (false, false, self.atype >= UserOrgType::Manager)
+                    } else if let Some(cu) = cu.get(&c.uuid) {
+                        (
+                            cu.read_only,
+                            cu.hide_passwords,
+                            self.atype == UserOrgType::Manager && !cu.read_only && !cu.hide_passwords,
+                        )
+                    // If previous checks failed it might be that this user has access via a group, but we should not return those elements here
+                    // Those are returned via a special group endpoint
+                    } else if cg.contains(&c.uuid) {
+                        return None;
+                    } else {
+                        (true, true, false)
+                    };
+
+                    Some(json!({
+                        "id": c.uuid,
+                        "readOnly": read_only,
+                        "hidePasswords": hide_passwords,
+                        "manage": can_manage,
+                    }))
+                })
                .collect()
        } else {
            Vec::with_capacity(0)
        };

+        let permissions = json!({
+            // TODO: Add support for Custom User Roles
+            // See: https://bitwarden.com/help/article/user-types-access-control/#custom-role
+            "accessEventLogs": false,
+            "accessImportExport": false,
+            "accessReports": false,
+            "createNewCollections": false,
+            "editAnyCollection": false,
+            "deleteAnyCollection": false,
+            "editAssignedCollections": false,
+            "deleteAssignedCollections": false,
+            "manageGroups": false,
+            "managePolicies": false,
+            "manageSso": false, // Not supported
+            "manageUsers": false,
+            "manageResetPassword": false,
+            "manageScim": false // Not supported (Not AGPLv3 Licensed)
+        });
+
        json!({
            "id": self.uuid,
            "userId": self.user_uuid,
-            "name": user.name,
+            "name": if self.get_unrevoked_status() >= UserOrgStatus::Accepted as i32 { Some(user.name) } else { None },
            "email": user.email,
            "externalId": self.external_id,
+            "avatarColor": user.avatar_color,
            "groups": groups,
            "collections": collections,

@@ -482,6 +544,13 @@ impl UserOrganization {
            "accessAll": self.access_all,
            "twoFactorEnabled": twofactor_enabled,
            "resetPasswordEnrolled": self.reset_password_key.is_some(),
+            "hasMasterPassword": !user.password_hash.is_empty(),
+
+            "permissions": permissions,
+
+            "ssoBound": false, // Not supported
+            "usesKeyConnector": false, // Not supported
+            "accessSecretsManager": false, // Not supported (Not AGPLv3 Licensed)
+
            "object": "organizationUserUserDetails",
        })
@@ -595,7 +664,7 @@ impl UserOrganization {
    }

    pub async fn find_by_email_and_org(email: &str, org_id: &str, conn: &mut DbConn) -> Option<UserOrganization> {
-        if let Some(user) = super::User::find_by_mail(email, conn).await {
+        if let Some(user) = User::find_by_mail(email, conn).await {
            if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn).await {
                return Some(user_org);
            }
@@ -13,6 +13,7 @@ db_object! {
        // must complete 2FA login before being added into the devices table.
        pub device_uuid: String,
        pub device_name: String,
+        pub device_type: i32,
        pub login_time: NaiveDateTime,
        pub ip_address: String,
    }
@@ -23,6 +24,7 @@ impl TwoFactorIncomplete {
        user_uuid: &str,
        device_uuid: &str,
        device_name: &str,
+        device_type: i32,
        ip: &ClientIp,
        conn: &mut DbConn,
    ) -> EmptyResult {
@@ -44,6 +46,7 @@ impl TwoFactorIncomplete {
            twofactor_incomplete::user_uuid.eq(user_uuid),
            twofactor_incomplete::device_uuid.eq(device_uuid),
            twofactor_incomplete::device_name.eq(device_name),
+            twofactor_incomplete::device_type.eq(device_type),
            twofactor_incomplete::login_time.eq(Utc::now().naive_utc()),
            twofactor_incomplete::ip_address.eq(ip.ip.to_string()),
        ))
@@ -1,3 +1,4 @@
+use crate::util::{format_date, get_uuid, retry};
use chrono::{NaiveDateTime, TimeDelta, Utc};
use serde_json::Value;

@@ -90,7 +91,7 @@ impl User {
        let email = email.to_lowercase();

        Self {
-            uuid: crate::util::get_uuid(),
+            uuid: get_uuid(),
            enabled: true,
            created_at: now,
            updated_at: now,
@@ -107,7 +108,7 @@ impl User {
            salt: crypto::get_random_bytes::<64>().to_vec(),
            password_iterations: CONFIG.password_iterations(),

-            security_stamp: crate::util::get_uuid(),
+            security_stamp: get_uuid(),
            stamp_exception: None,

            password_hint: None,
@@ -144,14 +145,14 @@ impl User {

    pub fn check_valid_recovery_code(&self, recovery_code: &str) -> bool {
        if let Some(ref totp_recover) = self.totp_recover {
-            crate::crypto::ct_eq(recovery_code, totp_recover.to_lowercase())
+            crypto::ct_eq(recovery_code, totp_recover.to_lowercase())
        } else {
            false
        }
    }

    pub fn check_valid_api_key(&self, key: &str) -> bool {
-        matches!(self.api_key, Some(ref api_key) if crate::crypto::ct_eq(api_key, key))
+        matches!(self.api_key, Some(ref api_key) if crypto::ct_eq(api_key, key))
    }

    /// Set the password hash generated
@@ -188,7 +189,7 @@ impl User {
    }

    pub fn reset_security_stamp(&mut self) {
-        self.security_stamp = crate::util::get_uuid();
+        self.security_stamp = get_uuid();
    }

    /// Set the stamp_exception to only allow a subsequent request matching a specific route using the current security-stamp.
@@ -259,6 +260,7 @@ impl User {
            "forcePasswordReset": false,
            "avatarColor": self.avatar_color,
            "usesKeyConnector": false,
+            "creationDate": format_date(&self.created_at),
            "object": "profile",
        })
    }
@@ -340,7 +342,7 @@ impl User {
        let updated_at = Utc::now().naive_utc();

        db_run! {conn: {
-            crate::util::retry(|| {
+            retry(|| {
                diesel::update(users::table)
                    .set(users::updated_at.eq(updated_at))
                    .execute(conn)
@@ -357,7 +359,7 @@ impl User {

    async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &mut DbConn) -> EmptyResult {
        db_run! {conn: {
-            crate::util::retry(|| {
+            retry(|| {
                diesel::update(users::table.filter(users::uuid.eq(uuid)))
                    .set(users::updated_at.eq(date))
                    .execute(conn)
@@ -169,6 +169,7 @@ table! {
        user_uuid -> Text,
        device_uuid -> Text,
        device_name -> Text,
+        device_type -> Integer,
        login_time -> Timestamp,
        ip_address -> Text,
    }
@@ -169,6 +169,7 @@ table! {
        user_uuid -> Text,
        device_uuid -> Text,
        device_name -> Text,
+        device_type -> Integer,
        login_time -> Timestamp,
        ip_address -> Text,
    }
@@ -169,6 +169,7 @@ table! {
        user_uuid -> Text,
        device_uuid -> Text,
        device_name -> Text,
+        device_type -> Integer,
        login_time -> Timestamp,
        ip_address -> Text,
    }
@@ -209,7 +209,7 @@ use rocket::http::{ContentType, Status};
use rocket::request::Request;
use rocket::response::{self, Responder, Response};

-impl<'r> Responder<'r, 'static> for Error {
+impl Responder<'_, 'static> for Error {
    fn respond_to(self, _: &Request<'_>) -> response::Result<'static> {
        match self.error {
            ErrorKind::Empty(_) => {} // Don't print the error in this situation
@@ -102,9 +102,9 @@ fn should_block_address_regex(domain_or_ip: &str) -> bool {

fn should_block_host(host: Host<&str>) -> Result<(), CustomHttpClientError> {
    let (ip, host_str): (Option<IpAddr>, String) = match host {
-        url::Host::Ipv4(ip) => (Some(ip.into()), ip.to_string()),
-        url::Host::Ipv6(ip) => (Some(ip.into()), ip.to_string()),
-        url::Host::Domain(d) => (None, d.to_string()),
+        Host::Ipv4(ip) => (Some(ip.into()), ip.to_string()),
+        Host::Ipv6(ip) => (Some(ip.into()), ip.to_string()),
+        Host::Domain(d) => (None, d.to_string()),
    };

    if let Some(ip) = ip {
104
src/mail.rs
104
src/mail.rs
@@ -17,6 +17,7 @@ use crate::{
|
|||||||
encode_jwt, generate_delete_claims, generate_emergency_access_invite_claims, generate_invite_claims,
|
encode_jwt, generate_delete_claims, generate_emergency_access_invite_claims, generate_invite_claims,
|
||||||
generate_verify_email_claims,
|
generate_verify_email_claims,
|
||||||
},
|
},
|
||||||
|
db::models::{Device, DeviceType, User},
|
||||||
error::Error,
|
error::Error,
|
||||||
CONFIG,
|
CONFIG,
|
||||||
};
|
};
|
||||||
@@ -95,7 +96,31 @@ fn smtp_transport() -> AsyncSmtpTransport<Tokio1Executor> {
|
|||||||
smtp_client.build()
|
smtp_client.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This will sanitize the string values by stripping all the html tags to prevent XSS and HTML Injections
|
||||||
|
fn sanitize_data(data: &mut serde_json::Value) {
|
||||||
|
use regex::Regex;
|
||||||
|
use std::sync::LazyLock;
|
||||||
|
static RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"<[^>]+>").unwrap());
|
||||||
|
|
||||||
|
match data {
|
||||||
|
serde_json::Value::String(s) => *s = RE.replace_all(s, "").to_string(),
|
||||||
|
serde_json::Value::Object(obj) => {
|
||||||
|
for d in obj.values_mut() {
|
||||||
|
sanitize_data(d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
serde_json::Value::Array(arr) => {
|
||||||
|
for d in arr.iter_mut() {
|
||||||
|
sanitize_data(d);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn get_text(template_name: &'static str, data: serde_json::Value) -> Result<(String, String, String), Error> {
|
fn get_text(template_name: &'static str, data: serde_json::Value) -> Result<(String, String, String), Error> {
|
||||||
|
let mut data = data;
|
||||||
|
sanitize_data(&mut data);
|
||||||
let (subject_html, body_html) = get_template(&format!("{template_name}.html"), &data)?;
|
let (subject_html, body_html) = get_template(&format!("{template_name}.html"), &data)?;
|
||||||
let (_subject_text, body_text) = get_template(template_name, &data)?;
|
let (_subject_text, body_text) = get_template(template_name, &data)?;
|
||||||
Ok((subject_html, body_html, body_text))
|
Ok((subject_html, body_html, body_text))
|
||||||
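A minimal sketch of what the sanitizer above does to template data (illustrative only, not part of the diff; it assumes the `sanitize_data` from the hunk is in scope): HTML tags inside string values are stripped recursively before the values reach the Handlebars templates.

// Sketch only; relies on the sanitize_data() defined in the diff above.
fn demo_sanitize() {
    let mut data = serde_json::json!({
        "org_name": "ACME <script>alert(1)</script>",
        "nested": { "note": "<b>bold</b> text" },
    });
    sanitize_data(&mut data);
    assert_eq!(data["org_name"], "ACME alert(1)");   // tags removed, plain text kept
    assert_eq!(data["nested"]["note"], "bold text");
}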
@@ -115,6 +140,10 @@ fn get_template(template_name: &str, data: &serde_json::Value) -> Result<(String
         None => err!("Template doesn't contain body"),
     };

+    if text_split.next().is_some() {
+        err!("Template contains more than one body");
+    }
+
     Ok((subject, body))
 }

@@ -229,37 +258,50 @@ pub async fn send_single_org_removed_from_org(address: &str, org_name: &str) ->
 }

 pub async fn send_invite(
-    address: &str,
-    uuid: &str,
+    user: &User,
     org_id: Option<String>,
     org_user_id: Option<String>,
     org_name: &str,
     invited_by_email: Option<String>,
 ) -> EmptyResult {
     let claims = generate_invite_claims(
-        uuid.to_string(),
-        String::from(address),
+        user.uuid.clone(),
+        user.email.clone(),
         org_id.clone(),
         org_user_id.clone(),
         invited_by_email,
     );
     let invite_token = encode_jwt(&claims);
+    let mut query = url::Url::parse("https://query.builder").unwrap();
+    {
+        let mut query_params = query.query_pairs_mut();
+        query_params
+            .append_pair("email", &user.email)
+            .append_pair("organizationName", org_name)
+            .append_pair("organizationId", org_id.as_deref().unwrap_or("_"))
+            .append_pair("organizationUserId", org_user_id.as_deref().unwrap_or("_"))
+            .append_pair("token", &invite_token);
+        if user.private_key.is_some() {
+            query_params.append_pair("orgUserHasExistingUser", "true");
+        }
+    }
+
+    let query_string = match query.query() {
+        None => err!("Failed to build invite URL query parameters"),
+        Some(query) => query,
+    };
+
     let (subject, body_html, body_text) = get_text(
         "email/send_org_invite",
         json!({
-            "url": CONFIG.domain(),
+            // `url.Url` would place the anchor `#` after the query parameters
+            "url": format!("{}/#/accept-organization/?{}", CONFIG.domain(), query_string),
             "img_src": CONFIG._smtp_img_src(),
-            "org_id": org_id.as_deref().unwrap_or("_"),
-            "org_user_id": org_user_id.as_deref().unwrap_or("_"),
-            "email": percent_encode(address.as_bytes(), NON_ALPHANUMERIC).to_string(),
-            "org_name_encoded": percent_encode(org_name.as_bytes(), NON_ALPHANUMERIC).to_string(),
             "org_name": org_name,
-            "token": invite_token,
         }),
     )?;

-    send_email(address, &subject, body_html, body_text).await
+    send_email(&user.email, &subject, body_html, body_text).await
 }

 pub async fn send_emergency_access_invite(
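A minimal sketch of why the invite links are now assembled through url::Url (illustrative only; the vault domain below is a placeholder): query_pairs_mut() percent-encodes the e-mail address and organization name, while the `/#/...` part is appended manually because `url::Url` would otherwise place the anchor `#` after the query parameters.

// Sketch only; assumes the `url` crate.
fn demo_invite_link() {
    let mut query = url::Url::parse("https://query.builder").unwrap();
    query
        .query_pairs_mut()
        .append_pair("email", "user+test@example.com")
        .append_pair("organizationName", "ACME & Co");

    // query() now holds the escaped pairs, e.g.
    // email=user%2Btest%40example.com&organizationName=ACME+%26+Co
    let query_string = query.query().unwrap_or("");

    // The web-vault route lives behind the `#` fragment, so the final link is put together by hand:
    let link = format!("https://vault.example.com/#/accept-organization/?{query_string}");
    println!("{link}");
}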
@@ -277,17 +319,29 @@ pub async fn send_emergency_access_invite(
         String::from(grantor_email),
     );

-    let invite_token = encode_jwt(&claims);
+    // Build the query here to ensure proper escaping
+    let mut query = url::Url::parse("https://query.builder").unwrap();
+    {
+        let mut query_params = query.query_pairs_mut();
+        query_params
+            .append_pair("id", emer_id)
+            .append_pair("name", grantor_name)
+            .append_pair("email", address)
+            .append_pair("token", &encode_jwt(&claims));
+    }
+
+    let query_string = match query.query() {
+        None => err!("Failed to build emergency invite URL query parameters"),
+        Some(query) => query,
+    };
+
     let (subject, body_html, body_text) = get_text(
         "email/send_emergency_access_invite",
         json!({
-            "url": CONFIG.domain(),
+            // `url.Url` would place the anchor `#` after the query parameters
+            "url": format!("{}/#/accept-emergency/?{query_string}", CONFIG.domain()),
             "img_src": CONFIG._smtp_img_src(),
-            "emer_id": emer_id,
-            "email": percent_encode(address.as_bytes(), NON_ALPHANUMERIC).to_string(),
             "grantor_name": grantor_name,
-            "token": invite_token,
         }),
     )?;

@@ -427,9 +481,8 @@ pub async fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult
     send_email(address, &subject, body_html, body_text).await
 }

-pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult {
+pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &Device) -> EmptyResult {
     use crate::util::upcase_first;
-    let device = upcase_first(device);

     let fmt = "%A, %B %_d, %Y at %r %Z";
     let (subject, body_html, body_text) = get_text(
@@ -438,7 +491,8 @@ pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTi
             "url": CONFIG.domain(),
             "img_src": CONFIG._smtp_img_src(),
             "ip": ip,
-            "device": device,
+            "device_name": upcase_first(&device.name),
+            "device_type": DeviceType::from_i32(device.atype).to_string(),
             "datetime": crate::util::format_naive_datetime_local(dt, fmt),
         }),
     )?;
@@ -446,9 +500,14 @@ pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTi
     send_email(address, &subject, body_html, body_text).await
 }

-pub async fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult {
+pub async fn send_incomplete_2fa_login(
+    address: &str,
+    ip: &str,
+    dt: &NaiveDateTime,
+    device_name: &str,
+    device_type: &str,
+) -> EmptyResult {
     use crate::util::upcase_first;
-    let device = upcase_first(device);

     let fmt = "%A, %B %_d, %Y at %r %Z";
     let (subject, body_html, body_text) = get_text(
@@ -457,7 +516,8 @@ pub async fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTi
             "url": CONFIG.domain(),
             "img_src": CONFIG._smtp_img_src(),
             "ip": ip,
-            "device": device,
+            "device_name": upcase_first(device_name),
+            "device_type": device_type,
             "datetime": crate::util::format_naive_datetime_local(dt, fmt),
             "time_limit": CONFIG.incomplete_2fa_time_limit(),
         }),
84 src/main.rs
@@ -40,6 +40,9 @@ use tokio::{
     io::{AsyncBufReadExt, BufReader},
 };

+#[cfg(unix)]
+use tokio::signal::unix::SignalKind;
+
 #[macro_use]
 mod error;
 mod api;
@@ -59,7 +62,7 @@ use crate::api::{WS_ANONYMOUS_SUBSCRIPTIONS, WS_USERS};
 pub use config::CONFIG;
 pub use error::{Error, MapResult};
 use rocket::data::{Limits, ToByteUnit};
-use std::sync::Arc;
+use std::sync::{atomic::Ordering, Arc};
 pub use util::is_running_in_container;

 #[rocket::main]
@@ -83,7 +86,7 @@ async fn main() -> Result<(), Error> {

     let pool = create_db_pool().await;
     schedule_jobs(pool.clone());
-    crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&mut pool.get().await.unwrap()).await.unwrap();
+    db::models::TwoFactor::migrate_u2f_to_webauthn(&mut pool.get().await.unwrap()).await.unwrap();

     let extra_debug = matches!(level, log::LevelFilter::Trace | log::LevelFilter::Debug);
     launch_rocket(pool, extra_debug).await // Blocks until program termination.
@@ -97,10 +100,12 @@ USAGE:

 FLAGS:
     -h, --help       Prints help information
-    -v, --version    Prints the app version
+    -v, --version    Prints the app and web-vault version

 COMMAND:
     hash [--preset {bitwarden|owasp}]  Generate an Argon2id PHC ADMIN_TOKEN
+    backup                             Create a backup of the SQLite database
+                                       You can also send the USR1 signal to trigger a backup

 PRESETS:                  m=         t=          p=
     bitwarden (default) 64MiB, 3 Iterations, 4 Threads
@@ -115,11 +120,14 @@ fn parse_args() {
     let version = VERSION.unwrap_or("(Version info from Git not present)");

     if pargs.contains(["-h", "--help"]) {
-        println!("vaultwarden {version}");
+        println!("Vaultwarden {version}");
         print!("{HELP}");
         exit(0);
     } else if pargs.contains(["-v", "--version"]) {
-        println!("vaultwarden {version}");
+        config::SKIP_CONFIG_VALIDATION.store(true, Ordering::Relaxed);
+        let web_vault_version = util::get_web_vault_version();
+        println!("Vaultwarden {version}");
+        println!("Web-Vault {web_vault_version}");
         exit(0);
     }

@@ -163,7 +171,7 @@ fn parse_args() {
         }

         let argon2 = Argon2::new(Argon2id, V0x13, argon2_params.build().unwrap());
-        let salt = SaltString::encode_b64(&crate::crypto::get_random_bytes::<32>()).unwrap();
+        let salt = SaltString::encode_b64(&crypto::get_random_bytes::<32>()).unwrap();

         let argon2_timer = tokio::time::Instant::now();
         if let Ok(password_hash) = argon2.hash_password(password.as_bytes(), &salt) {
@@ -174,13 +182,47 @@ fn parse_args() {
                 argon2_timer.elapsed()
             );
         } else {
-            error!("Unable to generate Argon2id PHC hash.");
+            println!("Unable to generate Argon2id PHC hash.");
             exit(1);
         }
+        } else if command == "backup" {
+            match backup_sqlite() {
+                Ok(f) => {
+                    println!("Backup to '{f}' was successful");
+                    exit(0);
+                }
+                Err(e) => {
+                    println!("Backup failed. {e:?}");
+                    exit(1);
+                }
+            }
         }
         exit(0);
     }
 }

+fn backup_sqlite() -> Result<String, Error> {
+    #[cfg(sqlite)]
+    {
+        use crate::db::{backup_sqlite_database, DbConnType};
+        if DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false) {
+            use diesel::Connection;
+            let url = CONFIG.database_url();
+
+            // Establish a connection to the sqlite database
+            let mut conn = diesel::sqlite::SqliteConnection::establish(&url)?;
+            let backup_file = backup_sqlite_database(&mut conn)?;
+            Ok(backup_file)
+        } else {
+            err_silent!("The database type is not SQLite. Backups only works for SQLite databases")
+        }
+    }
+    #[cfg(not(sqlite))]
+    {
+        err_silent!("The 'sqlite' feature is not enabled. Backups only works for SQLite databases")
+    }
+}
+
 fn launch_info() {
     println!(
         "\
@@ -344,15 +386,15 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
         {
             logger = logger.chain(fern::log_file(log_file)?);
         }
-        #[cfg(not(windows))]
+        #[cfg(unix)]
         {
-            const SIGHUP: i32 = tokio::signal::unix::SignalKind::hangup().as_raw_value();
+            const SIGHUP: i32 = SignalKind::hangup().as_raw_value();
             let path = Path::new(&log_file);
             logger = logger.chain(fern::log_reopen1(path, [SIGHUP])?);
         }
     }

-    #[cfg(not(windows))]
+    #[cfg(unix)]
     {
         if cfg!(feature = "enable_syslog") || CONFIG.use_syslog() {
             logger = chain_syslog(logger);
@@ -402,7 +444,7 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
     Ok(level)
 }

-#[cfg(not(windows))]
+#[cfg(unix)]
 fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
     let syslog_fmt = syslog::Formatter3164 {
         facility: syslog::Facility::LOG_USER,
@@ -474,10 +516,10 @@ async fn container_data_folder_is_persistent(data_folder: &str) -> bool {
         format!(" /{data_folder} ")
     };
     let mut lines = BufReader::new(mountinfo).lines();
+    let re = regex::Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap();
     while let Some(line) = lines.next_line().await.unwrap_or_default() {
         // Only execute a regex check if we find the base match
         if line.contains(&data_folder_match) {
-            let re = regex::Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap();
             if re.is_match(&line) {
                 return false;
             }
@@ -560,7 +602,23 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error>
         CONFIG.shutdown();
     });

-    let _ = instance.launch().await?;
+    #[cfg(unix)]
+    {
+        tokio::spawn(async move {
+            let mut signal_user1 = tokio::signal::unix::signal(SignalKind::user_defined1()).unwrap();
+            loop {
+                // If we need more signals to act upon, we might want to use select! here.
+                // With only one item to listen for this is enough.
+                let _ = signal_user1.recv().await;
+                match backup_sqlite() {
+                    Ok(f) => info!("Backup to '{f}' was successful"),
+                    Err(e) => error!("Backup failed. {e:?}"),
+                }
+            }
+        });
+    }
+
+    instance.launch().await?;

     info!("Vaultwarden process exited!");
     Ok(())
4 src/static/scripts/admin.js (vendored)
@@ -49,8 +49,8 @@ function _post(url, successMsg, errMsg, body, reload_page = true) {
     }).then(respText => {
         try {
             const respJson = JSON.parse(respText);
-            if (respJson.ErrorModel && respJson.ErrorModel.Message) {
-                return respJson.ErrorModel.Message;
+            if (respJson.errorModel && respJson.errorModel.message) {
+                return respJson.errorModel.message;
             } else {
                 return Promise.reject({ body: `${respStatus} - ${respStatusText}\n\nUnknown error`, error: true });
             }

@@ -7,7 +7,7 @@
 <table class="footer" cellpadding="0" cellspacing="0" width="100%" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; width: 100%;">
 <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
-<td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 15px 0 0 0;" valign="top">
+<td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 5px 0 20px 0;" valign="top">
 <table cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto;">
 <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
 <td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{img_src}}mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>

@@ -1,10 +1,11 @@
-Incomplete Two-Step Login From {{{device}}}
+Incomplete Two-Step Login From {{{device_name}}}
 <!---------------->
 Someone attempted to log into your account with the correct master password, but did not provide the correct token or action required to complete the two-step login process within {{time_limit}} minutes of the initial login attempt.

 * Date: {{datetime}}
 * IP Address: {{ip}}
-* Device Type: {{device}}
+* Device Name: {{device_name}}
+* Device Type: {{device_type}}

 If this was not you or someone you authorized, then you should change your master password as soon as possible, as it is likely to be compromised.
 {{> email/email_footer_text }}

@@ -1,4 +1,4 @@
-Incomplete Two-Step Login From {{{device}}}
+Incomplete Two-Step Login From {{{device_name}}}
 <!---------------->
 {{> email/email_header }}
 <table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
@@ -19,7 +19,12 @@ Incomplete Two-Step Login From {{{device}}}
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
             <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
-                <b>Device Type:</b> {{device}}
+                <b>Device Name:</b> {{device_name}}
+            </td>
+        </tr>
+        <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+            <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
+                <b>Device Type:</b> {{device_type}}
             </td>
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">

@@ -1,10 +1,11 @@
-New Device Logged In From {{{device}}}
+New Device Logged In From {{{device_name}}}
 <!---------------->
 Your account was just logged into from a new device.

 * Date: {{datetime}}
 * IP Address: {{ip}}
-* Device Type: {{device}}
+* Device Name: {{device_name}}
+* Device Type: {{device_type}}

 You can deauthorize all devices that have access to your account from the web vault ( {{url}} ) under Settings > My Account > Deauthorize Sessions.
 {{> email/email_footer_text }}

@@ -1,4 +1,4 @@
-New Device Logged In From {{{device}}}
+New Device Logged In From {{{device_name}}}
 <!---------------->
 {{> email/email_header }}
 <table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
@@ -9,7 +9,7 @@ New Device Logged In From {{{device}}}
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
             <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
-                <b>Date</b>: {{datetime}}
+                <b>Date:</b> {{datetime}}
             </td>
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
@@ -19,7 +19,12 @@ New Device Logged In From {{{device}}}
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
             <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
-                <b>Device Type:</b> {{device}}
+                <b>Device Name:</b> {{device_name}}
+            </td>
+        </tr>
+        <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
+            <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
+                <b>Device Type:</b> {{device_type}}
             </td>
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
@@ -28,4 +33,4 @@ New Device Logged In From {{{device}}}
         </td>
     </tr>
 </table>
 {{> email/email_footer }}

@@ -2,7 +2,7 @@ Emergency access for {{{grantor_name}}}
 <!---------------->
 You have been invited to become an emergency contact for {{grantor_name}}. To accept this invite, click the following link:

-Click here to join: {{url}}/#/accept-emergency/?id={{emer_id}}&name={{grantor_name}}&email={{email}}&token={{token}}
+Click here to join: {{{url}}}

 If you do not wish to become an emergency contact for {{grantor_name}}, you can safely ignore this email.
 {{> email/email_footer_text }}

@@ -9,7 +9,7 @@ Emergency access for {{{grantor_name}}}
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
             <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-                <a href="{{url}}/#/accept-emergency/?id={{emer_id}}&name={{grantor_name}}&email={{email}}&token={{token}}"
+                <a href="{{{url}}}"
                 clicktracking=off target="_blank" style="color: #ffffff; text-decoration: none; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; background-color: #3c8dbc; border-color: #3c8dbc; border-style: solid; border-width: 10px 20px; margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
                 Become emergency contact
                 </a>
@@ -21,4 +21,4 @@ Emergency access for {{{grantor_name}}}
         </td>
     </tr>
 </table>
 {{> email/email_footer }}

@@ -3,8 +3,8 @@ Join {{{org_name}}}
 You have been invited to join the *{{org_name}}* organization.


-Click here to join: {{url}}/#/accept-organization/?organizationId={{org_id}}&organizationUserId={{org_user_id}}&email={{email}}&organizationName={{org_name_encoded}}&token={{token}}
+Click here to join: {{{url}}}


 If you do not wish to join this organization, you can safely ignore this email.
 {{> email/email_footer_text }}

@@ -9,7 +9,7 @@ Join {{{org_name}}}
         </tr>
         <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
             <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
-                <a href="{{url}}/#/accept-organization/?organizationId={{org_id}}&organizationUserId={{org_user_id}}&email={{email}}&organizationName={{org_name_encoded}}&token={{token}}"
+                <a href="{{{url}}}"
                 clicktracking=off target="_blank" style="color: #ffffff; text-decoration: none; text-align: center; cursor: pointer; display: inline-block; border-radius: 5px; background-color: #3c8dbc; border-color: #3c8dbc; border-style: solid; border-width: 10px 20px; margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
                 Join Organization Now
                 </a>
@@ -21,4 +21,4 @@ Join {{{org_name}}}
         </td>
     </tr>
 </table>
 {{> email/email_footer }}
1 src/static/templates/scss/user.vaultwarden.scss.hbs (new file)
@@ -0,0 +1 @@
+/* See the wiki for examples and details: https://github.com/dani-garcia/vaultwarden/wiki/Customize-Vaultwarden-CSS */

105 src/static/templates/scss/vaultwarden.scss.hbs (new file)
@@ -0,0 +1,105 @@
+/**** START Static Vaultwarden changes ****/
+/* This combines all selectors extending it into one */
+%vw-hide {
+    display: none !important;
+}
+
+/* This allows searching for the combined style in the browsers dev-tools (look into the head tag) */
+.vw-hide,
+head {
+    @extend %vw-hide;
+}
+
+/* Hide the Subscription Page tab */
+bit-nav-item[route="settings/subscription"] {
+    @extend %vw-hide;
+}
+
+/* Hide any link pointing to Free Bitwarden Families */
+a[href$="/settings/sponsored-families"] {
+    @extend %vw-hide;
+}
+
+/* Hide the `Enterprise Single Sign-On` button on the login page */
+a[routerlink="/sso"] {
+    @extend %vw-hide;
+}
+
+/* Hide Two-Factor menu in Organization settings */
+bit-nav-item[route="settings/two-factor"],
+a[href$="/settings/two-factor"] {
+    @extend %vw-hide;
+}
+
+/* Hide Business Owned checkbox */
+app-org-info > form:nth-child(1) > div:nth-child(3) {
+    @extend %vw-hide;
+}
+
+/* Hide the `This account is owned by a business` checkbox and label */
+#ownedBusiness,
+label[for^="ownedBusiness"] {
+    @extend %vw-hide;
+}
+
+/* Hide the radio button and label for the `Custom` org user type */
+#userTypeCustom,
+label[for^="userTypeCustom"] {
+    @extend %vw-hide;
+}
+
+/* Hide Business Name */
+app-org-account form div bit-form-field.tw-block:nth-child(3) {
+    @extend %vw-hide;
+}
+
+/* Hide organization plans */
+app-organization-plans > form > bit-section:nth-child(2) {
+    @extend %vw-hide;
+}
+
+/* Hide Device Verification form at the Two Step Login screen */
+app-security > app-two-factor-setup > form {
+    @extend %vw-hide;
+}
+/**** END Static Vaultwarden Changes ****/
+
+/**** START Dynamic Vaultwarden Changes ****/
+{{#if signup_disabled}}
+/* Hide the register link on the login screen */
+app-frontend-layout > app-login > form > div > div > div > p {
+    @extend %vw-hide;
+}
+{{/if}}
+
+/* Hide `Email` 2FA if mail is not enabled */
+{{#unless mail_enabled}}
+app-two-factor-setup ul.list-group.list-group-2fa li.list-group-item:nth-child(5) {
+    @extend %vw-hide;
+}
+{{/unless}}
+
+/* Hide `YubiKey OTP security key` 2FA if it is not enabled */
+{{#unless yubico_enabled}}
+app-two-factor-setup ul.list-group.list-group-2fa li.list-group-item:nth-child(2) {
+    @extend %vw-hide;
+}
+{{/unless}}
+
+/* Hide Emergency Access if not allowed */
+{{#unless emergency_access_allowed}}
+bit-nav-item[route="settings/emergency-access"] {
+    @extend %vw-hide;
+}
+{{/unless}}
+
+/* Hide Sends if not allowed */
+{{#unless sends_allowed}}
+bit-nav-item[route="sends"] {
+    @extend %vw-hide;
+}
+{{/unless}}
+/**** End Dynamic Vaultwarden Changes ****/
+/**** Include a special user stylesheet for custom changes ****/
+{{#if load_user_scss}}
+{{> scss/user.vaultwarden.scss }}
+{{/if}}
46 src/util.rs
@@ -213,7 +213,7 @@ impl<'r, R: 'r + Responder<'r, 'static> + Send> Responder<'r, 'static> for Cache
     };
     res.set_raw_header("Cache-Control", cache_control_header);

-    let time_now = chrono::Local::now();
+    let time_now = Local::now();
     let expiry_time = time_now + chrono::TimeDelta::try_seconds(self.ttl.try_into().unwrap()).unwrap();
     res.set_raw_header("Expires", format_datetime_http(&expiry_time));
     Ok(res)
@@ -222,8 +222,8 @@ impl<'r, R: 'r + Responder<'r, 'static> + Send> Responder<'r, 'static> for Cache

 pub struct SafeString(String);

-impl std::fmt::Display for SafeString {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+impl fmt::Display for SafeString {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         self.0.fmt(f)
     }
 }
@@ -438,13 +438,19 @@ pub fn get_env_bool(key: &str) -> Option<bool> {

 use chrono::{DateTime, Local, NaiveDateTime, TimeZone};

-// Format used by Bitwarden API
-const DATETIME_FORMAT: &str = "%Y-%m-%dT%H:%M:%S%.6fZ";
-
 /// Formats a UTC-offset `NaiveDateTime` in the format used by Bitwarden API
 /// responses with "date" fields (`CreationDate`, `RevisionDate`, etc.).
 pub fn format_date(dt: &NaiveDateTime) -> String {
-    dt.format(DATETIME_FORMAT).to_string()
+    dt.and_utc().to_rfc3339_opts(chrono::SecondsFormat::Micros, true)
+}
+
+/// Validates and formats a RFC3339 timestamp
+/// If parsing fails it will return the start of the unix datetime
+pub fn validate_and_format_date(dt: &str) -> String {
+    match DateTime::parse_from_rfc3339(dt) {
+        Ok(dt) => dt.to_rfc3339_opts(chrono::SecondsFormat::Micros, true),
+        _ => String::from("1970-01-01T00:00:00.000000Z"),
+    }
 }

 /// Formats a `DateTime<Local>` using the specified format string.
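A minimal sketch of the date handling after this change (illustrative only, using chrono directly; the sample date is made up): format_date() now emits RFC 3339 with microsecond precision and a `Z` suffix, and validate_and_format_date() falls back to the Unix epoch when parsing fails.

// Sketch only.
use chrono::{DateTime, NaiveDate, SecondsFormat};

fn demo_dates() {
    let dt = NaiveDate::from_ymd_opt(2024, 7, 1)
        .unwrap()
        .and_hms_micro_opt(12, 34, 56, 789_000)
        .unwrap();
    // Same expression the new format_date() uses: prints "2024-07-01T12:34:56.789000Z"
    println!("{}", dt.and_utc().to_rfc3339_opts(SecondsFormat::Micros, true));

    // Invalid input makes validate_and_format_date() return "1970-01-01T00:00:00.000000Z"
    assert!(DateTime::parse_from_rfc3339("not-a-date").is_err());
}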
@@ -486,7 +492,7 @@ pub fn format_datetime_http(dt: &DateTime<Local>) -> String {
 }

 pub fn parse_date(date: &str) -> NaiveDateTime {
-    NaiveDateTime::parse_from_str(date, DATETIME_FORMAT).unwrap()
+    DateTime::parse_from_rfc3339(date).unwrap().naive_utc()
 }

 //
@@ -513,6 +519,28 @@ pub fn container_base_image() -> &'static str {
     }
 }

+#[derive(Deserialize)]
+struct WebVaultVersion {
+    version: String,
+}
+
+pub fn get_web_vault_version() -> String {
+    let version_files = [
+        format!("{}/vw-version.json", CONFIG.web_vault_folder()),
+        format!("{}/version.json", CONFIG.web_vault_folder()),
+    ];
+
+    for version_file in version_files {
+        if let Ok(version_str) = std::fs::read_to_string(&version_file) {
+            if let Ok(version) = serde_json::from_str::<WebVaultVersion>(&version_str) {
+                return String::from(version.version.trim_start_matches('v'));
+            }
+        }
+    }
+
+    String::from("Version file missing")
+}
+
 //
 // Deserialization methods
 //
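A minimal sketch of the version file that get_web_vault_version() above reads (illustrative only; the version number is made up): a small JSON document whose leading "v" is trimmed before the value is printed next to the Vaultwarden version.

// Sketch only.
use serde::Deserialize;

#[derive(Deserialize)]
struct WebVaultVersion {
    version: String,
}

fn demo_parse_version() {
    let raw = r#"{ "version": "v2024.6.2" }"#;
    let parsed: WebVaultVersion = serde_json::from_str(raw).unwrap();
    assert_eq!(parsed.version.trim_start_matches('v'), "2024.6.2");
}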
@@ -590,7 +618,7 @@ impl<'de> Visitor<'de> for LowerCaseVisitor {
 fn _process_key(key: &str) -> String {
     match key.to_lowercase().as_ref() {
         "ssn" => "ssn".into(),
-        _ => self::lcase_first(key),
+        _ => lcase_first(key),
     }
 }
