Mirror of https://github.com/dani-garcia/vaultwarden.git, synced 2025-09-10 18:55:57 +03:00
Compare commits
255 Commits
| SHA1 |
| --- |
| ad48e9ed0f |
| f724addf9a |
| aa20974703 |
| a846f6c610 |
| c218c34812 |
| 2626e66873 |
| 81e0e1b339 |
| fd1354d00e |
| 071a3b2a32 |
| 32cfaab5ee |
| d348f12a0e |
| 11845d9f5b |
| de70fbf88a |
| 0b04caab78 |
| 4c78c5a9c9 |
| 73f0841f17 |
| 4559e85daa |
| bbef332e25 |
| 1e950c7dbc |
| f14e19a3d8 |
| 668d5c23dc |
| fb6f96f5c3 |
| 6e6e34ff18 |
| 790146bfac |
| af625930d6 |
| a28ebcb401 |
| 77e47ddd1f |
| 5b620ba6cd |
| d5f9b33f66 |
| 596c9b8691 |
| d4357eb55a |
| b37f0dfde3 |
| 624791e09a |
| f9a73a9bbe |
| 35868dd72c |
| 979d010dc2 |
| b34d548246 |
| a87646b8cb |
| a2411eef56 |
| 52ed8e4d75 |
| 24c914799d |
| db53511855 |
| 325691e588 |
| fac3cb687d |
| afbf1db331 |
| 1aefaec297 |
| f1d3fb5d40 |
| ac2723f898 |
| 2fffaec226 |
| 5c54dfee3a |
| 967d2d78ec |
| 1aa5e0d4dc |
| b47cf97409 |
| 5e802f8aa3 |
| 0bdeb02a31 |
| b03698fadb |
| 39d1a09704 |
| a447e4e7ef |
| 4eee6e7aee |
| b6fde857a7 |
| 3c66deb5cc |
| 4146612a32 |
| a314933557 |
| c5d7e3f2bc |
| c95a2881b5 |
| 4c3727b4a3 |
| a1f304dff7 |
| a8870eef0d |
| afaebc6cf3 |
| 8f4a1f4fc2 |
| 0807783388 |
| 80d4061d14 |
| dc2f8e5c85 |
| aee1ea032b |
| 484e82fb9f |
| 322a08edfb |
| 08afc312c3 |
| 5571a5d8ed |
| 6a8c65493f |
| dfdf4473ea |
| 8bbbff7567 |
| 42e37ebea1 |
| 632f4d5453 |
| 6c5e35ce5c |
| 4ff15f6dc2 |
| ec8028aef2 |
| 63cbd9ef9c |
| 9cca64003a |
| 819d5e2dc8 |
| 3b06ab296b |
| 0de52c6c99 |
| e3b00b59a7 |
| 5a390a973f |
| 1ee8e44912 |
| 86685c1cd2 |
| e3feba2a2c |
| 0a68de6c24 |
| 4be8dae626 |
| e4d08836e2 |
| c2a324e5da |
| 77f95146d6 |
| 6cd8512bbd |
| 843604c9e7 |
| 7407b8326a |
| adf47827c9 |
| 5471088e93 |
| 4e85a1dee1 |
| ec60839064 |
| d4bfa1a189 |
| 862d401077 |
| 255a06382d |
| bbb0484d03 |
| 93346bc05d |
| fdf50f0064 |
| ccf6ee79d0 |
| 91dd19473d |
| c06162b22f |
| 7a6a3e4160 |
| 94341f9f3f |
| ff19fb3426 |
| baac8d9627 |
| 669b101e6a |
| 935f38692f |
| d2d9fb08cc |
| b85d548879 |
| 35f30088b2 |
| dce054e632 |
| ba725e1c25 |
| b837348b25 |
| 7d9c7017c9 |
| d6b9b8bf0c |
| bd09fe1a3d |
| bcbe6177b8 |
| 9b1d07365e |
| 37b212427c |
| 078234d8b3 |
| 3ce0c3d1a5 |
| 2ee07ea1d8 |
| 40c339db9b |
| 402c1cd06c |
| 819f340f39 |
| 1b4b40c95d |
| afd9f4e278 |
| 47a9461f39 |
| c6f64d8368 |
| edabf19ddf |
| a30d5f4cf9 |
| 3fa78e7bb1 |
| a8a7e4f9a5 |
| 5d3b765a23 |
| 70f3ab8ec3 |
| b6612e90ca |
| 161cccca30 |
| 84dc2eda1f |
| 390d10d656 |
| 1f775f4414 |
| cc404b4edc |
| 536672ac1b |
| e41e7c07db |
| f1d3b03c60 |
| 2ebff958a4 |
| edfdda86ae |
| 97fb7b5b96 |
| f6de144cbb |
| 5a974c7b94 |
| 5f61607419 |
| 7439aeb63e |
| cd8907542a |
| 8a5450e830 |
| ad9f2b2d8e |
| 2f4a9865e1 |
| 0a3008e753 |
| 29a0795219 |
| 63459c5f72 |
| 916e96b143 |
| 325039c316 |
| c5b97f4146 |
| 03233429f4 |
| 0a72c4b6db |
| 8867626de8 |
| f5916ec396 |
| ebb36235a7 |
| def174a517 |
| 2798f623d4 |
| 480ba933fa |
| 3d1ee9ef62 |
| 5352321fe1 |
| c4101162d6 |
| 632d55265b |
| e277f7d1c1 |
| ff7b4a3d38 |
| d212dfe735 |
| 84ed185579 |
| c0ba3406ef |
| e196ba6e86 |
| 76743aee48 |
| 9ebca99290 |
| a734ad2d36 |
| baf7d1be4e |
| 31bcd1bf7c |
| a3b30ed65a |
| 59e50b03bd |
| 0a88f020e1 |
| c058a1d63c |
| 96a189deb9 |
| 8c229920ad |
| d592323e39 |
| 402c857d17 |
| def858854b |
| f6761ac30e |
| f8e49ea3f4 |
| f6a4a2127b |
| 446fc3f1f8 |
| 146525db91 |
| 1698b43f9b |
| 078b21db85 |
| 43adcde094 |
| 7a0bb18dcf |
| 47a5a4e1fc |
| 0f0e5876ae |
| 43aa75dc89 |
| 95dd1cd7ad |
| 36ae946655 |
| 24edc94f9d |
| 4deae76347 |
| 8280d200ea |
| 8ee0c57224 |
| cb6f392774 |
| f250c54813 |
| 5c6081c4e2 |
| 88c56de97b |
| e274af6e3d |
| a0ece3754b |
| 0bcc2ae7ab |
| bdb90460c4 |
| 824137a02c |
| 2edc699eac |
| 8e79366076 |
| c1e39b182f |
| 13eb276085 |
| 4cec502f7b |
| 2545469713 |
| f09996a21d |
| 5cabf4d040 |
| a03db6d224 |
| 8d1b72b951 |
| 912e1f93b7 |
| a5aa4d9b54 |
| e777be3dde |
| b5441f6b77 |
| dbbd63e519 |
| adc443ea80 |
| 0d32179d07 |
| b45b02b37e |
| 12928b832c |
@@ -21,6 +21,10 @@
 ## Automatically reload the templates for every request, slow, use only for development
 # RELOAD_TEMPLATES=false
 
+## Client IP Header, used to identify the IP of the client, defaults to "X-Client-IP"
+## Set to the string "none" (without quotes), to disable any headers and just use the remote IP
+# IP_HEADER=X-Client-IP
+
 ## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
 # ICON_CACHE_TTL=2592000
 ## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
@@ -37,14 +41,14 @@
 # WEBSOCKET_ADDRESS=0.0.0.0
 # WEBSOCKET_PORT=3012
 
-## Enable extended logging
-## This shows timestamps and allows logging to file and to syslog
-### To enable logging to file, use the LOG_FILE env variable
-### To enable syslog, use the USE_SYSLOG env variable
+## Enable extended logging, which shows timestamps and targets in the logs
 # EXTENDED_LOGGING=true
 
+## Timestamp format used in extended logging.
+## Format specifiers: https://docs.rs/chrono/latest/chrono/format/strftime
+# LOG_TIMESTAMP_FORMAT="%Y-%m-%d %H:%M:%S.%3f"
+
 ## Logging to file
-## This requires extended logging
 ## It's recommended to also set 'ROCKET_CLI_COLORS=off'
 # LOG_FILE=/path/to/log
 
@@ -56,7 +60,8 @@
 ## Log level
 ## Change the verbosity of the log output
 ## Valid values are "trace", "debug", "info", "warn", "error" and "off"
-## This requires extended logging
+## Setting it to "trace" or "debug" would also show logs for mounted
+## routes and static file, websocket and alive requests
 # LOG_LEVEL=Info
 
 ## Enable WAL for the DB
@@ -184,6 +189,7 @@
 # SMTP_FROM_NAME=Bitwarden_RS
 # SMTP_PORT=587
 # SMTP_SSL=true
+# SMTP_EXPLICIT_TLS=true # N.B. This variable configures Implicit TLS. It's currently mislabelled (see bug #851)
 # SMTP_USERNAME=username
 # SMTP_PASSWORD=password
 # SMTP_AUTH_MECHANISM="Plain"
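The new LOG_TIMESTAMP_FORMAT value above is parsed with chrono's strftime-style specifiers (linked in the diff). A minimal sketch of how the default format renders, assuming only the chrono crate already listed in Cargo.toml:

```rust
// Prints a timestamp in the default LOG_TIMESTAMP_FORMAT.
// "%3f" is chrono's fixed three-digit fractional-seconds specifier,
// so the output looks like "2020-07-15 12:34:56.789".
use chrono::Local;

fn main() {
    println!("{}", Local::now().format("%Y-%m-%d %H:%M:%S.%3f"));
}
```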
.github/FUNDING.yml (vendored, 1 change)
@@ -1 +1,2 @@
 github: dani-garcia
+custom: ["https://paypal.me/DaniGG"]
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 42 additions)
@@ -0,0 +1,42 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+<!--
+Please fill out the following template to make solving your problem easier and faster for us.
+This is only a guideline. If you think that parts are unneccessary for your issue, feel free to remove them.
+
+Remember to hide/obfuscate personal and confidential information,
+such as names, global IP/DNS adresses and especially passwords, if neccessary.
+-->
+
+### Subject of the issue
+<!-- Describe your issue here.-->
+
+### Your environment
+<!-- The version number, obtained from the logs or the admin page -->
+* Bitwarden_rs version:
+<!-- How the server was installed: Docker image / package / built from source -->
+* Install method:
+* Clients used: <!-- if applicable -->
+* Reverse proxy and version: <!-- if applicable -->
+* Version of mysql/postgresql: <!-- if applicable -->
+* Other relevant information:
+
+### Steps to reproduce
+<!-- Tell us how to reproduce this issue. What parameters did you set (differently from the defaults)
+and how did you start bitwarden_rs? -->
+
+### Expected behaviour
+<!-- Tell us what should happen -->
+
+### Actual behaviour
+<!-- Tell us what happens instead -->
+
+### Relevant logs
+<!-- Share some logfiles, screenshots or output of relevant programs with us. -->
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 11 additions)
@@ -0,0 +1,11 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: better for forum
+assignees: ''
+
+---
+
+# Please submit all your feature requests to the forum
+Link: https://bitwardenrs.discourse.group/c/feature-requests
.github/ISSUE_TEMPLATE/help-with-installation-configuration.md (vendored, new file, 11 additions)
@@ -0,0 +1,11 @@
+---
+name: Help with installation/configuration
+about: Any questions about the setup of bitwarden_rs
+title: ''
+labels: better for forum
+assignees: ''
+
+---
+
+# Please submit all your third party help requests to the forum
+Link: https://bitwardenrs.discourse.group/c/help
.github/ISSUE_TEMPLATE/help-with-proxy-database-nas-setup.md (vendored, new file, 11 additions)
@@ -0,0 +1,11 @@
+---
+name: Help with proxy/database/NAS setup
+about: Any questions about third party software
+title: ''
+labels: better for forum
+assignees: ''
+
+---
+
+# Please submit all your third party help requests to the forum
+Link: https://bitwardenrs.discourse.group/c/third-party-help
.github/workflows/workspace.yml (vendored, new file, 148 additions)
@@ -0,0 +1,148 @@
+name: Workflow
+
+on:
+  push:
+    paths-ignore:
+      - "**.md"
+  #pull_request:
+  #  paths-ignore:
+  #    - "**.md"
+
+jobs:
+  build:
+    name: Build
+    strategy:
+      fail-fast: false
+      matrix:
+        db-backend: [sqlite, mysql, postgresql]
+        target:
+          - x86_64-unknown-linux-gnu
+          # - x86_64-unknown-linux-musl
+          # - x86_64-apple-darwin
+          # - x86_64-pc-windows-msvc
+        include:
+          - target: x86_64-unknown-linux-gnu
+            os: ubuntu-latest
+            ext:
+          # - target: x86_64-unknown-linux-musl
+          #   os: ubuntu-latest
+          #   ext:
+          # - target: x86_64-apple-darwin
+          #   os: macOS-latest
+          #   ext:
+          # - target: x86_64-pc-windows-msvc
+          #   os: windows-latest
+          #   ext: .exe
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v1
+
+      # - name: Cache choco cache
+      #   uses: actions/cache@v1.0.3
+      #   if: matrix.os == 'windows-latest'
+      #   with:
+      #     path: ~\AppData\Local\Temp\chocolatey
+      #     key: ${{ runner.os }}-choco-cache-${{ matrix.db-backend }}
+
+      - name: Cache vcpkg installed
+        uses: actions/cache@v1.0.3
+        if: matrix.os == 'windows-latest'
+        with:
+          path: $VCPKG_ROOT/installed
+          key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
+        env:
+          VCPKG_ROOT: 'C:\vcpkg'
+
+      - name: Cache vcpkg downloads
+        uses: actions/cache@v1.0.3
+        if: matrix.os == 'windows-latest'
+        with:
+          path: $VCPKG_ROOT/downloads
+          key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
+        env:
+          VCPKG_ROOT: 'C:\vcpkg'
+
+      # - name: Cache homebrew
+      #   uses: actions/cache@v1.0.3
+      #   if: matrix.os == 'macOS-latest'
+      #   with:
+      #     path: ~/Library/Caches/Homebrew
+      #     key: ${{ runner.os }}-brew-cache
+
+      # - name: Cache apt
+      #   uses: actions/cache@v1.0.3
+      #   if: matrix.os == 'ubuntu-latest'
+      #   with:
+      #     path: /var/cache/apt/archives
+      #     key: ${{ runner.os }}-apt-cache
+
+      # Install dependencies
+      - name: Install dependencies macOS
+        run: brew update; brew install openssl sqlite libpq mysql
+        if: matrix.os == 'macOS-latest'
+
+      - name: Install dependencies Ubuntu
+        run: sudo apt-get update && sudo apt-get install --no-install-recommends openssl sqlite libpq-dev libmysql++-dev
+        if: matrix.os == 'ubuntu-latest'
+
+      - name: Install dependencies Windows
+        run: vcpkg integrate install; vcpkg install sqlite3:x64-windows openssl:x64-windows libpq:x64-windows libmysql:x64-windows
+        if: matrix.os == 'windows-latest'
+        env:
+          VCPKG_ROOT: 'C:\vcpkg'
+      # End Install dependencies
+
+      # Install rust nightly toolchain
+      - name: Cache cargo registry
+        uses: actions/cache@v1.0.3
+        with:
+          path: ~/.cargo/registry
+          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
+      - name: Cache cargo index
+        uses: actions/cache@v1.0.3
+        with:
+          path: ~/.cargo/git
+          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
+      - name: Cache cargo build
+        uses: actions/cache@v1.0.3
+        with:
+          path: target
+          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+
+      - name: Install latest nightly
+        uses: actions-rs/toolchain@v1.0.5
+        with:
+          # Uses rust-toolchain to determine version
+          profile: minimal
+          target: ${{ matrix.target }}
+
+      # Build
+      - name: Build Win
+        if: matrix.os == 'windows-latest'
+        run: cargo.exe build --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
+        env:
+          RUSTFLAGS: -Ctarget-feature=+crt-static
+          VCPKG_ROOT: 'C:\vcpkg'
+
+      - name: Build macOS / Ubuntu
+        if: matrix.os == 'macOS-latest' || matrix.os == 'ubuntu-latest'
+        run: cargo build --verbose --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
+
+      # Test
+      - name: Run tests
+        run: cargo test --features ${{ matrix.db-backend }}
+
+      # Upload & Release
+      - name: Upload artifact
+        uses: actions/upload-artifact@v1.0.0
+        with:
+          name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
+          path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
+
+      - name: Release
+        uses: Shopify/upload-to-release@1.0.0
+        if: startsWith(github.ref, 'refs/tags/')
+        with:
+          name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
+          path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
@@ -17,5 +17,5 @@ before_install:
 install: true
 script:
 - git ls-files --exclude='Dockerfile*' --ignored | xargs --max-lines=1 hadolint
-- cargo build --features "sqlite"
-- cargo build --features "mysql"
+- cargo test --features "sqlite"
+- cargo test --features "mysql"
Cargo.lock (generated, 3292 changes): file diff suppressed because it is too large.
Cargo.toml (88 changes)
@@ -14,9 +14,13 @@ build = "build.rs"
 # Empty to keep compatibility, prefer to set USE_SYSLOG=true
 enable_syslog = []
 mysql = ["diesel/mysql", "diesel_migrations/mysql"]
-postgresql = ["diesel/postgres", "diesel_migrations/postgres", "openssl"]
+postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
 sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]
 
+# Enable unstable features, requires nightly
+# Currently only used to enable rusts official ip support
+unstable = []
+
 [target."cfg(not(windows))".dependencies]
 syslog = "4.0.1"
 
@@ -26,104 +30,102 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
 rocket_contrib = "0.5.0-dev"
 
 # HTTP client
-reqwest = "0.9.22"
+reqwest = { version = "0.10.6", features = ["blocking", "json"] }
 
 # multipart/form-data support
-multipart = { version = "0.16.1", features = ["server"], default-features = false }
+multipart = { version = "0.17.0", features = ["server"], default-features = false }
 
 # WebSockets library
 ws = "0.9.1"
 
 # MessagePack library
-rmpv = "0.4.2"
+rmpv = "0.4.4"
 
 # Concurrent hashmap implementation
 chashmap = "2.2.2"
 
 # A generic serialization/deserialization framework
-serde = "1.0.103"
-serde_derive = "1.0.103"
-serde_json = "1.0.42"
+serde = "1.0.114"
+serde_derive = "1.0.114"
+serde_json = "1.0.56"
 
 # Logging
-log = "0.4.8"
-fern = { version = "0.5.9", features = ["syslog-4"] }
+log = "0.4.11"
+fern = { version = "0.6.0", features = ["syslog-4"] }
 
 # A safe, extensible ORM and Query builder
-diesel = { version = "1.4.3", features = [ "chrono", "r2d2"] }
+diesel = { version = "1.4.5", features = [ "chrono", "r2d2"] }
 diesel_migrations = "1.4.0"
 
 # Bundled SQLite
-libsqlite3-sys = { version = "0.16.0", features = ["bundled"], optional = true }
+libsqlite3-sys = { version = "0.18.0", features = ["bundled"], optional = true }
 
 # Crypto library
-ring = "0.14.6"
+ring = "0.16.15"
 
 # UUID generation
 uuid = { version = "0.8.1", features = ["v4"] }
 
-# Date and time library for Rust
-chrono = "0.4.10"
+# Date and time libraries
+chrono = "0.4.13"
+chrono-tz = "0.5.2"
+time = "0.2.16"
 
 # TOTP library
 oath = "0.10.2"
 
 # Data encoding library
-data-encoding = "2.1.2"
+data-encoding = "2.2.1"
 
 # JWT library
-jsonwebtoken = "6.0.1"
+jsonwebtoken = "7.2.0"
 
 # U2F library
-u2f = "0.1.6"
+u2f = "0.2.0"
 
 # Yubico Library
-yubico = { version = "0.7.1", features = ["online-tokio"], default-features = false }
+yubico = { version = "0.9.1", features = ["online-tokio"], default-features = false }
 
 # A `dotenv` implementation for Rust
 dotenv = { version = "0.15.0", default-features = false }
 
-# Lazy static macro
-lazy_static = "1.4.0"
-
-# More derives
-derive_more = "0.99.2"
+# Lazy initialization
+once_cell = "1.4.0"
 
 # Numerical libraries
-num-traits = "0.2.10"
+num-traits = "0.2.12"
 num-derive = "0.3.0"
 
 # Email libraries
-lettre = "0.9.2"
-lettre_email = "0.9.2"
-native-tls = "0.2.3"
-quoted_printable = "0.4.1"
+lettre = { version = "0.10.0-alpha.1", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname"], default-features = false }
+native-tls = "0.2.4"
 
 # Template library
-handlebars = "2.0.2"
+handlebars = { version = "3.3.0", features = ["dir_source"] }
 
 # For favicon extraction from main website
-soup = "0.4.1"
-regex = "1.3.1"
+soup = "0.5.0"
+regex = "1.3.9"
 data-url = "0.1.0"
 
-# Required for SSL support for PostgreSQL
-openssl = { version = "0.10.26", optional = true }
+# Used by U2F, JWT and Postgres
+openssl = "0.10.30"
 
 # URL encoding library
 percent-encoding = "2.1.0"
+# Punycode conversion
+idna = "0.2.0"
+
+# CLI argument parsing
+structopt = "0.3.15"
+
+# Logging panics to logfile instead stderr only
+backtrace = "0.3.50"
 
 [patch.crates-io]
-# Add support for Timestamp type
-rmp = { git = 'https://github.com/3Hren/msgpack-rust', rev = 'd6c6c672e470341207ed9feb69b56322b5597a11' }
-
 # Use newest ring
-rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }
-rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }
-
-# Use git version for timeout fix #706
-lettre = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }
-lettre_email = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }
+rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '1010f6a2a88fac899dec0cd2f642156908038a53' }
+rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = '1010f6a2a88fac899dec0cd2f642156908038a53' }
 
 # For favicon extraction from main website
 data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '7f1bd6ce1c2fde599a757302a843a60e714c5f72' }
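The dependency swap above replaces the lazy_static macro with once_cell for lazily-initialized globals. A hedged sketch of the before/after pattern, using a hypothetical regex static for illustration (the real statics in the codebase may differ):

```rust
// Before, with lazy_static 1.4:
//     lazy_static! {
//         static ref ICON_REL_REGEX: Regex = Regex::new(r"icon$").unwrap();
//     }
//
// After, with once_cell 1.4: the same lazily-initialized static, no macro.
use once_cell::sync::Lazy;
use regex::Regex;

static ICON_REL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"icon$").unwrap());

fn is_icon_rel(rel: &str) -> bool {
    // The regex is compiled once, on first access, then reused.
    ICON_REL_REGEX.is_match(rel)
}
```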
README.md (11 changes)
@@ -13,7 +13,7 @@ Image is based on [Rust implementation of Bitwarden API](https://github.com/dani
 
 **This project is not associated with the [Bitwarden](https://bitwarden.com/) project nor 8bit Solutions LLC.**
 
-#### ⚠️**IMPORTANT**⚠️: When using this server, please report any Bitwarden related bug-reports or suggestions [here](https://github.com/dani-garcia/bitwarden_rs/issues/new), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official support channels.
+#### ⚠️**IMPORTANT**⚠️: When using this server, please report any bugs or suggestions to us directly (look at the bottom of this page for ways to get in touch), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official support channels.
 
 ---
 
@@ -21,14 +21,14 @@ Image is based on [Rust implementation of Bitwarden API](https://github.com/dani
 
 Basically full implementation of Bitwarden API is provided including:
 
-* Basic single user functionality
+* Single user functionality
 * Organizations support
 * Attachments
 * Vault API support
 * Serving the static files for Vault interface
 * Website icons API
 * Authenticator and U2F support
-* YubiKey OTP
+* YubiKey and Duo support
 
 ## Installation
 Pull the docker image and mount a volume from the host for persistent storage:
@@ -49,12 +49,13 @@ If you have an available domain name, you can get HTTPS certificates with [Let's
 See the [bitwarden_rs wiki](https://github.com/dani-garcia/bitwarden_rs/wiki) for more information on how to configure and run the bitwarden_rs server.
 
 ## Get in touch
+To ask a question, offer suggestions or new features or to get help configuring or installing the software, please [use the forum](https://bitwardenrs.discourse.group/).
 
-To ask a question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine. Please also report any bugs spotted here.
+If you spot any bugs or crashes with bitwarden_rs itself, please [create an issue](https://github.com/dani-garcia/bitwarden_rs/issues/). Make sure there aren't any similar issues open, though!
 
 If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us!
 
 ### Sponsors
 Thanks for your contribution to the project!
 
-- [@Skaronator](https://github.com/Skaronator)
+- [@ChonoN](https://github.com/ChonoN)
@@ -18,8 +18,8 @@ steps:
     cargo -V
   displayName: Query rust and cargo versions
 
-- script : cargo build --features "sqlite"
-  displayName: 'Build project with sqlite backend'
+- script : cargo test --features "sqlite"
+  displayName: 'Test project with sqlite backend'
 
-- script : cargo build --features "mysql"
-  displayName: 'Build project with mysql backend'
+- script : cargo test --features "mysql"
+  displayName: 'Test project with mysql backend'
build.rs (16 changes)
@@ -1,4 +1,5 @@
 use std::process::Command;
+use std::env;
 
 fn main() {
     #[cfg(all(feature = "sqlite", feature = "mysql"))]
@@ -10,8 +11,13 @@ fn main() {
 
     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
 
-    read_git_info().ok();
+    if let Ok(version) = env::var("BWRS_VERSION") {
+        println!("cargo:rustc-env=BWRS_VERSION={}", version);
+        println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
+    } else {
+        read_git_info().ok();
+    }
 }
 
 fn run(args: &[&str]) -> Result<String, std::io::Error> {
@@ -54,14 +60,16 @@ fn read_git_info() -> Result<(), std::io::Error> {
     } else {
         format!("{}-{}", last_tag, rev_short)
     };
-    println!("cargo:rustc-env=GIT_VERSION={}", version);
+    println!("cargo:rustc-env=BWRS_VERSION={}", version);
+    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
 
     // To access these values, use:
     //    env!("GIT_EXACT_TAG")
     //    env!("GIT_LAST_TAG")
     //    env!("GIT_BRANCH")
     //    env!("GIT_REV")
-    //    env!("GIT_VERSION")
+    //    env!("BWRS_VERSION")
 
     Ok(())
 }
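With this change, build.rs bakes the version into the binary either from a BWRS_VERSION environment variable set at build time or from the git metadata gathered by read_git_info(). A minimal sketch of reading it back at runtime; the constant name comes from the env! comments in the diff, while the helper function itself is illustrative:

```rust
// BWRS_VERSION is emitted by build.rs via cargo:rustc-env, making it a
// compile-time constant. option_env! falls back gracefully when the build
// script could not determine a version (e.g. no git metadata available).
pub fn version() -> &'static str {
    option_env!("BWRS_VERSION").unwrap_or("unknown")
}
```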
docker/Dockerfile.j2 (new file, 285 additions)
@@ -0,0 +1,285 @@
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+{% set build_stage_base_image = "rust:1.40" %}
+{% if "alpine" in target_file %}
+{% set build_stage_base_image = "clux/muslrust:nightly-2020-03-09" %}
+{% set runtime_stage_base_image = "alpine:3.11" %}
+{% set package_arch_name = "" %}
+{% elif "amd64" in target_file %}
+{% set runtime_stage_base_image = "debian:buster-slim" %}
+{% set package_arch_name = "" %}
+{% elif "arm64v8" in target_file %}
+{% set runtime_stage_base_image = "balenalib/aarch64-debian:buster" %}
+{% set package_arch_name = "arm64" %}
+{% elif "arm32v6" in target_file %}
+{% set runtime_stage_base_image = "balenalib/rpi-debian:buster" %}
+{% set package_arch_name = "armel" %}
+{% elif "arm32v7" in target_file %}
+{% set runtime_stage_base_image = "balenalib/armv7hf-debian:buster" %}
+{% set package_arch_name = "armhf" %}
+{% endif %}
+{% set package_arch_prefix = ":" + package_arch_name %}
+{% if package_arch_name == "" %}
+{% set package_arch_prefix = "" %}
+{% endif %}
+# Using multistage build:
+# https://docs.docker.com/develop/develop-images/multistage-build/
+# https://whitfin.io/speeding-up-rust-docker-builds/
+####################### VAULT BUILD IMAGE #######################
+{% set vault_image_hash = "sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c" %}
+{% raw %}
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+#     docker pull bitwardenrs/web-vault:v2.15.1
+#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+{% endraw %}
+FROM bitwardenrs/web-vault@{{ vault_image_hash }} as vault
+
+########################## BUILD IMAGE ##########################
+{% if "musl" in build_stage_base_image %}
+# Musl build image for statically compiled binary
+{% else %}
+# We need to use the Rust build image, because
+# we need the Rust compiler and Cargo tooling
+{% endif %}
+FROM {{ build_stage_base_image }} as build
+
+{% if "sqlite" in target_file %}
+# set sqlite as default for DB ARG for backward compatibility
+ARG DB=sqlite
+
+{% elif "mysql" in target_file %}
+# set mysql backend
+ARG DB=mysql
+
+{% elif "postgresql" in target_file %}
+# set postgresql backend
+ARG DB=postgresql
+
+{% endif %}
+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color
+
+# Don't download rust docs
+RUN rustup set profile minimal
+
+{% if "alpine" in target_file %}
+ENV USER "root"
+ENV RUSTFLAGS='-C link-arg=-s'
+
+{% elif "arm32" in target_file or "arm64" in target_file %}
+# Install required build libs for {{ package_arch_name }} architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+        /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture {{ package_arch_name }} \
+    && apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        libssl-dev{{ package_arch_prefix }} \
+        libc6-dev{{ package_arch_prefix }}
+
+{% endif -%}
+{% if "arm64v8" in target_file %}
+RUN apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        gcc-aarch64-linux-gnu \
+    && mkdir -p ~/.cargo \
+    && echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
+    && echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
+
+ENV CARGO_HOME "/root/.cargo"
+ENV USER "root"
+
+{% elif "arm32v6" in target_file %}
+RUN apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        gcc-arm-linux-gnueabi \
+    && mkdir -p ~/.cargo \
+    && echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
+    && echo 'linker = "arm-linux-gnueabi-gcc"' >> ~/.cargo/config
+
+ENV CARGO_HOME "/root/.cargo"
+ENV USER "root"
+
+{% elif "arm32v7" in target_file %}
+RUN apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        gcc-arm-linux-gnueabihf \
+    && mkdir -p ~/.cargo \
+    && echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
+    && echo 'linker = "arm-linux-gnueabihf-gcc"' >> ~/.cargo/config
+
+ENV CARGO_HOME "/root/.cargo"
+ENV USER "root"
+
+{% endif %}
+{% if "mysql" in target_file %}
+# Install MySQL package
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+{% if "musl" in build_stage_base_image %}
+    libmysqlclient-dev{{ package_arch_prefix }} \
+{% else %}
+    libmariadb-dev{{ package_arch_prefix }} \
+{% endif %}
+    && rm -rf /var/lib/apt/lists/*
+
+{% elif "postgresql" in target_file %}
+# Install PostgreSQL package
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libpq-dev{{ package_arch_prefix }} \
+    && rm -rf /var/lib/apt/lists/*
+
+{% endif %}
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
+WORKDIR /app
+
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs
+
+{% if "arm64v8" in target_file %}
+ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
+ENV CROSS_COMPILE="1"
+ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
+ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
+{% elif "arm32v6" in target_file %}
+ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
+ENV CROSS_COMPILE="1"
+ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
+ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
+{% elif "arm32v7" in target_file %}
+ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
+ENV CROSS_COMPILE="1"
+ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
+ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
+{% endif -%}
+
+{% if "alpine" in target_file %}
+RUN rustup target add x86_64-unknown-linux-musl
+
+{% elif "arm64v8" in target_file %}
+RUN rustup target add aarch64-unknown-linux-gnu
+
+{% elif "arm32v6" in target_file %}
+RUN rustup target add arm-unknown-linux-gnueabi
+
+{% elif "arm32v7" in target_file %}
+RUN rustup target add armv7-unknown-linux-gnueabihf
+{% endif %}
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete
+
+# Copies the complete project
+# To avoid copying unneeded files, use .dockerignore
+COPY . .
+
+# Make sure that we actually build the project
+RUN touch src/main.rs
+
+# Builds again, this time it'll just be
+# your actual source files being built
+{% if "amd64" in target_file %}
+RUN cargo build --features ${DB} --release
+{% elif "arm64v8" in target_file %}
+RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
+{% elif "arm32v6" in target_file %}
+RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
+{% elif "arm32v7" in target_file %}
+RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
+{% endif %}
+
+######################## RUNTIME IMAGE ########################
+# Create a new stage with a minimal image
+# because we already have a binary built
+FROM {{ runtime_stage_base_image }}
+
+ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
+ENV ROCKET_WORKERS=10
+{% if "alpine" in runtime_stage_base_image %}
+ENV SSL_CERT_DIR=/etc/ssl/certs
+{% endif %}
+
+{% if "amd64" not in target_file %}
+RUN [ "cross-build-start" ]
+
+{% endif %}
+# Install needed libraries
+{% if "alpine" in runtime_stage_base_image %}
+RUN apk add --no-cache \
+    openssl \
+    curl \
+{% if "sqlite" in target_file %}
+    sqlite \
+{% elif "mysql" in target_file %}
+    mariadb-connector-c \
+{% elif "postgresql" in target_file %}
+    postgresql-libs \
+{% endif %}
+    ca-certificates
+{% else %}
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    openssl \
+    ca-certificates \
+    curl \
+{% if "sqlite" in target_file %}
+    sqlite3 \
+{% elif "mysql" in target_file %}
+    libmariadbclient-dev \
+{% elif "postgresql" in target_file %}
+    libpq5 \
+{% endif %}
+    && rm -rf /var/lib/apt/lists/*
+{% endif %}
+
+RUN mkdir /data
+{% if "amd64" not in target_file %}
+
+RUN [ "cross-build-end" ]
+
+{% endif %}
+VOLUME /data
+EXPOSE 80
+EXPOSE 3012
+
+# Copies the files from the context (Rocket.toml file and web-vault)
+# and the binary from the "build" stage to the current stage
+COPY Rocket.toml .
+COPY --from=vault /web-vault ./web-vault
+{% if "alpine" in target_file %}
+COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
+{% elif "arm64v8" in target_file %}
+COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .
+{% elif "arm32v6" in target_file %}
+COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .
+{% elif "arm32v7" in target_file %}
+COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .
+{% else %}
+COPY --from=build app/target/release/bitwarden_rs .
+{% endif %}
+
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh
+
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]
+
+# Configures the startup!
+WORKDIR /
+CMD ["/start.sh"]
docker/Makefile (new file, 9 additions)
@@ -0,0 +1,9 @@
+OBJECTS := $(shell find -mindepth 2 -name 'Dockerfile*')
+
+all: $(OBJECTS)
+
+%/Dockerfile: Dockerfile.j2 render_template
+	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"
+
+%/Dockerfile.alpine: Dockerfile.j2 render_template
+	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"
docker/README.md (new file, 3 additions)
@@ -0,0 +1,3 @@
+The arch-specific directory names follow the arch identifiers used by the Docker official images:
+
+https://github.com/docker-library/official-images/blob/master/README.md#architectures-other-than-amd64
@@ -1,33 +1,33 @@
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+#     docker pull bitwardenrs/web-vault:v2.15.1
+#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault
 
 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build
 
 # set mysql backend
 ARG DB=mysql
 
+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color
+
 # Don't download rust docs
 RUN rustup set profile minimal
 
@@ -38,7 +38,7 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 # Creates a dummy project used to grab dependencies
-RUN USER=root cargo new --bin app
+RUN USER=root cargo new --bin /app
 WORKDIR /app
 
 # Copies over *only* your manifests and build files
@@ -92,10 +92,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build app/target/release/bitwarden_rs .
 
-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh
 
-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]
 
 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
@@ -1,61 +1,76 @@
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+#     docker pull bitwardenrs/web-vault:v2.15.1
+#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault
 
 ########################## BUILD IMAGE ##########################
 # Musl build image for statically compiled binary
-FROM clux/muslrust:nightly-2019-11-23 as build
+FROM clux/muslrust:nightly-2020-03-09 as build
 
 # set mysql backend
 ARG DB=mysql
 
+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color
+
 # Don't download rust docs
 RUN rustup set profile minimal
 
 ENV USER "root"
+ENV RUSTFLAGS='-C link-arg=-s'
 
-# Install needed libraries
+# Install MySQL package
 RUN apt-get update && apt-get install -y \
     --no-install-recommends \
     libmysqlclient-dev \
     && rm -rf /var/lib/apt/lists/*
 
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app
 
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs
+
+RUN rustup target add x86_64-unknown-linux-musl
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete
+
 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .
 
-RUN rustup target add x86_64-unknown-linux-musl
-
 # Make sure that we actually build the project
 RUN touch src/main.rs
 
-# Build
+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.10
+FROM alpine:3.11
 
 ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
@@ -65,8 +80,8 @@ ENV SSL_CERT_DIR=/etc/ssl/certs
 # Install needed libraries
 RUN apk add --no-cache \
     openssl \
-    mariadb-connector-c \
     curl \
+    mariadb-connector-c \
    ca-certificates
 
 RUN mkdir /data
@@ -80,10 +95,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
 
-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh
 
-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]
 
 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
@@ -1,50 +1,44 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

-# set mysql backend
+# set postgresql backend
 ARG DB=postgresql

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

-# Using bundled SQLite, no need to install it
-# RUN apt-get update && apt-get install -y\
-# --no-install-recommends \
-# sqlite3\
-# && rm -rf /var/lib/apt/lists/*
-
-# Install MySQL package
+# Install PostgreSQL package
 RUN apt-get update && apt-get install -y \
     --no-install-recommends \
     libpq-dev \
     && rm -rf /var/lib/apt/lists/*

 # Creates a dummy project used to grab dependencies
-RUN USER=root cargo new --bin app
+RUN USER=root cargo new --bin /app
 WORKDIR /app

 # Copies over *only* your manifests and build files
@@ -84,7 +78,6 @@ RUN apt-get update && apt-get install -y \
     openssl \
     ca-certificates \
     curl \
-    sqlite3 \
     libpq5 \
     && rm -rf /var/lib/apt/lists/*

@@ -99,10 +92,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build app/target/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
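Pinning the vault stage to a sha256 digest instead of a tag is the reproducibility fix repeated in every one of these Dockerfiles: a tag can be re-pushed to point at different content, but a digest is immutable. The comments in the diff already show how to translate between the two; collected here for convenience:

    # Tag -> digest: pull the tag, then read its RepoDigests.
    docker pull bitwardenrs/web-vault:v2.15.1
    docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1

    # Digest -> tag(s): inspect by digest and read RepoTags.
    docker image inspect --format "{{.RepoTags}}" \
        bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c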
@@ -1,61 +1,76 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # Musl build image for statically compiled binary
-FROM clux/muslrust:nightly-2019-11-23 as build
+FROM clux/muslrust:nightly-2020-03-09 as build

-# set mysql backend
+# set postgresql backend
 ARG DB=postgresql

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

 ENV USER "root"
+ENV RUSTFLAGS='-C link-arg=-s'

-# Install needed libraries
+# Install PostgreSQL package
 RUN apt-get update && apt-get install -y \
     --no-install-recommends \
     libpq-dev \
     && rm -rf /var/lib/apt/lists/*

+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs
+
+RUN rustup target add x86_64-unknown-linux-musl
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete
+
 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-RUN rustup target add x86_64-unknown-linux-musl

 # Make sure that we actually build the project
 RUN touch src/main.rs

-# Build
+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.10
+FROM alpine:3.11

 ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
@@ -65,9 +80,8 @@ ENV SSL_CERT_DIR=/etc/ssl/certs
 # Install needed libraries
 RUN apk add --no-cache \
     openssl \
-    postgresql-libs \
     curl \
-    sqlite \
+    postgresql-libs \
     ca-certificates

 RUN mkdir /data
@@ -81,10 +95,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
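The build-stage reshuffle in this diff implements the dependency-caching trick from the whitfin.io article linked at the top of each file. A condensed sketch of the pattern under the paths used here; the ${DB} feature flag is dropped for brevity, and the base tag is simply the one this diff happens to use:

    FROM rust:1.40 as build

    # An empty binary crate: only the manifests are needed to
    # resolve and compile every dependency.
    RUN USER=root cargo new --bin /app
    WORKDIR /app

    # Copy *only* the manifests and build files; while these stay
    # unchanged, Docker's layer cache skips the expensive RUN below.
    COPY ./Cargo.* ./
    COPY ./build.rs ./build.rs

    # Compile dependencies against the dummy main.rs, then delete
    # everything except target/, which holds the compiled deps.
    RUN cargo build --release
    RUN find . -not -path "./target*" -delete

    # Now bring in the real sources; touching main.rs forces cargo
    # to rebuild the crate itself rather than reuse the dummy build.
    COPY . .
    RUN touch src/main.rs
    RUN cargo build --release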
@@ -1,38 +1,38 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

-# set sqlite as default for DB ARG for backward comaptibility
+# set sqlite as default for DB ARG for backward compatibility
 ARG DB=sqlite

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

 # Creates a dummy project used to grab dependencies
-RUN USER=root cargo new --bin app
+RUN USER=root cargo new --bin /app
 WORKDIR /app

 # Copies over *only* your manifests and build files
@@ -86,10 +86,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build app/target/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
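Because ARG DB now defaults to sqlite, an image built without arguments behaves like the pre-split images did, which is what the "backward compatibility" comment is about. Overriding it is a one-flag change at build time; a sketch, assuming the build runs from the repository root against the matching Dockerfile (the tag names here are illustrative):

    # Default backend (sqlite), matching the old single-backend image:
    docker build -t bitwarden_rs .

    # Pick another backend; the value lands in
    # "RUN cargo build --features ${DB} --release":
    docker build --build-arg DB=postgresql -t bitwarden_rs:postgresql .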
@@ -1,55 +1,70 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # Musl build image for statically compiled binary
-FROM clux/muslrust:nightly-2019-11-23 as build
+FROM clux/muslrust:nightly-2020-03-09 as build

-# set sqlite as default for DB ARG for backward comaptibility
+# set sqlite as default for DB ARG for backward compatibility
 ARG DB=sqlite

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

 ENV USER "root"
+ENV RUSTFLAGS='-C link-arg=-s'

+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs
+
+RUN rustup target add x86_64-unknown-linux-musl
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete
+
 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-RUN rustup target add x86_64-unknown-linux-musl

 # Make sure that we actually build the project
 RUN touch src/main.rs

-# Build
+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.10
+FROM alpine:3.11

 ENV ROCKET_ENV "staging"
 ENV ROCKET_PORT=80
@@ -74,11 +89,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh
+
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
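The ENV RUSTFLAGS='-C link-arg=-s' line added to the musl builds passes -s to the linker, stripping symbols from the statically linked binary at link time; for a fully static musl build this usually shrinks the release binary considerably. In context:

    # Strip symbols while linking; no separate "strip" step needed.
    ENV RUSTFLAGS='-C link-arg=-s'
    RUN cargo build --features ${DB} --release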
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

 # set mysql backend
 ARG DB=mysql

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for armel architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+    /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture armel \
+    && apt-get update \
+    && apt-get install -y \
+    --no-install-recommends \
+    libssl-dev:armel \
+    libc6-dev:armel

 RUN apt-get update \
     && apt-get install -y \
     --no-install-recommends \
@@ -42,30 +52,42 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Install MySQL package
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libmariadb-dev:armel \
+    && rm -rf /var/lib/apt/lists/*
+
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl armel libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-    /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture armel \
-    && apt-get update \
-    && apt-get install -y \
-    --no-install-recommends \
-    libssl-dev:armel \
-    libc6-dev:armel \
-    libmariadb-dev:armel
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
 ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
+RUN rustup target add arm-unknown-linux-gnueabi
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add arm-unknown-linux-gnueabi
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

 ######################## RUNTIME IMAGE ########################
@@ -90,10 +112,11 @@ RUN apt-get update && apt-get install -y \

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -101,10 +124,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
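The arm builds all follow one cross-compilation recipe: register the foreign dpkg architecture, install the target's C libraries, point cargo's cc crate and openssl-sys at the cross toolchain, then build with --target. Note the CC_<triple> convention: the env var name is CC_ plus the target triple with dashes turned into underscores. A reduced sketch for the armel case; the cross-gcc install line is an assumption here, since the exact package list sits in context lines this diff does not show:

    FROM rust:1.40 as build

    # Make armel packages installable and pull in the target libs.
    # (gcc-arm-linux-gnueabi is assumed; the diff hides that line.)
    RUN dpkg --add-architecture armel \
        && apt-get update \
        && apt-get install -y --no-install-recommends \
           gcc-arm-linux-gnueabi \
           libssl-dev:armel \
           libc6-dev:armel

    # cc crate: which compiler to use for this triple; openssl-sys:
    # where the armel OpenSSL headers and libraries live.
    ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
    ENV CROSS_COMPILE="1"
    ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
    ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"

    RUN rustup target add arm-unknown-linux-gnueabi
    # ... COPY sources ...
    RUN cargo build --release --target=arm-unknown-linux-gnueabi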
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

-# set sqlite as default for DB ARG for backward comaptibility
+# set sqlite as default for DB ARG for backward compatibility
 ARG DB=sqlite

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for armel architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+    /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture armel \
+    && apt-get update \
+    && apt-get install -y \
+    --no-install-recommends \
+    libssl-dev:armel \
+    libc6-dev:armel

 RUN apt-get update \
     && apt-get install -y \
     --no-install-recommends \
@@ -42,29 +52,36 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl armel libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-    /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture armel \
-    && apt-get update \
-    && apt-get install -y \
-    --no-install-recommends \
-    libssl-dev:armel \
-    libc6-dev:armel
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
 ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
+RUN rustup target add arm-unknown-linux-gnueabi
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add arm-unknown-linux-gnueabi
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

 ######################## RUNTIME IMAGE ########################
@@ -89,10 +106,11 @@ RUN apt-get update && apt-get install -y \

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -100,10 +118,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

 # set mysql backend
 ARG DB=mysql

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for armhf architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+    /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture armhf \
+    && apt-get update \
+    && apt-get install -y \
+    --no-install-recommends \
+    libssl-dev:armhf \
+    libc6-dev:armhf

 RUN apt-get update \
     && apt-get install -y \
     --no-install-recommends \
@@ -42,31 +52,41 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Install MySQL package
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libmariadb-dev:armhf \
+    && rm -rf /var/lib/apt/lists/*
+
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl armhf libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-    /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture armhf \
-    && apt-get update \
-    && apt-get install -y \
-    --no-install-recommends \
-    libssl-dev:armhf \
-    libc6-dev:armhf \
-    libmariadb-dev:armhf
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
 ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
+RUN rustup target add armv7-unknown-linux-gnueabihf
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add armv7-unknown-linux-gnueabihf
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

 ######################## RUNTIME IMAGE ########################
@@ -91,10 +111,11 @@ RUN apt-get update && apt-get install -y \

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -102,10 +123,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
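The RUN [ "cross-build-end" ] lines visible in the runtime stages are the closing half of the QEMU shim that balena-style ARM base images ship: commands between cross-build-start and cross-build-end run under emulation, so the ARM runtime image can be assembled on an x86 build host. A sketch of the pairing; the base image name is an assumption, since the runtime FROM line falls outside the hunks shown here:

    # Hypothetical ARM base image with the balena QEMU shim baked in.
    FROM balenalib/armv7hf-debian:buster

    RUN [ "cross-build-start" ]
    # Everything here executes through QEMU when built on x86.
    RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates
    RUN mkdir /data
    RUN [ "cross-build-end" ]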
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

-# set sqlite as default for DB ARG for backward comaptibility
+# set sqlite as default for DB ARG for backward compatibility
 ARG DB=sqlite

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for armhf architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+    /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture armhf \
+    && apt-get update \
+    && apt-get install -y \
+    --no-install-recommends \
+    libssl-dev:armhf \
+    libc6-dev:armhf

 RUN apt-get update \
     && apt-get install -y \
     --no-install-recommends \
@@ -42,29 +52,35 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl armhf libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-    /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture armhf \
-    && apt-get update \
-    && apt-get install -y \
-    --no-install-recommends \
-    libssl-dev:armhf \
-    libc6-dev:armhf
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
 ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
+RUN rustup target add armv7-unknown-linux-gnueabihf
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add armv7-unknown-linux-gnueabihf
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

 ######################## RUNTIME IMAGE ########################
@@ -89,10 +105,11 @@ RUN apt-get update && apt-get install -y \

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -100,10 +117,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+# docker pull bitwardenrs/web-vault:v2.15.1
+# docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+# docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

 # set mysql backend
 ARG DB=mysql

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for arm64 architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+    /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture arm64 \
+    && apt-get update \
+    && apt-get install -y \
+    --no-install-recommends \
+    libssl-dev:arm64 \
+    libc6-dev:arm64

 RUN apt-get update \
     && apt-get install -y \
     --no-install-recommends \
@@ -42,30 +52,42 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Install MySQL package
+RUN apt-get update && apt-get install -y \
+    --no-install-recommends \
+    libmariadb-dev:arm64 \
+    && rm -rf /var/lib/apt/lists/*
+
+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl arm64 libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-    /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture arm64 \
-    && apt-get update \
-    && apt-get install -y \
-    --no-install-recommends \
-    libssl-dev:arm64 \
-    libc6-dev:arm64 \
-    libmariadb-dev:arm64
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
 ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
+RUN rustup target add aarch64-unknown-linux-gnu
+
+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add aarch64-unknown-linux-gnu
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

 ######################## RUNTIME IMAGE ########################
@@ -86,14 +108,15 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     libmariadbclient-dev \
     && rm -rf /var/lib/apt/lists/*

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -101,10 +124,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
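EXPOSE 3012 shows up in every arm runtime stage for the same reason: bitwarden_rs serves its WebSocket notifications on a separate listener (3012 by default) next to the HTTP port 80. EXPOSE is only documentation, so both ports still have to be published at run time; a sketch, with an illustrative container name and volume path:

    docker run -d --name bitwarden \
        -v /bw-data/:/data/ \
        -p 80:80 \
        -p 3012:3012 \
        bitwardenrs/server:latest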
@@ -1,36 +1,46 @@
-# Using multistage build:
+# This file was generated using a Jinja2 template.
+# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfile's.
+
+# Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM alpine:3.10 as vault
-
-ENV VAULT_VERSION "v2.12.0b"
-
-ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
-
-RUN apk add --no-cache --upgrade \
-    curl \
-    tar
-
-RUN mkdir /web-vault
-WORKDIR /web-vault
-
-SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
-
-RUN curl -L $URL | tar xz
-RUN ls
+# This hash is extracted from the docker web-vault builds and it's prefered over a simple tag because it's immutable.
+# It can be viewed in multiple ways:
+# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
+# - From the console, with the following commands:
+#     docker pull bitwardenrs/web-vault:v2.15.1
+#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.15.1
+#
+# - To do the opposite, and get the tag from the hash, you can do:
+#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c
+FROM bitwardenrs/web-vault@sha256:afba1e3bded09dc0a6a0dbacb3363ac33b6f122b4b26d3682cafb9115bdf785c as vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
 # we need the Rust compiler and Cargo tooling
-FROM rust:1.39 as build
+FROM rust:1.40 as build

-# set sqlite as default for DB ARG for backward comaptibility
+# set sqlite as default for DB ARG for backward compatibility
 ARG DB=sqlite

+# Build time options to avoid dpkg warnings and help with reproducible builds.
+ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

 # Don't download rust docs
 RUN rustup set profile minimal

+# Install required build libs for arm64 architecture.
+RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
+        /etc/apt/sources.list.d/deb-src.list \
+    && dpkg --add-architecture arm64 \
+    && apt-get update \
+    && apt-get install -y \
+        --no-install-recommends \
+        libssl-dev:arm64 \
+        libc6-dev:arm64

 RUN apt-get update \
     && apt-get install -y \
         --no-install-recommends \
@@ -42,29 +52,36 @@ RUN apt-get update \
 ENV CARGO_HOME "/root/.cargo"
 ENV USER "root"

+# Creates a dummy project used to grab dependencies
+RUN USER=root cargo new --bin /app
 WORKDIR /app

-# Prepare openssl arm64 libs
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
-        /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture arm64 \
-    && apt-get update \
-    && apt-get install -y \
-        --no-install-recommends \
-        libssl-dev:arm64 \
-        libc6-dev:arm64
+# Copies over *only* your manifests and build files
+COPY ./Cargo.* ./
+COPY ./rust-toolchain ./rust-toolchain
+COPY ./build.rs ./build.rs

 ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
 ENV CROSS_COMPILE="1"
 ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
 ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
+RUN rustup target add aarch64-unknown-linux-gnu

+# Builds your dependencies and removes the
+# dummy project, except the target folder
+# This folder contains the compiled dependencies
+RUN cargo build --features ${DB} --release
+RUN find . -not -path "./target*" -delete

 # Copies the complete project
 # To avoid copying unneeded files, use .dockerignore
 COPY . .

-# Build
-RUN rustup target add aarch64-unknown-linux-gnu
+# Make sure that we actually build the project
+RUN touch src/main.rs

+# Builds again, this time it'll just be
+# your actual source files being built
 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

 ######################## RUNTIME IMAGE ########################
@@ -85,14 +102,15 @@ RUN apt-get update && apt-get install -y \
     ca-certificates \
     curl \
     sqlite3 \
     && rm -rf /var/lib/apt/lists/*

 RUN mkdir /data

 RUN [ "cross-build-end" ]

 VOLUME /data
 EXPOSE 80
+EXPOSE 3012

 # Copies the files from the context (Rocket.toml file and web-vault)
 # and the binary from the "build" stage to the current stage
@@ -100,10 +118,11 @@ COPY Rocket.toml .
 COPY --from=vault /web-vault ./web-vault
 COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

-COPY docker/healthcheck.sh ./healthcheck.sh
+COPY docker/healthcheck.sh /healthcheck.sh
+COPY docker/start.sh /start.sh

-HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
+HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

 # Configures the startup!
 WORKDIR /
-CMD ["/bitwarden_rs"]
+CMD ["/start.sh"]
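Since the HEALTHCHECK now runs /healthcheck.sh inside the container every 60 seconds, the container's health state can be observed at runtime. A minimal sketch, assuming a container named `bitwarden` (the name is an assumption):

    docker inspect --format '{{.State.Health.Status}}' bitwarden
    # Show the output of the most recent probe runs:
    docker inspect --format '{{json .State.Health.Log}}' bitwarden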
docker/healthcheck.sh (59 lines changed; Normal file → Executable file)
@@ -1,8 +1,53 @@
-#!/usr/bin/env sh
+#!/bin/sh

-if [ -z "$ROCKET_TLS"]
-then
-    curl --fail http://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
-else
-    curl --insecure --fail https://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
-fi
+# Use the value of the corresponding env var (if present),
+# or a default value otherwise.
+: ${DATA_FOLDER:="data"}
+: ${ROCKET_PORT:="80"}
+
+CONFIG_FILE="${DATA_FOLDER}"/config.json
+
+# Given a config key, return the corresponding config value from the
+# config file. If the key doesn't exist, return an empty string.
+get_config_val() {
+    local key="$1"
+    # Extract a line of the form:
+    #     "domain": "https://bw.example.com/path",
+    grep "\"${key}\":" "${CONFIG_FILE}" |
+    # To extract just the value (https://bw.example.com/path), delete:
+    # (1) everything up to and including the first ':',
+    # (2) whitespace and '"' from the front,
+    # (3) ',' and '"' from the back.
+    sed -e 's/[^:]\+://' -e 's/^[ "]\+//' -e 's/[,"]\+$//'
+}
+
+# Extract the base path from a domain URL. For example:
+# - `` -> ``
+# - `https://bw.example.com` -> ``
+# - `https://bw.example.com/` -> ``
+# - `https://bw.example.com/path` -> `/path`
+# - `https://bw.example.com/multi/path` -> `/multi/path`
+get_base_path() {
+    echo "$1" |
+    # Delete:
+    # (1) everything up to and including '://',
+    # (2) everything up to '/',
+    # (3) trailing '/' from the back.
+    sed -e 's|.*://||' -e 's|[^/]\+||' -e 's|/*$||'
+}
+
+# Read domain URL from config.json, if present.
+if [ -r "${CONFIG_FILE}" ]; then
+    domain="$(get_config_val 'domain')"
+    if [ -n "${domain}" ]; then
+        # config.json 'domain' overrides the DOMAIN env var.
+        DOMAIN="${domain}"
+    fi
+fi
+
+base_path="$(get_base_path "${DOMAIN}")"
+if [ -n "${ROCKET_TLS}" ]; then
+    s='s'
+fi
+curl --insecure --fail --silent --show-error \
+    "http${s}://localhost:${ROCKET_PORT}${base_path}/alive" || exit 1
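The sed pipeline in get_base_path() can be exercised on its own to confirm the documented examples. A quick check, runnable with GNU sed (the same dialect the script uses):

    echo 'https://bw.example.com/multi/path' | sed -e 's|.*://||' -e 's|[^/]\+||' -e 's|/*$||'
    # prints: /multi/path
    echo 'https://bw.example.com/' | sed -e 's|.*://||' -e 's|[^/]\+||' -e 's|/*$||'
    # prints an empty line (no base path)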
docker/render_template (17 lines added; Executable file)
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+
+import os, argparse, json
+
+import jinja2
+
+args_parser = argparse.ArgumentParser()
+args_parser.add_argument('template_file', help='Jinja2 template file to render.')
+args_parser.add_argument('render_vars', help='JSON-encoded data to pass to the templating engine.')
+cli_args = args_parser.parse_args()
+
+render_vars = json.loads(cli_args.render_vars)
+environment = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.getcwd()),
+    trim_blocks=True,
+)
+print(environment.get_template(cli_args.template_file).render(render_vars))
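The script resolves the template relative to the current directory and takes its variables as a single JSON argument. A hypothetical invocation as a sketch (the variable names are illustrative; the real ones are defined by `Dockerfile.j2` and the Makefile that drives it):

    ./docker/render_template docker/Dockerfile.j2 '{"target": "aarch64-unknown-linux-gnu"}' > docker/aarch64/Dockerfile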
docker/start.sh (15 lines added; Executable file)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+if [ -r /etc/bitwarden_rs.sh ]; then
+    . /etc/bitwarden_rs.sh
+fi
+
+if [ -d /etc/bitwarden_rs.d ]; then
+    for f in /etc/bitwarden_rs.d/*.sh; do
+        if [ -r $f ]; then
+            . $f
+        fi
+    done
+fi
+
+exec /bitwarden_rs "${@}"
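Because start.sh sources /etc/bitwarden_rs.sh and any *.sh file under /etc/bitwarden_rs.d/ before exec'ing the server, environment tweaks can be bind-mounted into the container without rebuilding the image. A minimal sketch of such a drop-in (the file name is hypothetical; SIGNUPS_ALLOWED is an existing bitwarden_rs option):

    # /etc/bitwarden_rs.d/10-signups.sh (hypothetical drop-in)
    SIGNUPS_ALLOWED=false
    export SIGNUPS_ALLOWED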
hooks/README.md (20 lines added; Normal file)
@@ -0,0 +1,20 @@
+The hooks in this directory are used to create multi-arch images using Docker Hub automated builds.
+
+Docker Hub hooks provide these predefined [environment variables](https://docs.docker.com/docker-hub/builds/advanced/#environment-variables-for-building-and-testing):
+
+* `SOURCE_BRANCH`: the name of the branch or the tag that is currently being tested.
+* `SOURCE_COMMIT`: the SHA1 hash of the commit being tested.
+* `COMMIT_MSG`: the message from the commit being tested and built.
+* `DOCKER_REPO`: the name of the Docker repository being built.
+* `DOCKERFILE_PATH`: the dockerfile currently being built.
+* `DOCKER_TAG`: the Docker repository tag being built.
+* `IMAGE_NAME`: the name and tag of the Docker repository being built. (This variable is a combination of `DOCKER_REPO:DOCKER_TAG`.)
+
+The current multi-arch image build relies on the original bitwarden_rs Dockerfiles, which use cross-compilation for architectures other than `amd64`, and don't yet support all arch/database/OS combinations. However, cross-compilation is much faster than QEMU-based builds (e.g., using `docker buildx`). This situation may need to be revisited at some point.
+
+## References
+
+* https://docs.docker.com/docker-hub/builds/advanced/
+* https://docs.docker.com/engine/reference/commandline/manifest/
+* https://www.docker.com/blog/multi-arch-build-and-images-the-simple-way/
+* https://success.docker.com/article/how-do-i-authenticate-with-the-v2-api
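The manifest-based flow these hooks automate can also be reproduced by hand, which is useful for debugging a broken push. A rough sketch with placeholder repo and tag names:

    export DOCKER_CLI_EXPERIMENTAL=enabled
    docker manifest create user/repo:tag user/repo:tag-amd64 user/repo:tag-arm64v8
    docker manifest annotate --os linux --arch arm64 --variant v8 user/repo:tag user/repo:tag-arm64v8
    docker manifest push --purge user/repo:tag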
hooks/arches.sh (30 lines added; Normal file)
@@ -0,0 +1,30 @@
+# The default Debian-based SQLite images support these arches.
+#
+# Other images (Alpine-based, or with other database backends) currently
+# support only a subset of these.
+arches=(
+    amd64
+    arm32v6
+    arm32v7
+    arm64v8
+)
+
+case "${DOCKER_REPO}" in
+    *-mysql)
+        db=mysql
+        arches=(amd64)
+        ;;
+    *-postgresql)
+        db=postgresql
+        arches=(amd64)
+        ;;
+    *)
+        db=sqlite
+        ;;
+esac
+
+if [[ "${DOCKER_TAG}" == *alpine ]]; then
+    # The Alpine build currently only works for amd64.
+    os_suffix=.alpine
+    arches=(amd64)
+fi
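arches.sh is meant to be sourced, not executed: it derives `db`, `arches`, and optionally `os_suffix` from the `DOCKER_REPO` and `DOCKER_TAG` values Docker Hub exports. A quick local check, with illustrative values:

    DOCKER_REPO=index.docker.io/user/bitwardenrs-mysql DOCKER_TAG=1.16.0 \
        bash -c 'source hooks/arches.sh; echo "db=$db arches=${arches[*]}"'
    # prints: db=mysql arches=amd64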
hooks/build (14 lines added; Executable file)
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+echo ">>> Building images..."
+
+source ./hooks/arches.sh
+
+set -ex
+
+for arch in "${arches[@]}"; do
+    docker build \
+        -t "${DOCKER_REPO}:${DOCKER_TAG}-${arch}" \
+        -f docker/${arch}/${db}/Dockerfile${os_suffix} \
+        .
+done
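Outside Docker Hub, the build hook can be driven locally by exporting the two variables it expects. A sketch with an illustrative repo name:

    export DOCKER_REPO=user/bitwardenrs DOCKER_TAG=1.16.0
    bash hooks/build
    # yields user/bitwardenrs:1.16.0-amd64, ...-arm32v6, ...-arm32v7 and ...-arm64v8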
hooks/push (112 lines added; Executable file)
@@ -0,0 +1,112 @@
+#!/bin/bash
+
+echo ">>> Pushing images..."
+
+export DOCKER_CLI_EXPERIMENTAL=enabled
+
+declare -A annotations=(
+    [amd64]="--os linux --arch amd64"
+    [arm32v6]="--os linux --arch arm --variant v6"
+    [arm32v7]="--os linux --arch arm --variant v7"
+    [arm64v8]="--os linux --arch arm64 --variant v8"
+)
+
+source ./hooks/arches.sh
+
+set -ex
+
+declare -A images
+for arch in ${arches[@]}; do
+    images[$arch]="${DOCKER_REPO}:${DOCKER_TAG}-${arch}"
+done
+
+# Push the images that were just built; manifest list creation fails if the
+# images (manifests) referenced don't already exist in the Docker registry.
+for image in "${images[@]}"; do
+    docker push "${image}"
+done
+
+manifest_lists=("${DOCKER_REPO}:${DOCKER_TAG}")
+
+# If the Docker tag starts with a version number, assume the latest release is
+# being pushed. Add an extra manifest (`latest` or `alpine`, as appropriate)
+# to make it easier for users to track the latest release.
+if [[ "${DOCKER_TAG}" =~ ^[0-9]+\.[0-9]+\.[0-9]+ ]]; then
+    if [[ "${DOCKER_TAG}" == *alpine ]]; then
+        manifest_lists+=(${DOCKER_REPO}:alpine)
+    else
+        manifest_lists+=(${DOCKER_REPO}:latest)
+
+        # Add an extra `latest-arm32v6` tag; Docker can't seem to properly
+        # auto-select that image on Armv6 platforms like Raspberry Pi 1 and Zero
+        # (https://github.com/moby/moby/issues/41017).
+        #
+        # TODO: Also add an `alpine-arm32v6` tag if multi-arch support for
+        # Alpine-based bitwarden_rs images is implemented before this Docker
+        # issue is fixed.
+        docker tag "${DOCKER_REPO}:${DOCKER_TAG}-arm32v6" "${DOCKER_REPO}:latest-arm32v6"
+        docker push "${DOCKER_REPO}:latest-arm32v6"
+    fi
+fi
+
+for manifest_list in "${manifest_lists[@]}"; do
+    # Create the (multi-arch) manifest list of arch-specific images.
+    docker manifest create ${manifest_list} ${images[@]}
+
+    # Make sure each image manifest is annotated with the correct arch info.
+    # Docker does not auto-detect the arch of each cross-compiled image, so
+    # everything would appear as `linux/amd64` otherwise.
+    for arch in "${arches[@]}"; do
+        docker manifest annotate ${annotations[$arch]} ${manifest_list} ${images[$arch]}
+    done
+
+    # Push the manifest list.
+    docker manifest push --purge ${manifest_list}
+done
+
+# Avoid logging credentials and tokens.
+set +ex
+
+# Delete the arch-specific tags, if credentials for doing so are available.
+# Note that `DOCKER_PASSWORD` must be the actual user password. Passing a JWT
+# obtained using a personal access token results in a 403 error with
+# {"detail": "access to the resource is forbidden with personal access token"}
+if [[ -z "${DOCKER_USERNAME}" || -z "${DOCKER_PASSWORD}" ]]; then
+    exit 0
+fi
+
+# Given a JSON input on stdin, extract the string value associated with the
+# specified key. This avoids an extra dependency on a tool like `jq`.
+extract() {
+    local key="$1"
+    # Extract "<key>":"<val>" (assumes key/val won't contain double quotes).
+    # The colon may have whitespace on either side.
+    grep -o "\"${key}\"[[:space:]]*:[[:space:]]*\"[^\"]\+\"" |
+    # Extract just <val> by deleting the last '"', and then greedily deleting
+    # everything up to '"'.
+    sed -e 's/"$//' -e 's/.*"//'
+}
+
+echo ">>> Getting API token..."
+jwt=$(curl -sS -X POST \
+    -H "Content-Type: application/json" \
+    -d "{\"username\":\"${DOCKER_USERNAME}\",\"password\": \"${DOCKER_PASSWORD}\"}" \
+    "https://hub.docker.com/v2/users/login" |
+    extract 'token')
+
+# Strip the registry portion from `index.docker.io/user/repo`.
+repo="${DOCKER_REPO#*/}"
+
+for arch in ${arches[@]}; do
+    # Don't delete the `arm32v6` tag; Docker can't seem to properly
+    # auto-select that image on Armv6 platforms like Raspberry Pi 1 and Zero
+    # (https://github.com/moby/moby/issues/41017).
+    if [[ ${arch} == 'arm32v6' ]]; then
+        continue
+    fi
+    tag="${DOCKER_TAG}-${arch}"
+    echo ">>> Deleting '${repo}:${tag}'..."
+    curl -sS -X DELETE \
+        -H "Authorization: Bearer ${jwt}" \
+        "https://hub.docker.com/v2/repositories/${repo}/tags/${tag}/"
+done
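After a push, the resulting manifest list can be inspected to confirm that each entry carries the annotated platform info rather than defaulting to linux/amd64. A quick check with an illustrative repo name:

    export DOCKER_CLI_EXPERIMENTAL=enabled
    docker manifest inspect user/bitwardenrs:1.16.0 | grep -A3 '"platform"'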
@@ -0,0 +1 @@
+DROP TABLE org_policies;
@@ -0,0 +1,9 @@
+CREATE TABLE org_policies (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
+    atype INTEGER NOT NULL,
+    enabled BOOLEAN NOT NULL,
+    data TEXT NOT NULL,
+
+    UNIQUE (org_uuid, atype)
+);
@@ -0,0 +1 @@
+
@@ -0,0 +1,3 @@
+ALTER TABLE ciphers
+ADD COLUMN
+    deleted_at DATETIME;
@@ -0,0 +1,2 @@
+ALTER TABLE users_collections
+ADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;
@@ -0,0 +1 @@
+DROP TABLE org_policies;
@@ -0,0 +1,9 @@
+CREATE TABLE org_policies (
+    uuid CHAR(36) NOT NULL PRIMARY KEY,
+    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
+    atype INTEGER NOT NULL,
+    enabled BOOLEAN NOT NULL,
+    data TEXT NOT NULL,
+
+    UNIQUE (org_uuid, atype)
+);
@@ -0,0 +1 @@
+
@@ -0,0 +1,3 @@
+ALTER TABLE ciphers
+ADD COLUMN
+    deleted_at TIMESTAMP;
@@ -0,0 +1,2 @@
+ALTER TABLE users_collections
+ADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;
@@ -0,0 +1 @@
+DROP TABLE org_policies;
@@ -0,0 +1,9 @@
+CREATE TABLE org_policies (
+    uuid TEXT NOT NULL PRIMARY KEY,
+    org_uuid TEXT NOT NULL REFERENCES organizations (uuid),
+    atype INTEGER NOT NULL,
+    enabled BOOLEAN NOT NULL,
+    data TEXT NOT NULL,
+
+    UNIQUE (org_uuid, atype)
+);
@@ -0,0 +1 @@
+
@@ -0,0 +1,3 @@
+ALTER TABLE ciphers
+ADD COLUMN
+    deleted_at DATETIME;
@@ -0,0 +1,2 @@
+ALTER TABLE users_collections
+ADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT 0; -- FALSE
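The extraction dropped the migration file paths, but the same org_policies/soft-delete migrations clearly appear three times above, once per supported backend: the CHAR(36)/DATETIME variant targets MySQL, the TIMESTAMP variant PostgreSQL, and the TEXT plus `DEFAULT 0` variant SQLite. bitwarden_rs applies them through Diesel at startup; a manual run against a scratch database would look roughly like this (paths are illustrative):

    diesel migration run --database-url data/db.sqlite3 --migration-dir migrations/sqlite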
@@ -1 +1 @@
-nightly-2019-11-17
+nightly-2020-07-11
@@ -1 +1,2 @@
+version = "Two"
 max_width = 120
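The toolchain bump and the rustfmt change go together: the `version = "Two"` formatting rules are an unstable rustfmt option, so they need a nightly toolchain. A sketch of the matching local workflow, assuming rustup:

    rustup toolchain install nightly-2020-07-11
    cargo +nightly-2020-07-11 fmt -- --check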
src/api/admin.rs (295 lines changed)
@@ -1,31 +1,39 @@
+use once_cell::sync::Lazy;
+use serde::de::DeserializeOwned;
 use serde_json::Value;
 use std::process::Command;

-use rocket::http::{Cookie, Cookies, SameSite};
-use rocket::request::{self, FlashMessage, Form, FromRequest, Request};
-use rocket::response::{content::Html, Flash, Redirect};
-use rocket::{Outcome, Route};
+use rocket::{
+    http::{Cookie, Cookies, SameSite},
+    request::{self, FlashMessage, Form, FromRequest, Request, Outcome},
+    response::{content::Html, Flash, Redirect},
+    Route,
+};
 use rocket_contrib::json::Json;

-use crate::api::{ApiResult, EmptyResult, JsonResult};
-use crate::auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp};
-use crate::config::ConfigBuilder;
-use crate::db::{backup_database, models::*, DbConn};
-use crate::error::Error;
-use crate::mail;
-use crate::CONFIG;
+use crate::{
+    api::{ApiResult, EmptyResult, JsonResult},
+    auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp},
+    config::ConfigBuilder,
+    db::{backup_database, models::*, DbConn},
+    error::{Error, MapResult},
+    mail,
+    util::get_display_size,
+    CONFIG,
+};

 pub fn routes() -> Vec<Route> {
-    if CONFIG.admin_token().is_none() && !CONFIG.disable_admin_token() {
+    if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() {
         return routes![admin_disabled];
     }

     routes![
         admin_login,
-        get_users,
+        get_users_json,
         post_admin_login,
         admin_page,
         invite_user,
+        logout,
         delete_user,
         deauth_user,
         remove_2fa,
@@ -33,12 +41,15 @@ pub fn routes() -> Vec<Route> {
         post_config,
         delete_config,
         backup_db,
+        test_smtp,
+        users_overview,
+        organizations_overview,
+        diagnostics,
     ]
 }

-lazy_static! {
-    static ref CAN_BACKUP: bool = cfg!(feature = "sqlite") && Command::new("sqlite3").arg("-version").status().is_ok();
-}
+static CAN_BACKUP: Lazy<bool> =
+    Lazy::new(|| cfg!(feature = "sqlite") && Command::new("sqlite3").arg("-version").status().is_ok());

 #[get("/")]
 fn admin_disabled() -> &'static str {
@@ -49,13 +60,25 @@ const COOKIE_NAME: &str = "BWRS_ADMIN";
 const ADMIN_PATH: &str = "/admin";

 const BASE_TEMPLATE: &str = "admin/base";
-const VERSION: Option<&str> = option_env!("GIT_VERSION");
+const VERSION: Option<&str> = option_env!("BWRS_VERSION");

+fn admin_path() -> String {
+    format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
+}
+
+/// Used for `Location` response headers, which must specify an absolute URI
+/// (see https://tools.ietf.org/html/rfc2616#section-14.30).
+fn admin_url() -> String {
+    // Don't use CONFIG.domain() directly, since the user may want to keep a
+    // trailing slash there, particularly when running under a subpath.
+    format!("{}{}{}", CONFIG.domain_origin(), CONFIG.domain_path(), ADMIN_PATH)
+}

 #[get("/", rank = 2)]
 fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
     // If there is an error, show it
     let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
-    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg});
+    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});

     // Return the page
     let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
@@ -75,7 +98,7 @@ fn post_admin_login(data: Form<LoginForm>, mut cookies: Cookies, ip: ClientIp) -
     if !_validate_token(&data.token) {
         error!("Invalid admin token. IP: {}", ip.ip);
         Err(Flash::error(
-            Redirect::to(ADMIN_PATH),
+            Redirect::to(admin_url()),
             "Invalid admin token, please try again.",
         ))
     } else {
@@ -84,14 +107,14 @@ fn post_admin_login(data: Form<LoginForm>, mut cookies: Cookies, ip: ClientIp) -
         let jwt = encode_jwt(&claims);

         let cookie = Cookie::build(COOKIE_NAME, jwt)
-            .path(ADMIN_PATH)
-            .max_age(chrono::Duration::minutes(20))
+            .path(admin_path())
+            .max_age(time::Duration::minutes(20))
             .same_site(SameSite::Strict)
             .http_only(true)
             .finish();

         cookies.add(cookie);
-        Ok(Redirect::to(ADMIN_PATH))
+        Ok(Redirect::to(admin_url()))
     }
 }

@@ -106,19 +129,69 @@ fn _validate_token(token: &str) -> bool {
 struct AdminTemplateData {
     page_content: String,
     version: Option<&'static str>,
-    users: Vec<Value>,
+    users: Option<Vec<Value>>,
+    organizations: Option<Vec<Value>>,
+    diagnostics: Option<Value>,
     config: Value,
     can_backup: bool,
+    logged_in: bool,
+    urlpath: String,
 }

 impl AdminTemplateData {
-    fn new(users: Vec<Value>) -> Self {
+    fn new() -> Self {
         Self {
-            page_content: String::from("admin/page"),
+            page_content: String::from("admin/settings"),
             version: VERSION,
-            users,
             config: CONFIG.prepare_json(),
             can_backup: *CAN_BACKUP,
+            logged_in: true,
+            urlpath: CONFIG.domain_path(),
+            users: None,
+            organizations: None,
+            diagnostics: None,
+        }
+    }
+
+    fn users(users: Vec<Value>) -> Self {
+        Self {
+            page_content: String::from("admin/users"),
+            version: VERSION,
+            users: Some(users),
+            config: CONFIG.prepare_json(),
+            can_backup: *CAN_BACKUP,
+            logged_in: true,
+            urlpath: CONFIG.domain_path(),
+            organizations: None,
+            diagnostics: None,
+        }
+    }
+
+    fn organizations(organizations: Vec<Value>) -> Self {
+        Self {
+            page_content: String::from("admin/organizations"),
+            version: VERSION,
+            organizations: Some(organizations),
+            config: CONFIG.prepare_json(),
+            can_backup: *CAN_BACKUP,
+            logged_in: true,
+            urlpath: CONFIG.domain_path(),
+            users: None,
+            diagnostics: None,
+        }
+    }
+
+    fn diagnostics(diagnostics: Value) -> Self {
+        Self {
+            page_content: String::from("admin/diagnostics"),
+            version: VERSION,
+            organizations: None,
+            config: CONFIG.prepare_json(),
+            can_backup: *CAN_BACKUP,
+            logged_in: true,
+            urlpath: CONFIG.domain_path(),
+            users: None,
+            diagnostics: Some(diagnostics),
         }
     }

@@ -128,11 +201,8 @@ impl AdminTemplateData {
 }

 #[get("/", rank = 1)]
-fn admin_page(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
-    let users = User::get_all(&conn);
-    let users_json: Vec<Value> = users.iter().map(|u| u.to_json(&conn)).collect();
-
-    let text = AdminTemplateData::new(users_json).render()?;
+fn admin_page(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
+    let text = AdminTemplateData::new().render()?;
     Ok(Html(text))
 }

@@ -150,47 +220,67 @@ fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> Empt
         err!("User already exists")
     }

-    if !CONFIG.invitations_allowed() {
-        err!("Invitations are not allowed")
-    }
-
     let mut user = User::new(email);
     user.save(&conn)?;

     if CONFIG.mail_enabled() {
-        let org_name = "bitwarden_rs";
-        mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None)
+        mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None)
     } else {
         let invitation = Invitation::new(data.email);
         invitation.save(&conn)
     }
 }

+#[post("/test/smtp", data = "<data>")]
+fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyResult {
+    let data: InviteData = data.into_inner();
+
+    if CONFIG.mail_enabled() {
+        mail::send_test(&data.email)
+    } else {
+        err!("Mail is not enabled")
+    }
+}
+
+#[get("/logout")]
+fn logout(mut cookies: Cookies) -> Result<Redirect, ()> {
+    cookies.remove(Cookie::named(COOKIE_NAME));
+    Ok(Redirect::to(admin_url()))
+}

 #[get("/users")]
-fn get_users(_token: AdminToken, conn: DbConn) -> JsonResult {
+fn get_users_json(_token: AdminToken, conn: DbConn) -> JsonResult {
     let users = User::get_all(&conn);
     let users_json: Vec<Value> = users.iter().map(|u| u.to_json(&conn)).collect();

     Ok(Json(Value::Array(users_json)))
 }

+#[get("/users/overview")]
+fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
+    let users = User::get_all(&conn);
+    let users_json: Vec<Value> = users.iter()
+        .map(|u| {
+            let mut usr = u.to_json(&conn);
+            usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
+            usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn));
+            usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn) as i32));
+            usr
+        }).collect();
+
+    let text = AdminTemplateData::users(users_json).render()?;
+    Ok(Html(text))
+}

 #[post("/users/<uuid>/delete")]
 fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let user = match User::find_by_uuid(&uuid, &conn) {
-        Some(user) => user,
-        None => err!("User doesn't exist"),
-    };
+    let user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;

     user.delete(&conn)
 }

 #[post("/users/<uuid>/deauth")]
 fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = match User::find_by_uuid(&uuid, &conn) {
-        Some(user) => user,
-        None => err!("User doesn't exist"),
-    };
+    let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;

     Device::delete_all_by_user(&user.uuid, &conn)?;
     user.reset_security_stamp();

@@ -199,11 +289,7 @@ fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {

 #[post("/users/<uuid>/remove-2fa")]
 fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = match User::find_by_uuid(&uuid, &conn) {
-        Some(user) => user,
-        None => err!("User doesn't exist"),
-    };
+    let mut user = User::find_by_uuid(&uuid, &conn).map_res("User doesn't exist")?;

     TwoFactor::delete_all_by_user(&user.uuid, &conn)?;
     user.totp_recover = None;
     user.save(&conn)
@@ -214,6 +300,109 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
     User::update_all_revisions(&conn)
 }

+#[get("/organizations/overview")]
+fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
+    let organizations = Organization::get_all(&conn);
+    let organizations_json: Vec<Value> = organizations.iter().map(|o| {
+        let mut org = o.to_json();
+        org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
+        org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn));
+        org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn));
+        org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn) as i32));
+        org
+    }).collect();
+
+    let text = AdminTemplateData::organizations(organizations_json).render()?;
+    Ok(Html(text))
+}
+
+#[derive(Deserialize)]
+struct WebVaultVersion {
+    version: String,
+}
+
+#[derive(Deserialize)]
+struct GitRelease {
+    tag_name: String,
+}
+
+#[derive(Deserialize)]
+struct GitCommit {
+    sha: String,
+}
+
+fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
+    use reqwest::{blocking::Client, header::USER_AGENT};
+    use std::time::Duration;
+    let github_api = Client::builder().build()?;
+
+    Ok(
+        github_api.get(url)
+            .timeout(Duration::from_secs(10))
+            .header(USER_AGENT, "Bitwarden_RS")
+            .send()?
+            .error_for_status()?
+            .json::<T>()?
+    )
+}
+
+#[get("/diagnostics")]
+fn diagnostics(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
+    use std::net::ToSocketAddrs;
+    use chrono::prelude::*;
+    use crate::util::read_file_string;
+
+    let vault_version_path = format!("{}/{}", CONFIG.web_vault_folder(), "version.json");
+    let vault_version_str = read_file_string(&vault_version_path)?;
+    let web_vault_version: WebVaultVersion = serde_json::from_str(&vault_version_str)?;
+
+    let github_ips = ("github.com", 0).to_socket_addrs().map(|mut i| i.next());
+    let (dns_resolved, dns_ok) = match github_ips {
+        Ok(Some(a)) => (a.ip().to_string(), true),
+        _ => ("Could not resolve domain name.".to_string(), false),
+    };
+
+    // If the DNS Check failed, do not even attempt to check for new versions since we were not able to resolve github.com
+    let (latest_release, latest_commit, latest_web_build) = if dns_ok {
+        (
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") {
+                Ok(r) => r.tag_name,
+                _ => "-".to_string()
+            },
+            match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/bitwarden_rs/commits/master") {
+                Ok(mut c) => {
+                    c.sha.truncate(8);
+                    c.sha
+                },
+                _ => "-".to_string()
+            },
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") {
+                Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
+                _ => "-".to_string()
+            },
+        )
+    } else {
+        ("-".to_string(), "-".to_string(), "-".to_string())
+    };
+
+    // Run the date check as the last item right before filling the json.
+    // This should ensure that the time difference between the browser and the server is as minimal as possible.
+    let dt = Utc::now();
+    let server_time = dt.format("%Y-%m-%d %H:%M:%S").to_string();
+
+    let diagnostics_json = json!({
+        "dns_resolved": dns_resolved,
+        "server_time": server_time,
+        "web_vault_version": web_vault_version.version,
+        "latest_release": latest_release,
+        "latest_commit": latest_commit,
+        "latest_web_build": latest_web_build,
+    });
+
+    let text = AdminTemplateData::diagnostics(diagnostics_json).render()?;
+    Ok(Html(text))
+}

 #[post("/config", data = "<data>")]
 fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
     let data: ConfigBuilder = data.into_inner();
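The new admin pages are plain GET endpoints guarded by the BWRS_ADMIN cookie, so they can be probed with curl once a session cookie has been obtained from the login form. A rough sketch under assumed defaults (host, port, and the form field name are taken from the code above, but treat them as placeholders):

    # Log in and store the admin cookie, then fetch the new overview/diagnostics pages.
    curl -c cookies.txt -d 'token=YOUR_ADMIN_TOKEN' http://localhost:80/admin
    curl -b cookies.txt http://localhost:80/admin/users/overview
    curl -b cookies.txt http://localhost:80/admin/diagnostics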
@@ -1,19 +1,15 @@
-use rocket_contrib::json::Json;
 use chrono::Utc;
+use rocket_contrib::json::Json;

-use crate::db::models::*;
-use crate::db::DbConn;
-
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
-use crate::auth::{decode_invite, decode_delete, decode_verify_email, Headers};
-use crate::mail;
-use crate::crypto;
-
-use crate::CONFIG;
-
-use rocket::Route;
-
-pub fn routes() -> Vec<Route> {
+use crate::{
+    api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType},
+    auth::{decode_delete, decode_invite, decode_verify_email, Headers},
+    crypto,
+    db::{models::*, DbConn},
+    mail, CONFIG,
+};
+
+pub fn routes() -> Vec<rocket::Route> {
     routes![
         register,
         profile,
@@ -68,7 +64,7 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
     let mut user = match User::find_by_mail(&data.Email, &conn) {
         Some(user) => {
             if !user.password_hash.is_empty() {
-                if CONFIG.signups_allowed() {
+                if CONFIG.is_signup_allowed(&data.Email) {
                     err!("User already exists")
                 } else {
                     err!("Registration not allowed or user already exists")
@@ -89,14 +85,17 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
             }

                 user
-            } else if CONFIG.signups_allowed() {
+            } else if CONFIG.is_signup_allowed(&data.Email) {
                 err!("Account with this email already exists")
             } else {
                 err!("Registration not allowed or user already exists")
             }
         }
         None => {
-            if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) || CONFIG.can_signup_user(&data.Email) {
+            // Order is important here; the invitation check must come first
+            // because the bitwarden_rs admin can invite anyone, regardless
+            // of other signup restrictions.
+            if Invitation::take(&data.Email, &conn) || CONFIG.is_signup_allowed(&data.Email) {
                 User::new(data.Email.clone())
             } else {
                 err!("Registration not allowed or user already exists")
@@ -207,7 +206,12 @@ fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, conn: DbConn) -> Json
     user.public_key = Some(data.PublicKey);

     user.save(&conn)?;
-    Ok(Json(user.to_json(&conn)))
+
+    Ok(Json(json!({
+        "PrivateKey": user.private_key,
+        "PublicKey": user.public_key,
+        "Object":"keys"
+    })))
 }

 #[derive(Deserialize)]
@@ -371,8 +375,8 @@ fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: Db
         err!("Email already in use");
     }

-    if !CONFIG.signups_allowed() && !CONFIG.can_signup_user(&data.NewEmail) {
-        err!("Email cannot be changed to this address");
+    if !CONFIG.is_email_domain_allowed(&data.NewEmail) {
+        err!("Email domain not allowed");
     }

     let token = crypto::generate_token(6)?;
@@ -414,20 +418,21 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)

     match user.email_new {
         Some(ref val) => {
-            if *val != data.NewEmail.to_string() {
+            if val != &data.NewEmail {
                 err!("Email change mismatch");
             }
-        },
+        }
         None => err!("No email change pending"),
     }

     if CONFIG.mail_enabled() {
         // Only check the token if we sent out an email...
         match user.email_new_token {
-            Some(ref val) =>
+            Some(ref val) => {
                 if *val != data.Token.into_string() {
                     err!("Token mismatch");
                 }
+            }
             None => err!("No email change pending"),
         }
         user.verified_at = Some(Utc::now().naive_utc());
@@ -480,11 +485,9 @@ fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: DbConn)
         Ok(claims) => claims,
         Err(_) => err!("Invalid claim"),
     };

     if claims.sub != user.uuid {
         err!("Invalid claim");
     }

     user.verified_at = Some(Utc::now().naive_utc());
     user.last_verifying_at = None;
     user.login_verify_count = 0;
@@ -501,7 +504,7 @@ struct DeleteRecoverData {
     Email: String,
 }

-#[post("/accounts/delete-recover", data="<data>")]
+#[post("/accounts/delete-recover", data = "<data>")]
 fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
     let data: DeleteRecoverData = data.into_inner().data;

@@ -530,7 +533,7 @@ struct DeleteRecoverTokenData {
     Token: String,
 }

-#[post("/accounts/delete-recover-token", data="<data>")]
+#[post("/accounts/delete-recover-token", data = "<data>")]
 fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
     let data: DeleteRecoverTokenData = data.into_inner().data;

@@ -543,11 +546,9 @@ fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbC
         Ok(claims) => claims,
        Err(_) => err!("Invalid claim"),
     };

     if claims.sub != user.uuid {
         err!("Invalid claim");
     }

     user.delete(&conn)
 }
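The reworked post_keys handler above no longer echoes the full user profile; per the diff, the response is now just the stored key pair plus an Object discriminator, roughly (values are placeholders):

    {"PrivateKey": "...", "PublicKey": "...", "Object": "keys"}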
@@ -1,26 +1,20 @@
|
|||||||
use std::collections::{HashMap, HashSet};
|
use std::collections::{HashMap, HashSet};
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
use rocket::http::ContentType;
|
use rocket::{http::ContentType, request::Form, Data, Route};
|
||||||
use rocket::{request::Form, Data, Route};
|
|
||||||
|
|
||||||
use rocket_contrib::json::Json;
|
use rocket_contrib::json::Json;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
use multipart::server::save::SavedData;
|
|
||||||
use multipart::server::{Multipart, SaveResult};
|
|
||||||
|
|
||||||
use data_encoding::HEXLOWER;
|
use data_encoding::HEXLOWER;
|
||||||
|
use multipart::server::{save::SavedData, Multipart, SaveResult};
|
||||||
|
|
||||||
use crate::db::models::*;
|
use crate::{
|
||||||
use crate::db::DbConn;
|
api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType},
|
||||||
|
auth::Headers,
|
||||||
use crate::crypto;
|
crypto,
|
||||||
|
db::{models::*, DbConn},
|
||||||
use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType};
|
CONFIG,
|
||||||
use crate::auth::Headers;
|
};
|
||||||
|
|
||||||
use crate::CONFIG;
|
|
||||||
|
|
||||||
pub fn routes() -> Vec<Route> {
|
pub fn routes() -> Vec<Route> {
|
||||||
routes![
|
routes![
|
||||||
@@ -49,10 +43,16 @@ pub fn routes() -> Vec<Route> {
|
|||||||
put_cipher,
|
put_cipher,
|
||||||
delete_cipher_post,
|
delete_cipher_post,
|
||||||
delete_cipher_post_admin,
|
delete_cipher_post_admin,
|
||||||
|
delete_cipher_put,
|
||||||
|
delete_cipher_put_admin,
|
||||||
delete_cipher,
|
delete_cipher,
|
||||||
delete_cipher_admin,
|
delete_cipher_admin,
|
||||||
delete_cipher_selected,
|
delete_cipher_selected,
|
||||||
delete_cipher_selected_post,
|
delete_cipher_selected_post,
|
||||||
|
delete_cipher_selected_put,
|
||||||
|
restore_cipher_put,
|
||||||
|
restore_cipher_put_admin,
|
||||||
|
restore_cipher_selected,
|
||||||
delete_all,
|
delete_all,
|
||||||
move_cipher_selected,
|
move_cipher_selected,
|
||||||
move_cipher_selected_put,
|
move_cipher_selected_put,
|
||||||
@@ -79,6 +79,9 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|||||||
let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
|
let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
|
||||||
let collections_json: Vec<Value> = collections.iter().map(Collection::to_json).collect();
|
let collections_json: Vec<Value> = collections.iter().map(Collection::to_json).collect();
|
||||||
|
|
||||||
|
let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
|
||||||
|
let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
|
||||||
|
|
||||||
let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
|
let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
|
||||||
let ciphers_json: Vec<Value> = ciphers
|
let ciphers_json: Vec<Value> = ciphers
|
||||||
.iter()
|
.iter()
|
||||||
@@ -95,6 +98,7 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|||||||
"Profile": user_json,
|
"Profile": user_json,
|
||||||
"Folders": folders_json,
|
"Folders": folders_json,
|
||||||
"Collections": collections_json,
|
"Collections": collections_json,
|
||||||
|
"Policies": policies_json,
|
||||||
"Ciphers": ciphers_json,
|
"Ciphers": ciphers_json,
|
||||||
"Domains": domains_json,
|
"Domains": domains_json,
|
||||||
"Object": "sync"
|
"Object": "sync"
|
||||||
@@ -264,7 +268,10 @@ pub fn update_cipher_from_data(
|
|||||||
};
|
};
|
||||||
|
|
||||||
if saved_att.cipher_uuid != cipher.uuid {
|
if saved_att.cipher_uuid != cipher.uuid {
|
||||||
err!("Attachment is not owned by the cipher")
|
// Warn and break here since cloning ciphers provides attachment data but will not be cloned.
|
||||||
|
// If we error out here it will break the whole cloning and causes empty ciphers to appear.
|
||||||
|
warn!("Attachment is not owned by the cipher");
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
saved_att.akey = Some(attachment.Key);
|
saved_att.akey = Some(attachment.Key);
|
||||||
@@ -599,10 +606,13 @@ fn share_cipher_by_uuid(
|
|||||||
None => err!("Cipher doesn't exist"),
|
None => err!("Cipher doesn't exist"),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mut shared_to_collection = false;
|
||||||
|
|
||||||
match data.Cipher.OrganizationId.clone() {
|
match data.Cipher.OrganizationId.clone() {
|
||||||
None => err!("Organization id not provided"),
|
// If we don't get an organization ID, we don't do anything
|
||||||
|
// No error because this is used when using the Clone functionality
|
||||||
|
None => {}
|
||||||
Some(organization_uuid) => {
|
Some(organization_uuid) => {
|
||||||
let mut shared_to_collection = false;
|
|
||||||
for uuid in &data.CollectionIds {
|
for uuid in &data.CollectionIds {
|
||||||
match Collection::find_by_uuid_and_org(uuid, &organization_uuid, &conn) {
|
match Collection::find_by_uuid_and_org(uuid, &organization_uuid, &conn) {
|
||||||
None => err!("Invalid collection ID provided"),
|
None => err!("Invalid collection ID provided"),
|
||||||
@@ -616,19 +626,20 @@ fn share_cipher_by_uuid(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
update_cipher_from_data(
|
|
||||||
&mut cipher,
|
|
||||||
data.Cipher,
|
|
||||||
&headers,
|
|
||||||
shared_to_collection,
|
|
||||||
&conn,
|
|
||||||
&nt,
|
|
||||||
UpdateType::CipherUpdate,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
|
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
|
update_cipher_from_data(
|
||||||
|
&mut cipher,
|
||||||
|
data.Cipher,
|
||||||
|
&headers,
|
||||||
|
shared_to_collection,
|
||||||
|
&conn,
|
||||||
|
&nt,
|
||||||
|
UpdateType::CipherUpdate,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
|
#[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
|
||||||
@@ -642,20 +653,49 @@ fn post_attachment(
|
|||||||
) -> JsonResult {
|
) -> JsonResult {
|
||||||
let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
|
let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
|
||||||
Some(cipher) => cipher,
|
Some(cipher) => cipher,
|
||||||
None => err!("Cipher doesn't exist"),
|
None => err_discard!("Cipher doesn't exist", data),
|
||||||
};
|
};
|
||||||
|
|
||||||
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
|
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
|
||||||
err!("Cipher is not write accessible")
|
err_discard!("Cipher is not write accessible", data)
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut params = content_type.params();
|
let mut params = content_type.params();
|
||||||
let boundary_pair = params.next().expect("No boundary provided");
|
let boundary_pair = params.next().expect("No boundary provided");
|
||||||
let boundary = boundary_pair.1;
|
let boundary = boundary_pair.1;
|
||||||
|
|
||||||
|
let size_limit = if let Some(ref user_uuid) = cipher.user_uuid {
|
||||||
|
match CONFIG.user_attachment_limit() {
|
||||||
|
Some(0) => err_discard!("Attachments are disabled", data),
|
||||||
|
Some(limit_kb) => {
|
||||||
|
let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, &conn);
|
||||||
|
if left <= 0 {
|
||||||
|
err_discard!("Attachment size limit reached! Delete some files to open space", data)
|
||||||
|
}
|
||||||
|
Some(left as u64)
|
||||||
|
}
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
} else if let Some(ref org_uuid) = cipher.organization_uuid {
|
||||||
|
match CONFIG.org_attachment_limit() {
|
||||||
|
Some(0) => err_discard!("Attachments are disabled", data),
|
||||||
|
Some(limit_kb) => {
|
||||||
|
let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, &conn);
|
||||||
|
if left <= 0 {
|
||||||
|
err_discard!("Attachment size limit reached! Delete some files to open space", data)
|
||||||
|
}
|
||||||
|
Some(left as u64)
|
||||||
|
}
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
err_discard!("Cipher is neither owned by a user nor an organization", data);
|
||||||
|
};
|
||||||
|
|
||||||
let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher.uuid);
|
let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher.uuid);
|
||||||
|
|
||||||
let mut attachment_key = None;
|
let mut attachment_key = None;
|
||||||
|
let mut error = None;
|
||||||
|
|
||||||
Multipart::with_body(data.open(), boundary)
|
Multipart::with_body(data.open(), boundary)
|
||||||
.foreach_entry(|mut field| {
|
.foreach_entry(|mut field| {
|
||||||
@@ -674,18 +714,21 @@ fn post_attachment(
             let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
             let path = base_path.join(&file_name);

-            let size = match field.data.save().memory_threshold(0).size_limit(None).with_path(path) {
+            let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
                 SaveResult::Full(SavedData::File(_, size)) => size as i32,
                 SaveResult::Full(other) => {
-                    error!("Attachment is not a file: {:?}", other);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Attachment is not a file: {:?}", other));
                     return;
                 }
                 SaveResult::Partial(_, reason) => {
-                    error!("Partial result: {:?}", reason);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason));
                     return;
                 }
                 SaveResult::Error(e) => {
-                    error!("Error: {:?}", e);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Error: {:?}", e));
                     return;
                 }
             };
@@ -699,6 +742,10 @@ fn post_attachment(
         })
         .expect("Error processing multipart data");

+    if let Some(ref e) = error {
+        err!(e);
+    }
+
     nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));

     Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
@@ -716,11 +763,7 @@ fn post_attachment_admin(
     post_attachment(uuid, data, content_type, headers, conn, nt)
 }

-#[post(
-    "/ciphers/<uuid>/attachment/<attachment_id>/share",
-    format = "multipart/form-data",
-    data = "<data>"
-)]
+#[post("/ciphers/<uuid>/attachment/<attachment_id>/share", format = "multipart/form-data", data = "<data>")]
 fn post_attachment_share(
     uuid: String,
     attachment_id: String,
@@ -774,48 +817,62 @@ fn delete_attachment_admin(

 #[post("/ciphers/<uuid>/delete")]
 fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt)
 }

 #[post("/ciphers/<uuid>/delete-admin")]
 fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt)
+}
+
+#[put("/ciphers/<uuid>/delete")]
+fn delete_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt)
+}
+
+#[put("/ciphers/<uuid>/delete-admin")]
+fn delete_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt)
 }

 #[delete("/ciphers/<uuid>")]
 fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt)
 }

 #[delete("/ciphers/<uuid>/admin")]
 fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+    _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt)
 }

 #[delete("/ciphers", data = "<data>")]
 fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    let data: Value = data.into_inner().data;
-
-    let uuids = match data.get("Ids") {
-        Some(ids) => match ids.as_array() {
-            Some(ids) => ids.iter().filter_map(Value::as_str),
-            None => err!("Posted ids field is not an array"),
-        },
-        None => err!("Request missing ids field"),
-    };
-
-    for uuid in uuids {
-        if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, &nt) {
-            return error;
-        };
-    }
-
-    Ok(())
+    _delete_multiple_ciphers(data, headers, conn, false, nt)
 }

 #[post("/ciphers/delete", data = "<data>")]
 fn delete_cipher_selected_post(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    delete_cipher_selected(data, headers, conn, nt)
+    _delete_multiple_ciphers(data, headers, conn, false, nt)
+}
+
+#[put("/ciphers/delete", data = "<data>")]
+fn delete_cipher_selected_put(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _delete_multiple_ciphers(data, headers, conn, true, nt)
+}
+
+#[put("/ciphers/<uuid>/restore")]
+fn restore_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _restore_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+}
+
+#[put("/ciphers/<uuid>/restore-admin")]
+fn restore_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _restore_cipher_by_uuid(&uuid, &headers, &conn, &nt)
+}
+
+#[put("/ciphers/restore", data = "<data>")]
+fn restore_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    _restore_multiple_ciphers(data, headers, conn, nt)
 }

 #[derive(Deserialize)]
@@ -929,8 +986,8 @@ fn delete_all(
     }
 }

-fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
-    let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_delete: bool, nt: &Notify) -> EmptyResult {
+    let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
         Some(cipher) => cipher,
         None => err!("Cipher doesn't exist"),
     };
@@ -939,8 +996,72 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Not
         err!("Cipher can't be deleted by user")
     }

-    cipher.delete(&conn)?;
-    nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
+    if soft_delete {
+        cipher.deleted_at = Some(chrono::Utc::now().naive_utc());
+        cipher.save(&conn)?;
+        nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+    } else {
+        cipher.delete(&conn)?;
+        nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
+    }
+
+    Ok(())
+}
+
+fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult {
+    let data: Value = data.into_inner().data;
+
+    let uuids = match data.get("Ids") {
+        Some(ids) => match ids.as_array() {
+            Some(ids) => ids.iter().filter_map(Value::as_str),
+            None => err!("Posted ids field is not an array"),
+        },
+        None => err!("Request missing ids field"),
+    };
+
+    for uuid in uuids {
+        if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, soft_delete, &nt) {
+            return error;
+        };
+    }
+
+    Ok(())
+}
+
+fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
+    let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
+        Some(cipher) => cipher,
+        None => err!("Cipher doesn't exist"),
+    };
+
+    if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
+        err!("Cipher can't be restored by user")
+    }
+
+    cipher.deleted_at = None;
+    cipher.save(&conn)?;
+
+    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+    Ok(())
+}
+
+fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    let data: Value = data.into_inner().data;
+
+    let uuids = match data.get("Ids") {
+        Some(ids) => match ids.as_array() {
+            Some(ids) => ids.iter().filter_map(Value::as_str),
+            None => err!("Posted ids field is not an array"),
+        },
+        None => err!("Request missing ids field"),
+    };
+
+    for uuid in uuids {
+        if let error @ Err(_) = _restore_cipher_by_uuid(uuid, &headers, &conn, &nt) {
+            return error;
+        };
+    }

     Ok(())
 }
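Reviewer note: the `error @ Err(_)` binding in `_delete_multiple_ciphers` and `_restore_multiple_ciphers` captures the whole `Result` so the first failure can be propagated unchanged. A self-contained example of the pattern:

fn check(n: i32) -> Result<(), String> {
    if n % 2 == 0 {
        Ok(())
    } else {
        Err(format!("{} is odd", n))
    }
}

fn process_all(items: &[i32]) -> Result<(), String> {
    for n in items {
        // `e @ Err(_)` binds the entire Result, so it can be returned as-is.
        if let e @ Err(_) = check(*n) {
            return e;
        }
    }
    Ok(())
}

fn main() {
    assert!(process_all(&[2, 4, 6]).is_ok());
    assert_eq!(process_all(&[2, 5]).unwrap_err(), "5 is odd");
}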
@@ -1,15 +1,13 @@
 use rocket_contrib::json::Json;
 use serde_json::Value;

-use crate::db::models::*;
-use crate::db::DbConn;
-
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType};
-use crate::auth::Headers;
-
-use rocket::Route;
-
-pub fn routes() -> Vec<Route> {
+use crate::{
+    api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
+    auth::Headers,
+    db::{models::*, DbConn},
+};
+
+pub fn routes() -> Vec<rocket::Route> {
     routes![
         get_folders,
         get_folder,
|
|||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
|
|
||||||
pub struct FolderData {
|
pub struct FolderData {
|
||||||
pub Name: String,
|
pub Name: String,
|
||||||
}
|
}
|
||||||
|
@@ -2,7 +2,7 @@ mod accounts;
 mod ciphers;
 mod folders;
 mod organizations;
-pub(crate) mod two_factor;
+pub mod two_factor;

 pub fn routes() -> Vec<Route> {
     let mut mod_routes = routes![
@@ -29,14 +29,15 @@ pub fn routes() -> Vec<Route> {
 // Move this somewhere else
 //
 use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;

-use crate::api::{EmptyResult, JsonResult, JsonUpcase};
-use crate::auth::Headers;
-use crate::db::DbConn;
-use crate::error::Error;
+use crate::{
+    api::{EmptyResult, JsonResult, JsonUpcase},
+    auth::Headers,
+    db::DbConn,
+    error::Error,
+};

 #[put("/devices/identifier/<uuid>/clear-token")]
 fn clear_device_token(uuid: String) -> EmptyResult {
@@ -146,14 +147,13 @@ fn hibp_breach(username: String) -> JsonResult {
         username
     );

-    use reqwest::{header::USER_AGENT, Client};
+    use reqwest::{blocking::Client, header::USER_AGENT};

     if let Some(api_key) = crate::CONFIG.hibp_api_key() {
-        let hibp_client = Client::builder()
-            .use_sys_proxy()
-            .build()?;
+        let hibp_client = Client::builder().build()?;

-        let res = hibp_client.get(&url)
+        let res = hibp_client
+            .get(&url)
             .header(USER_AGENT, user_agent)
             .header("hibp-api-key", api_key)
             .send()?;
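Reviewer note: this hunk tracks the reqwest 0.10 split of the synchronous API into `reqwest::blocking`; system proxies are picked up by default there, which is why the `.use_sys_proxy()` builder call disappears. A minimal sketch of the new blocking API (the URL is illustrative only):

use reqwest::blocking::Client;

fn fetch(url: &str) -> reqwest::Result<String> {
    // No .use_sys_proxy() needed: proxy env vars are honored by default in 0.10.
    let client = Client::builder().build()?;
    client.get(url).send()?.text()
}

fn main() {
    if let Ok(body) = fetch("https://example.com") {
        println!("fetched {} bytes", body.len());
    }
}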
@@ -173,7 +173,7 @@ fn hibp_breach(username: String) -> JsonResult {
             "BreachDate": "2019-08-18T00:00:00Z",
             "AddedDate": "2019-08-18T00:00:00Z",
             "Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{account}\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/account/{account}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>", account=username),
-            "LogoPath": "/bwrs_static/hibp.png",
+            "LogoPath": "bwrs_static/hibp.png",
             "PwnCount": 0,
             "DataClasses": [
                 "Error - No API key set!"
@@ -1,16 +1,14 @@
-use rocket::request::Form;
-use rocket::Route;
+use num_traits::FromPrimitive;
+use rocket::{request::Form, Route};
 use rocket_contrib::json::Json;
 use serde_json::Value;

-use crate::api::{
-    EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType,
-};
-use crate::auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders};
-use crate::db::models::*;
-use crate::db::DbConn;
-use crate::mail;
-use crate::CONFIG;
+use crate::{
+    api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
+    auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders},
+    db::{models::*, DbConn},
+    mail, CONFIG,
+};

 pub fn routes() -> Vec<Route> {
     routes![
@@ -45,6 +43,10 @@ pub fn routes() -> Vec<Route> {
         delete_user,
         post_delete_user,
         post_org_import,
+        list_policies,
+        list_policies_token,
+        get_policy,
+        put_policy,
     ]
 }

@@ -369,7 +371,7 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders,
         .map(|col_user| {
             UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn)
                 .unwrap()
-                .to_json_collection_user_details(col_user.read_only)
+                .to_json_user_access_restrictions(&col_user)
         })
         .collect();

@@ -403,7 +405,9 @@ fn put_collection_users(
             continue;
         }

-        CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, &conn)?;
+        CollectionUser::save(&user.user_uuid, &coll_id,
+                             d.ReadOnly, d.HidePasswords,
+                             &conn)?;
     }

     Ok(())
@@ -447,6 +451,7 @@ fn get_org_users(org_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonRe
 struct CollectionData {
     Id: String,
     ReadOnly: bool,
+    HidePasswords: bool,
 }

 #[derive(Deserialize)]
@@ -480,7 +485,11 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
     let user = match User::find_by_mail(&email, &conn) {
         None => {
             if !CONFIG.invitations_allowed() {
-                err!(format!("User email does not exist: {}", email))
+                err!(format!("User does not exist: {}", email))
+            }
+
+            if !CONFIG.is_email_domain_allowed(&email) {
+                err!("Email domain not eligible for invitations")
             }

             if !CONFIG.mail_enabled() {
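Reviewer note: the body of the new `CONFIG.is_email_domain_allowed(&email)` gate is not part of this diff; a hypothetical sketch of what such a check typically does (treating an empty allowlist as "allow everything" is an assumption here, not something the diff confirms):

fn is_email_domain_allowed(email: &str, allowed: &[&str]) -> bool {
    // Split at the last '@' to get the domain part.
    let domain = match email.rsplit_once('@') {
        Some((_, d)) => d,
        None => return false,
    };
    allowed.is_empty() || allowed.iter().any(|d| d.eq_ignore_ascii_case(domain))
}

fn main() {
    assert!(is_email_domain_allowed("a@example.com", &["example.com"]));
    assert!(!is_email_domain_allowed("a@evil.com", &["example.com"]));
    assert!(is_email_domain_allowed("a@anything.org", &[]));
}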
@@ -514,7 +523,9 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
                 match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                     None => err!("Collection not found in Organization"),
                     Some(collection) => {
-                        CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, &conn)?;
+                        CollectionUser::save(&user.uuid, &collection.uuid,
+                                             col.ReadOnly, col.HidePasswords,
+                                             &conn)?;
                     }
                 }
             }
@@ -769,7 +780,9 @@ fn edit_user(
                 match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                     None => err!("Collection not found in Organization"),
                     Some(collection) => {
-                        CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid, col.ReadOnly, &conn)?;
+                        CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid,
+                                             col.ReadOnly, col.HidePasswords,
+                                             &conn)?;
                     }
                 }
             }
@@ -830,22 +843,13 @@ struct RelationsData {
 fn post_org_import(
     query: Form<OrgIdData>,
     data: JsonUpcase<ImportData>,
-    headers: Headers,
+    headers: AdminHeaders,
     conn: DbConn,
     nt: Notify,
 ) -> EmptyResult {
     let data: ImportData = data.into_inner().data;
     let org_id = query.into_inner().organization_id;

-    let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
-        Some(user) => user,
-        None => err!("User is not part of the organization"),
-    };
-
-    if org_user.atype < UserOrgType::Admin {
-        err!("Only admins or owners can import into an organization")
-    }
-
     // Read and create the collections
     let collections: Vec<_> = data
         .Collections
@@ -866,6 +870,8 @@ fn post_org_import(
         relations.push((relation.Key, relation.Value));
     }

+    let headers: Headers = headers.into();
+
     // Read and create the ciphers
     let ciphers: Vec<_> = data
         .Ciphers
@@ -901,3 +907,83 @@ fn post_org_import(
     let mut user = headers.user;
     user.update_revision(&conn)
 }
+
+#[get("/organizations/<org_id>/policies")]
+fn list_policies(org_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+    let policies = OrgPolicy::find_by_org(&org_id, &conn);
+    let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
+
+    Ok(Json(json!({
+        "Data": policies_json,
+        "Object": "list",
+        "ContinuationToken": null
+    })))
+}
+
+#[get("/organizations/<org_id>/policies/token?<token>")]
+fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResult {
+    let invite = crate::auth::decode_invite(&token)?;
+
+    let invite_org_id = match invite.org_id {
+        Some(invite_org_id) => invite_org_id,
+        None => err!("Invalid token"),
+    };
+
+    if invite_org_id != org_id {
+        err!("Token doesn't match request organization");
+    }
+
+    // TODO: We receive the invite token as ?token=<>, validate it contains the org id
+    let policies = OrgPolicy::find_by_org(&org_id, &conn);
+    let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();
+
+    Ok(Json(json!({
+        "Data": policies_json,
+        "Object": "list",
+        "ContinuationToken": null
+    })))
+}
+
+#[get("/organizations/<org_id>/policies/<pol_type>")]
+fn get_policy(org_id: String, pol_type: i32, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+    let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
+        Some(pt) => pt,
+        None => err!("Invalid policy type"),
+    };
+
+    let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) {
+        Some(p) => p,
+        None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()),
+    };
+
+    Ok(Json(policy.to_json()))
+}
+
+#[derive(Deserialize)]
+struct PolicyData {
+    enabled: bool,
+    #[serde(rename = "type")]
+    _type: i32,
+    data: Value,
+}
+
+#[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
+fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+    let data: PolicyData = data.into_inner();
+
+    let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
+        Some(pt) => pt,
+        None => err!("Invalid policy type"),
+    };
+
+    let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) {
+        Some(p) => p,
+        None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()),
+    };
+
+    policy.enabled = data.enabled;
+    policy.data = serde_json::to_string(&data.data)?;
+    policy.save(&conn)?;
+
+    Ok(Json(policy.to_json()))
+}
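Reviewer note: `PolicyData` is the one request body here that uses lowercase field names (hence `Json<PolicyData>` rather than `JsonUpcase`), with `type` renamed because it is a Rust keyword. A round-trip of the kind of body a client might PUT, assuming serde and serde_json are on the crate path:

use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Deserialize)]
struct PolicyData {
    enabled: bool,
    #[serde(rename = "type")]
    _type: i32,
    data: Value,
}

fn main() {
    // Illustrative payload for PUT /organizations/<org_id>/policies/<pol_type>.
    let body = json!({ "enabled": true, "type": 0, "data": {} });
    let parsed: PolicyData = serde_json::from_value(body).unwrap();
    assert!(parsed.enabled);
    assert_eq!(parsed._type, 0);
}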
@@ -2,13 +2,16 @@ use data_encoding::BASE32;
 use rocket::Route;
 use rocket_contrib::json::Json;

-use crate::api::core::two_factor::_generate_recover_code;
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
-use crate::auth::Headers;
-use crate::crypto;
-use crate::db::{
-    models::{TwoFactor, TwoFactorType},
-    DbConn,
+use crate::{
+    api::{
+        core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData,
+    },
+    auth::{ClientIp, Headers},
+    crypto,
+    db::{
+        models::{TwoFactor, TwoFactorType},
+        DbConn,
+    },
 };

 pub use crate::config::CONFIG;
@@ -20,6 +23,7 @@ pub fn routes() -> Vec<Route> {
         activate_authenticator_put,
     ]
 }

 #[post("/two-factor/get-authenticator", data = "<data>")]
 fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
     let data: PasswordData = data.into_inner().data;
@@ -53,7 +57,12 @@ struct EnableAuthenticatorData {
 }

 #[post("/two-factor/authenticator", data = "<data>")]
-fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
+fn activate_authenticator(
+    data: JsonUpcase<EnableAuthenticatorData>,
+    headers: Headers,
+    ip: ClientIp,
+    conn: DbConn,
+) -> JsonResult {
     let data: EnableAuthenticatorData = data.into_inner().data;
     let password_hash = data.MasterPasswordHash;
     let key = data.Key;
@@ -76,7 +85,7 @@ fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: He
     }

     // Validate the token provided with the key, and save new twofactor
-    validate_totp_code(&user.uuid, token, &key.to_uppercase(), &conn)?;
+    validate_totp_code(&user.uuid, token, &key.to_uppercase(), &ip, &conn)?;

     _generate_recover_code(&mut user, &conn);

@@ -88,22 +97,32 @@ fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: He
 }

 #[put("/two-factor/authenticator", data = "<data>")]
-fn activate_authenticator_put(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
-    activate_authenticator(data, headers, conn)
+fn activate_authenticator_put(
+    data: JsonUpcase<EnableAuthenticatorData>,
+    headers: Headers,
+    ip: ClientIp,
+    conn: DbConn,
+) -> JsonResult {
+    activate_authenticator(data, headers, ip, conn)
 }

-pub fn validate_totp_code_str(user_uuid: &str, totp_code: &str, secret: &str, conn: &DbConn) -> EmptyResult {
+pub fn validate_totp_code_str(
+    user_uuid: &str,
+    totp_code: &str,
+    secret: &str,
+    ip: &ClientIp,
+    conn: &DbConn,
+) -> EmptyResult {
     let totp_code: u64 = match totp_code.parse() {
         Ok(code) => code,
         _ => err!("TOTP code is not a number"),
     };

-    validate_totp_code(user_uuid, totp_code, secret, &conn)
+    validate_totp_code(user_uuid, totp_code, secret, ip, &conn)
 }

-pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, conn: &DbConn) -> EmptyResult {
+pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &ClientIp, conn: &DbConn) -> EmptyResult {
     use oath::{totp_raw_custom_time, HashType};
-    use std::time::{UNIX_EPOCH, SystemTime};

     let decoded_secret = match BASE32.decode(secret.as_bytes()) {
         Ok(s) => s,
@@ -116,24 +135,23 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, conn: &
     };

     // Get the current system time in UNIX Epoch (UTC)
-    let current_time: u64 = SystemTime::now().duration_since(UNIX_EPOCH)
-        .expect("Earlier than 1970-01-01 00:00:00 UTC").as_secs();
+    let current_time = chrono::Utc::now();
+    let current_timestamp = current_time.timestamp();

     // The amount of steps back and forward in time
     // Also check if we need to disable time drifted TOTP codes.
     // If that is the case, we set the steps to 0 so only the current TOTP is valid.
-    let steps = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };
+    let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };

     for step in -steps..=steps {
-        let time_step = (current_time / 30) as i32 + step;
-        // We need to calculate the time offsite and cast it as an i128.
-        // Else we can't do math with it on a default u64 variable.
-        let time_offset: i128 = (step * 30).into();
-        let generated = totp_raw_custom_time(&decoded_secret, 6, 0, 30, (current_time as i128 + time_offset) as u64, &HashType::SHA1);
+        let time_step = current_timestamp / 30i64 + step;
+        let time = (current_timestamp + step * 30i64) as u64;
+        let generated = totp_raw_custom_time(&decoded_secret, 6, 0, 30, time, &HashType::SHA1);

         // Check the the given code equals the generated and if the time_step is larger then the one last used.
-        if generated == totp_code && time_step > twofactor.last_used {
+        if generated == totp_code && time_step > twofactor.last_used as i64 {

             // If the step does not equals 0 the time is drifted either server or client side.
             if step != 0 {
                 info!("TOTP Time drift detected. The step offset is {}", step);
@@ -141,15 +159,26 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, conn: &

             // Save the last used time step so only totp time steps higher then this one are allowed.
             // This will also save a newly created twofactor if the code is correct.
-            twofactor.last_used = time_step;
+            twofactor.last_used = time_step as i32;
             twofactor.save(&conn)?;
             return Ok(());
-        } else if generated == totp_code && time_step <= twofactor.last_used {
-            warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
-            err!("Invalid TOTP Code!");
+        } else if generated == totp_code && time_step <= twofactor.last_used as i64 {
+            warn!(
+                "This or a TOTP code within {} steps back and forward has already been used!",
+                steps
+            );
+            err!(format!(
+                "Invalid TOTP code! Server time: {} IP: {}",
+                current_time.format("%F %T UTC"),
+                ip.ip
+            ));
         }
     }

     // Else no valide code received, deny access
-    err!("Invalid TOTP code!");
+    err!(format!(
+        "Invalid TOTP code! Server time: {} IP: {}",
+        current_time.format("%F %T UTC"),
+        ip.ip
+    ));
 }
@@ -2,32 +2,28 @@ use chrono::Utc;
 use data_encoding::BASE64;
 use rocket::Route;
 use rocket_contrib::json::Json;
-use serde_json;

-use crate::api::core::two_factor::_generate_recover_code;
-use crate::api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, PasswordData};
-use crate::auth::Headers;
-use crate::crypto;
-use crate::db::{
-    models::{TwoFactor, TwoFactorType, User},
-    DbConn,
+use crate::{
+    api::{core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, PasswordData},
+    auth::Headers,
+    crypto,
+    db::{
+        models::{TwoFactor, TwoFactorType, User},
+        DbConn,
+    },
+    error::MapResult,
+    CONFIG,
 };
-use crate::error::MapResult;
-use crate::CONFIG;

 pub fn routes() -> Vec<Route> {
-    routes![
-        get_duo,
-        activate_duo,
-        activate_duo_put,
-    ]
+    routes![get_duo, activate_duo, activate_duo_put,]
 }

 #[derive(Serialize, Deserialize)]
 struct DuoData {
-    host: String,
-    ik: String,
-    sk: String,
+    host: String, // Duo API hostname
+    ik: String,   // integration key
+    sk: String,   // secret key
 }

 impl DuoData {
@@ -171,7 +167,7 @@ fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn)
     let type_ = TwoFactorType::Duo;
     let twofactor = TwoFactor::new(user.uuid.clone(), type_, data_str);
     twofactor.save(&conn)?;

     _generate_recover_code(&mut user, &conn);

     Ok(Json(json!({
@@ -191,9 +187,10 @@ fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbC
 fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult {
     const AGENT: &str = "bitwarden_rs:Duo/1.0 (Rust)";

-    use reqwest::{header::*, Client, Method};
+    use reqwest::{blocking::Client, header::*, Method};
     use std::str::FromStr;

+    // https://duo.com/docs/authapi#api-details
     let url = format!("https://{}{}", &data.host, path);
     let date = Utc::now().to_rfc2822();
     let username = &data.ik;
@@ -272,6 +269,10 @@ fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64
 }

 pub fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyResult {
+    // email is as entered by the user, so it needs to be normalized before
+    // comparison with auth_user below.
+    let email = &email.to_lowercase();
+
     let split: Vec<&str> = response.split(':').collect();
     if split.len() != 2 {
         err!("Invalid response length");
@@ -1,29 +1,21 @@
+use chrono::{Duration, NaiveDateTime, Utc};
 use rocket::Route;
 use rocket_contrib::json::Json;
-use serde_json;

-use crate::api::core::two_factor::_generate_recover_code;
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, PasswordData};
-use crate::auth::Headers;
-use crate::crypto;
-use crate::db::{
-    models::{TwoFactor, TwoFactorType},
-    DbConn,
+use crate::{
+    api::{core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, PasswordData},
+    auth::Headers,
+    crypto,
+    db::{
+        models::{TwoFactor, TwoFactorType},
+        DbConn,
+    },
+    error::{Error, MapResult},
+    mail, CONFIG,
 };
-use crate::error::Error;
-use crate::mail;
-use crate::CONFIG;
-
-use chrono::{Duration, NaiveDateTime, Utc};
-use std::ops::Add;

 pub fn routes() -> Vec<Route> {
-    routes![
-        get_email,
-        send_email_login,
-        send_email,
-        email,
-    ]
+    routes![get_email, send_email_login, send_email, email,]
 }

 #[derive(Deserialize)]
|
|||||||
/// Generate the token, save the data for later verification and send email to user
|
/// Generate the token, save the data for later verification and send email to user
|
||||||
pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
|
pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
|
||||||
let type_ = TwoFactorType::Email as i32;
|
let type_ = TwoFactorType::Email as i32;
|
||||||
let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, &conn)?;
|
let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, &conn).map_res("Two factor not found")?;
|
||||||
|
|
||||||
let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
|
let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
|
||||||
|
|
||||||
@@ -73,7 +65,7 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
     twofactor.data = twofactor_data.to_json();
     twofactor.save(&conn)?;

-    mail::send_token(&twofactor_data.email, &twofactor_data.last_token?)?;
+    mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;

     Ok(())
 }
@@ -140,7 +132,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -
     );
     twofactor.save(&conn)?;

-    mail::send_token(&twofactor_data.email, &twofactor_data.last_token?)?;
+    mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?;

     Ok(())
 }
@@ -164,7 +156,7 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
     }

     let type_ = TwoFactorType::EmailVerificationChallenge as i32;
-    let mut twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn)?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).map_res("Two factor not found")?;

     let mut email_data = EmailTokenData::from_json(&twofactor.data)?;

@@ -194,7 +186,7 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes
 /// Validate the email code when used as TwoFactor token mechanism
 pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
     let mut email_data = EmailTokenData::from_json(&data)?;
-    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)?;
+    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?;
     let issued_token = match &email_data.last_token {
         Some(t) => t,
         _ => err!("No token available"),
@@ -217,7 +209,7 @@ pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &

     let date = NaiveDateTime::from_timestamp(email_data.token_sent, 0);
     let max_time = CONFIG.email_expiration_time() as i64;
-    if date.add(Duration::seconds(max_time)) < Utc::now().naive_utc() {
+    if date + Duration::seconds(max_time) < Utc::now().naive_utc() {
         err!("Token has expired")
     }

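Reviewer note: dropping `std::ops::Add` works because chrono's `NaiveDateTime` implements `Add<Duration>`, so the explicit `.add(...)` call was only spelling out what the `+` operator already does:

use chrono::{Duration, NaiveDateTime};

fn main() {
    let sent = NaiveDateTime::from_timestamp(1_600_000_000, 0);
    // Same operation the expiry check performs, via the Add impl.
    let expires = sent + Duration::seconds(600);
    assert!(expires > sent);
}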
@@ -276,10 +268,10 @@ impl EmailTokenData {

 /// Takes an email address and obscures it by replacing it with asterisks except two characters.
 pub fn obscure_email(email: &str) -> String {
-    let split: Vec<&str> = email.split('@').collect();
+    let split: Vec<&str> = email.rsplitn(2, '@').collect();

-    let mut name = split[0].to_string();
-    let domain = &split[1];
+    let mut name = split[1].to_string();
+    let domain = &split[0];

     let name_size = name.chars().count();

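Reviewer note: `rsplitn(2, '@')` splits at the last '@' and yields the pieces in reverse order, which is why the `split[0]`/`split[1]` indices swap; it also keeps a local part containing a stray '@' intact, where the old `split('@')` would have picked the wrong piece as the domain:

fn main() {
    let email = "user@odd@example.com"; // '@' inside the local part
    let split: Vec<&str> = email.rsplitn(2, '@').collect();
    assert_eq!(split[0], "example.com"); // domain comes first now
    assert_eq!(split[1], "user@odd");    // local part stays whole
}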
@@ -321,14 +313,14 @@ mod tests {

     #[test]
     fn test_token() {
-        let result = generate_token(19).unwrap();
+        let result = crypto::generate_token(19).unwrap();

         assert_eq!(result.chars().count(), 19);
     }

     #[test]
     fn test_token_too_large() {
-        let result = generate_token(20);
+        let result = crypto::generate_token(20);

         assert!(result.is_err(), "too large token should give an error");
     }
@@ -1,22 +1,23 @@
 use data_encoding::BASE32;
 use rocket::Route;
 use rocket_contrib::json::Json;
-use serde_json;
 use serde_json::Value;

-use crate::api::{JsonResult, JsonUpcase, NumberOrString, PasswordData};
-use crate::auth::Headers;
-use crate::crypto;
-use crate::db::{
-    models::{TwoFactor, User},
-    DbConn,
+use crate::{
+    api::{JsonResult, JsonUpcase, NumberOrString, PasswordData},
+    auth::Headers,
+    crypto,
+    db::{
+        models::{TwoFactor, User},
+        DbConn,
+    },
 };

-pub(crate) mod authenticator;
-pub(crate) mod duo;
-pub(crate) mod email;
-pub(crate) mod u2f;
-pub(crate) mod yubikey;
+pub mod authenticator;
+pub mod duo;
+pub mod email;
+pub mod u2f;
+pub mod yubikey;

 pub fn routes() -> Vec<Route> {
     let mut routes = routes![
@@ -39,7 +40,7 @@ pub fn routes() -> Vec<Route> {
 #[get("/two-factor")]
 fn get_twofactor(headers: Headers, conn: DbConn) -> JsonResult {
     let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn);
-    let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_list).collect();
+    let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_provider).collect();

     Ok(Json(json!({
         "Data": twofactors_json,
@@ -1,27 +1,31 @@
+use once_cell::sync::Lazy;
 use rocket::Route;
 use rocket_contrib::json::Json;
-use serde_json;
 use serde_json::Value;
-use u2f::messages::{RegisterResponse, SignResponse, U2fSignRequest};
-use u2f::protocol::{Challenge, U2f};
-use u2f::register::Registration;
-
-use crate::api::core::two_factor::_generate_recover_code;
-use crate::api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
-use crate::auth::Headers;
-use crate::db::{
-    models::{TwoFactor, TwoFactorType},
-    DbConn,
+use u2f::{
+    messages::{RegisterResponse, SignResponse, U2fSignRequest},
+    protocol::{Challenge, U2f},
+    register::Registration,
+};
+
+use crate::{
+    api::{
+        core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, NumberOrString,
+        PasswordData,
+    },
+    auth::Headers,
+    db::{
+        models::{TwoFactor, TwoFactorType},
+        DbConn,
+    },
+    error::Error,
+    CONFIG,
 };
-use crate::error::Error;
-use crate::CONFIG;

 const U2F_VERSION: &str = "U2F_V2";

-lazy_static! {
-    static ref APP_ID: String = format!("{}/app-id.json", &CONFIG.domain());
-    static ref U2F: U2f = U2f::new(APP_ID.clone());
-}
+static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.domain()));
+static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone()));

 pub fn routes() -> Vec<Route> {
     routes![
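Reviewer note: this is part of the repo-wide move from `lazy_static!` to `once_cell::sync::Lazy`; the semantics are the same, initialization runs once on first dereference and the value is cached afterwards. A minimal example of the pattern:

use once_cell::sync::Lazy;

static GREETING: Lazy<String> = Lazy::new(|| format!("built on first use by {}", "once_cell"));

fn main() {
    println!("{}", *GREETING); // first deref runs the closure
    println!("{}", *GREETING); // later derefs reuse the cached value
}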
@@ -29,6 +33,7 @@ pub fn routes() -> Vec<Route> {
         generate_u2f_challenge,
         activate_u2f,
         activate_u2f_put,
+        delete_u2f,
     ]
 }

@@ -91,6 +96,7 @@ struct RegistrationDef {
     key_handle: Vec<u8>,
     pub_key: Vec<u8>,
     attestation_cert: Option<Vec<u8>>,
+    device_name: Option<String>,
 }

 #[derive(Serialize, Deserialize)]
@@ -194,6 +200,50 @@ fn activate_u2f_put(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbC
     activate_u2f(data, headers, conn)
 }

+#[derive(Deserialize, Debug)]
+#[allow(non_snake_case)]
+struct DeleteU2FData {
+    Id: NumberOrString,
+    MasterPasswordHash: String,
+}
+
+#[delete("/two-factor/u2f", data = "<data>")]
+fn delete_u2f(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
+    let data: DeleteU2FData = data.into_inner().data;
+
+    let id = data.Id.into_i32()?;
+
+    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
+        err!("Invalid password");
+    }
+
+    let type_ = TwoFactorType::U2f as i32;
+    let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, type_, &conn) {
+        Some(tf) => tf,
+        None => err!("U2F data not found!"),
+    };
+
+    let mut data: Vec<U2FRegistration> = match serde_json::from_str(&tf.data) {
+        Ok(d) => d,
+        Err(_) => err!("Error parsing U2F data"),
+    };
+
+    data.retain(|r| r.id != id);
+
+    let new_data_str = serde_json::to_string(&data)?;
+
+    tf.data = new_data_str;
+    tf.save(&conn)?;
+
+    let keys_json: Vec<Value> = data.iter().map(U2FRegistration::to_json).collect();
+
+    Ok(Json(json!({
+        "Enabled": true,
+        "Keys": keys_json,
+        "Object": "twoFactorU2f"
+    })))
+}
+
 fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge {
     let challenge = U2F.generate_challenge().unwrap();

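Reviewer note: `delete_u2f` removes a single registration with `Vec::retain`, an in-place filter that keeps every element for which the predicate holds:

fn main() {
    let mut ids = vec![1, 2, 3, 2];
    let to_delete = 2;
    ids.retain(|&id| id != to_delete); // drop every matching registration id
    assert_eq!(ids, vec![1, 3]);
}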
@@ -1,26 +1,21 @@
 use rocket::Route;
 use rocket_contrib::json::Json;
-use serde_json;
 use serde_json::Value;
-use yubico::config::Config;
-use yubico::verify;
-
-use crate::api::core::two_factor::_generate_recover_code;
-use crate::api::{EmptyResult, JsonResult, JsonUpcase, PasswordData};
-use crate::auth::Headers;
-use crate::db::{
-    models::{TwoFactor, TwoFactorType},
-    DbConn,
+use yubico::{config::Config, verify};
+
+use crate::{
+    api::{core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, PasswordData},
+    auth::Headers,
+    db::{
+        models::{TwoFactor, TwoFactorType},
+        DbConn,
+    },
+    error::{Error, MapResult},
+    CONFIG,
 };
-use crate::error::{Error, MapResult};
-use crate::CONFIG;

 pub fn routes() -> Vec<Route> {
-    routes![
-        generate_yubikey,
-        activate_yubikey,
-        activate_yubikey_put,
-    ]
+    routes![generate_yubikey, activate_yubikey, activate_yubikey_put,]
 }

 #[derive(Deserialize, Debug)]
src/api/icons.rs (161)
@@ -1,21 +1,17 @@
-use std::fs::{create_dir_all, remove_file, symlink_metadata, File};
-use std::io::prelude::*;
-use std::net::ToSocketAddrs;
-use std::time::{Duration, SystemTime};
-
-use rocket::http::ContentType;
-use rocket::response::Content;
-use rocket::Route;
-
-use reqwest::{header::HeaderMap, Client, Response, Url};
-
-use rocket::http::Cookie;
-
+use std::{
+    fs::{create_dir_all, remove_file, symlink_metadata, File},
+    io::prelude::*,
+    net::{IpAddr, ToSocketAddrs},
+    time::{Duration, SystemTime},
+};
+
+use once_cell::sync::Lazy;
 use regex::Regex;
+use reqwest::{blocking::Client, blocking::Response, header::HeaderMap, Url};
+use rocket::{http::ContentType, http::Cookie, response::Content, Route};
 use soup::prelude::*;

-use crate::error::Error;
-use crate::CONFIG;
+use crate::{error::Error, util::Cached, CONFIG};

 pub fn routes() -> Vec<Route> {
     routes![icon]
@@ -25,16 +21,19 @@ const FALLBACK_ICON: &[u8; 344] = include_bytes!("../static/fallback-icon.png");

 const ALLOWED_CHARS: &str = "_-.";

-lazy_static! {
+static CLIENT: Lazy<Client> = Lazy::new(|| {
     // Reuse the client between requests
-    static ref CLIENT: Client = Client::builder()
-        .use_sys_proxy()
-        .gzip(true)
+    Client::builder()
         .timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
         .default_headers(_header_map())
         .build()
-        .unwrap();
-}
+        .unwrap()
+});
+
+static ICON_REL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"icon$|apple.*icon").unwrap());
+static ICON_HREF_REGEX: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"(?i)\w+\.(jpg|jpeg|png|ico)(\?.*)?$|^data:image.*base64").unwrap());
+static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());

 fn is_valid_domain(domain: &str) -> bool {
     // Don't allow empty or too big domains or path traversal
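Reviewer note: the three new regexes are compiled once and reused across requests; `ICON_SIZE_REGEX` in particular pulls the two numbers out of an icon `sizes` attribute, tolerating any non-digit separator between them:

use regex::Regex;

fn main() {
    let re = Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap(); // same pattern as ICON_SIZE_REGEX
    let caps = re.captures("32x32").unwrap();
    assert_eq!(&caps[1], "32");
    assert_eq!(&caps[2], "32");
}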
@@ -53,15 +52,113 @@ fn is_valid_domain(domain: &str) -> bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[get("/<domain>/icon.png")]
|
#[get("/<domain>/icon.png")]
|
||||||
fn icon(domain: String) -> Content<Vec<u8>> {
|
fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
|
||||||
let icon_type = ContentType::new("image", "x-icon");
|
let icon_type = ContentType::new("image", "x-icon");
|
||||||
|
|
||||||
if !is_valid_domain(&domain) {
|
if !is_valid_domain(&domain) {
|
||||||
warn!("Invalid domain: {:#?}", domain);
|
warn!("Invalid domain: {:#?}", domain);
|
||||||
return Content(icon_type, FALLBACK_ICON.to_vec());
|
return Cached::long(Content(icon_type, FALLBACK_ICON.to_vec()));
|
||||||
}
|
}
|
||||||
|
|
||||||
Content(icon_type, get_icon(&domain))
|
Cached::long(Content(icon_type, get_icon(&domain)))
|
||||||
|
}
|
||||||
 
+/// TODO: This is extracted from IpAddr::is_global, which is unstable:
+/// https://doc.rust-lang.org/nightly/std/net/enum.IpAddr.html#method.is_global
+/// Remove once https://github.com/rust-lang/rust/issues/27709 is merged
+#[cfg(not(feature = "unstable"))]
+fn is_global(ip: IpAddr) -> bool {
+    match ip {
+        IpAddr::V4(ip) => {
+            // check if this address is 192.0.0.9 or 192.0.0.10. These addresses are the only two
+            // globally routable addresses in the 192.0.0.0/24 range.
+            if u32::from(ip) == 0xc0000009 || u32::from(ip) == 0xc000000a {
+                return true;
+            }
+            !ip.is_private()
+                && !ip.is_loopback()
+                && !ip.is_link_local()
+                && !ip.is_broadcast()
+                && !ip.is_documentation()
+                && !(ip.octets()[0] == 100 && (ip.octets()[1] & 0b1100_0000 == 0b0100_0000))
+                && !(ip.octets()[0] == 192 && ip.octets()[1] == 0 && ip.octets()[2] == 0)
+                && !(ip.octets()[0] & 240 == 240 && !ip.is_broadcast())
+                && !(ip.octets()[0] == 198 && (ip.octets()[1] & 0xfe) == 18)
+                // Make sure the address is not in 0.0.0.0/8
+                && ip.octets()[0] != 0
+        }
+        IpAddr::V6(ip) => {
+            if ip.is_multicast() && ip.segments()[0] & 0x000f == 14 {
+                true
+            } else {
+                !ip.is_multicast()
+                    && !ip.is_loopback()
+                    && !((ip.segments()[0] & 0xffc0) == 0xfe80)
+                    && !((ip.segments()[0] & 0xfe00) == 0xfc00)
+                    && !ip.is_unspecified()
+                    && !((ip.segments()[0] == 0x2001) && (ip.segments()[1] == 0xdb8))
+            }
+        }
+    }
+}
+
+#[cfg(feature = "unstable")]
+fn is_global(ip: IpAddr) -> bool {
+    ip.is_global()
+}
+
+/// These are some tests to check that the implementations match
+/// The IPv4 can be all checked in 5 mins or so and they are correct as of nightly 2020-07-11
+/// The IPV6 can't be checked in a reasonable time, so we check about ten billion random ones, so far correct
+/// Note that the is_global implementation is subject to change as new IP RFCs are created
+///
+/// To run while showing progress output:
+/// cargo test --features sqlite,unstable -- --nocapture --ignored
+#[cfg(test)]
+#[cfg(feature = "unstable")]
+mod tests {
+    use super::*;
+
+    #[test]
+    #[ignore]
+    fn test_ipv4_global() {
+        for a in 0..u8::MAX {
+            println!("Iter: {}/255", a);
+            for b in 0..u8::MAX {
+                for c in 0..u8::MAX {
+                    for d in 0..u8::MAX {
+                        let ip = IpAddr::V4(std::net::Ipv4Addr::new(a, b, c, d));
+                        assert_eq!(ip.is_global(), is_global(ip))
+                    }
+                }
+            }
+        }
+    }
+
+    #[test]
+    #[ignore]
+    fn test_ipv6_global() {
+        use ring::rand::{SecureRandom, SystemRandom};
+        let mut v = [0u8; 16];
+        let rand = SystemRandom::new();
+        for i in 0..1_000 {
+            println!("Iter: {}/1_000", i);
+            for _ in 0..10_000_000 {
+                rand.fill(&mut v).expect("Error generating random values");
+                let ip = IpAddr::V6(std::net::Ipv6Addr::new(
+                    (v[14] as u16) << 8 | v[15] as u16,
+                    (v[12] as u16) << 8 | v[13] as u16,
+                    (v[10] as u16) << 8 | v[11] as u16,
+                    (v[8] as u16) << 8 | v[9] as u16,
+                    (v[6] as u16) << 8 | v[7] as u16,
+                    (v[4] as u16) << 8 | v[5] as u16,
+                    (v[2] as u16) << 8 | v[3] as u16,
+                    (v[0] as u16) << 8 | v[1] as u16,
+                ));
+                assert_eq!(ip.is_global(), is_global(ip))
+            }
+        }
+    }
+}
 
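A few spot checks of the vendored `is_global` above, for intuition; these values follow directly from the checks in the IPv4 arm (a sketch, assuming the function is in scope):

```rust
use std::net::IpAddr;

fn spot_check() {
    let ip = |s: &str| s.parse::<IpAddr>().unwrap();
    assert!(!is_global(ip("10.0.0.1")));  // RFC 1918 private space
    assert!(!is_global(ip("127.0.0.1"))); // loopback
    assert!(is_global(ip("192.0.0.9")));  // one of the two allowed 192.0.0.0/24 addresses
    assert!(is_global(ip("1.1.1.1")));    // ordinary public address
}
```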
@@ -70,7 +167,7 @@ fn check_icon_domain_is_blacklisted(domain: &str) -> bool {
         .to_socket_addrs()
         .map(|x| {
             for ip_port in x {
-                if !ip_port.ip().is_global() {
+                if !is_global(ip_port.ip()) {
                     warn!("IP {} for domain '{}' is not a global IP!", ip_port.ip(), domain);
                     return true;
                 }
@@ -182,7 +279,7 @@ struct Icon {
 }
 
 impl Icon {
-    fn new(priority: u8, href: String) -> Self {
+    const fn new(priority: u8, href: String) -> Self {
         Self { href, priority }
     }
 }
@@ -233,12 +330,16 @@ fn get_icon_url(domain: &str) -> Result<(Vec<Icon>, String), Error> {
     // Add the default favicon.ico to the list with the domain the content responded from.
     iconlist.push(Icon::new(35, url.join("/favicon.ico").unwrap().into_string()));
 
-    let soup = Soup::from_reader(content)?;
+    // 512KB should be more than enough for the HTML, though as we only really need
+    // the HTML header, it could potentially be reduced even further
+    let limited_reader = content.take(512 * 1024);
+
+    let soup = Soup::from_reader(limited_reader)?;
     // Search for and filter
     let favicons = soup
         .tag("link")
-        .attr("rel", Regex::new(r"icon$|apple.*icon")?) // Only use icon rels
-        .attr("href", Regex::new(r"(?i)\w+\.(jpg|jpeg|png|ico)(\?.*)?$|^data:image.*base64")?) // Only allow specific extensions
+        .attr("rel", ICON_REL_REGEX.clone()) // Only use icon rels
+        .attr("href", ICON_HREF_REGEX.clone()) // Only allow specific extensions
         .find_all();
 
     // Loop through all the found icons and determine it's priority
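The 512 KB cap above relies on `std::io::Read::take`, which wraps any reader so it reports end-of-file after a byte budget. A self-contained sketch of the same idea:

```rust
use std::io::Read;

fn read_capped<R: Read>(reader: R, cap: u64) -> std::io::Result<Vec<u8>> {
    let mut buf = Vec::new();
    // `take` yields a reader that ends after `cap` bytes, so an oversized or
    // malicious body cannot exhaust memory before reaching the HTML parser.
    reader.take(cap).read_to_end(&mut buf)?;
    Ok(buf)
}

fn main() -> std::io::Result<()> {
    let body = read_capped(&b"<html><head>...</head></html>"[..], 512 * 1024)?;
    assert!(body.starts_with(b"<html>"));
    Ok(())
}
```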
@@ -350,7 +451,7 @@ fn parse_sizes(sizes: Option<String>) -> (u16, u16) {
     let mut height: u16 = 0;
 
     if let Some(sizes) = sizes {
-        match Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap().captures(sizes.trim()) {
+        match ICON_SIZE_REGEX.captures(sizes.trim()) {
             None => {}
             Some(dimensions) => {
                 if dimensions.len() >= 3 {
@@ -387,7 +488,7 @@ fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
                         break;
                     }
                 }
-                _ => warn!("data uri is invalid")
+                _ => warn!("data uri is invalid"),
             };
         } else {
             match get_page_with_cookies(&icon.href, &cookie_str) {
@@ -1,19 +1,22 @@
+use chrono::Local;
 use num_traits::FromPrimitive;
-use rocket::request::{Form, FormItems, FromForm};
-use rocket::Route;
+use rocket::{
+    request::{Form, FormItems, FromForm},
+    Route,
+};
 use rocket_contrib::json::Json;
 use serde_json::Value;
-use chrono::Utc;
 
-use crate::api::core::two_factor::email::EmailTokenData;
-use crate::api::core::two_factor::{duo, email, yubikey};
-use crate::api::{ApiResult, EmptyResult, JsonResult};
-use crate::auth::ClientIp;
-use crate::db::models::*;
-use crate::db::DbConn;
-use crate::mail;
-use crate::util;
-use crate::CONFIG;
+use crate::{
+    api::{
+        core::two_factor::{duo, email, email::EmailTokenData, yubikey},
+        ApiResult, EmptyResult, JsonResult,
+    },
+    auth::ClientIp,
+    db::{models::*, DbConn},
+    error::MapResult,
+    mail, util, CONFIG,
+};
 
 pub fn routes() -> Vec<Route> {
     routes![login]
@@ -38,7 +41,7 @@ fn login(data: Form<ConnectData>, conn: DbConn, ip: ClientIp) -> JsonResult {
             _check_is_some(&data.device_name, "device_name cannot be blank")?;
             _check_is_some(&data.device_type, "device_type cannot be blank")?;
 
-            _password_login(data, conn, ip)
+            _password_login(data, conn, &ip)
         }
         t => err!("Invalid type", t),
     }
@@ -49,10 +52,7 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
     let token = data.refresh_token.unwrap();
 
     // Get device by refresh token
-    let mut device = match Device::find_by_refresh_token(&token, &conn) {
-        Some(device) => device,
-        None => err!("Invalid refresh token"),
-    };
+    let mut device = Device::find_by_refresh_token(&token, &conn).map_res("Invalid refresh token")?;
 
     // COMMON
     let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap();
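`map_res` above comes from the crate's own `MapResult` trait (note the new `error::MapResult` import). A minimal stand-in that captures what the call does to the old four-line `match`, simplified here to a `String` error rather than the crate's `Error` type:

```rust
trait MapResult<T> {
    fn map_res(self, msg: &str) -> Result<T, String>;
}

impl<T> MapResult<T> for Option<T> {
    // Collapses `match opt { Some(v) => v, None => err!(msg) }` into one call.
    fn map_res(self, msg: &str) -> Result<T, String> {
        self.ok_or_else(|| msg.to_string())
    }
}

fn main() {
    let device: Option<&str> = None;
    assert_eq!(device.map_res("Invalid refresh token"), Err("Invalid refresh token".into()));
}
```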
@@ -68,10 +68,15 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
         "refresh_token": device.refresh_token,
         "Key": user.akey,
         "PrivateKey": user.private_key,
+
+        "Kdf": user.client_kdf_type,
+        "KdfIterations": user.client_kdf_iter,
+        "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
+        "scope": "api offline_access"
     })))
 }
 
-fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult {
+fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult {
     // Validate scope
     let scope = data.scope.as_ref().unwrap();
     if scope != "api offline_access" {
@@ -97,8 +102,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
         )
     }
 
-    if !user.verified_at.is_some() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
-        let now = Utc::now().naive_utc();
+    let now = Local::now();
+
+    if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
+        let now = now.naive_utc();
         if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 {
             let resend_limit = CONFIG.signups_verify_resend_limit() as i32;
             if resend_limit == 0 || user.login_verify_count < resend_limit {
@@ -106,7 +113,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
                 // their email address, and we haven't sent them a reminder in a while...
                 let mut user = user;
                 user.last_verifying_at = Some(now);
-                user.login_verify_count = user.login_verify_count + 1;
+                user.login_verify_count += 1;
 
                 if let Err(e) = user.save(&conn) {
                     error!("Error updating user: {:#?}", e);
@@ -127,10 +134,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
 
     let (mut device, new_device) = get_device(&data, &conn, &user);
 
-    let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, &conn)?;
+    let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, &ip, &conn)?;
 
     if CONFIG.mail_enabled() && new_device {
-        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &device.updated_at, &device.name) {
+        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) {
             error!("Error sending new device email: {:#?}", e);
 
             if CONFIG.require_device_email() {
@@ -154,6 +161,11 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
         "Key": user.akey,
         "PrivateKey": user.private_key,
         //"TwoFactorToken": "11122233333444555666777888999"
+
+        "Kdf": user.client_kdf_type,
+        "KdfIterations": user.client_kdf_iter,
+        "ResetMasterPassword": false,// TODO: Same as above
+        "scope": "api offline_access"
     });
 
     if let Some(token) = twofactor_token {
@@ -197,6 +209,7 @@ fn twofactor_auth(
     user_uuid: &str,
     data: &ConnectData,
     device: &mut Device,
+    ip: &ClientIp,
     conn: &DbConn,
 ) -> ApiResult<Option<String>> {
     let twofactors = TwoFactor::find_by_user(user_uuid, conn);
@@ -211,13 +224,12 @@ fn twofactor_auth(
 
     let twofactor_code = match data.two_factor_token {
         Some(ref code) => code,
-        None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?),
+        None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"),
     };
 
     let selected_twofactor = twofactors
         .into_iter()
-        .filter(|tf| tf.atype == selected_id && tf.enabled)
-        .nth(0);
+        .find(|tf| tf.atype == selected_id && tf.enabled);
 
     use crate::api::core::two_factor as _tf;
     use crate::crypto::ct_eq;
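The `.filter(..).nth(0)` → `.find(..)` change above is behavior-preserving: both are lazy and stop at the first match, `find` just says so directly (and is what clippy suggests). For example:

```rust
fn main() {
    // (atype, enabled) stand-ins for the TwoFactor rows filtered above.
    let twofactors = vec![(0, false), (1, true), (1, false)];
    let selected = twofactors.into_iter().find(|&(atype, enabled)| atype == 1 && enabled);
    assert_eq!(selected, Some((1, true)));
}
```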
@@ -226,7 +238,7 @@ fn twofactor_auth(
     let mut remember = data.two_factor_remember.unwrap_or(0);
 
     match TwoFactorType::from_i32(selected_id) {
-        Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
+        Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?,
         Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
         Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
         Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
@@ -237,7 +249,7 @@ fn twofactor_auth(
                 Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => {
                     remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time
                 }
-                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?),
+                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"),
             }
         }
         _ => err!("Invalid two factor provider"),
@@ -252,10 +264,7 @@ fn twofactor_auth(
 }
 
 fn _selected_data(tf: Option<TwoFactor>) -> ApiResult<String> {
-    match tf {
-        Some(tf) => Ok(tf.data),
-        None => err!("Two factor doesn't exist"),
-    }
+    tf.map(|t| t.data).map_res("Two factor doesn't exist")
 }
 
 fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult<Value> {
@@ -347,6 +356,7 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
     Ok(result)
 }
 
+// https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs
 #[derive(Debug, Clone, Default)]
 #[allow(non_snake_case)]
 struct ConnectData {
@@ -364,6 +374,7 @@ struct ConnectData {
     device_identifier: Option<String>,
     device_name: Option<String>,
     device_type: Option<String>,
+    device_push_token: Option<String>, // Unused; mobile device push not yet supported.
 
     // Needed for two-factor auth
     two_factor_provider: Option<i32>,
@@ -391,6 +402,7 @@ impl<'f> FromForm<'f> for ConnectData {
                 "deviceidentifier" => form.device_identifier = Some(value),
                 "devicename" => form.device_name = Some(value),
                 "devicetype" => form.device_type = Some(value),
+                "devicepushtoken" => form.device_push_token = Some(value),
                 "twofactorprovider" => form.two_factor_provider = value.parse().ok(),
                 "twofactortoken" => form.two_factor_token = Some(value),
                 "twofactorremember" => form.two_factor_remember = value.parse().ok(),
@@ -1,27 +1,29 @@
 mod admin;
-pub(crate) mod core;
+pub mod core;
 mod icons;
 mod identity;
 mod notifications;
 mod web;
 
-pub use self::admin::routes as admin_routes;
-pub use self::core::routes as core_routes;
-pub use self::icons::routes as icons_routes;
-pub use self::identity::routes as identity_routes;
-pub use self::notifications::routes as notifications_routes;
-pub use self::notifications::{start_notification_server, Notify, UpdateType};
-pub use self::web::routes as web_routes;
-
 use rocket_contrib::json::Json;
 use serde_json::Value;
 
+pub use crate::api::{
+    admin::routes as admin_routes,
+    core::routes as core_routes,
+    icons::routes as icons_routes,
+    identity::routes as identity_routes,
+    notifications::routes as notifications_routes,
+    notifications::{start_notification_server, Notify, UpdateType},
+    web::routes as web_routes,
+};
+use crate::util;
+
 // Type aliases for API methods results
 type ApiResult<T> = Result<T, crate::error::Error>;
 pub type JsonResult = ApiResult<Json<Value>>;
 pub type EmptyResult = ApiResult<()>;
 
-use crate::util;
 type JsonUpcase<T> = Json<util::UpCase<T>>;
 type JsonUpcaseVec<T> = Json<Vec<util::UpCase<T>>>;
@@ -1,20 +1,32 @@
+use std::sync::atomic::{AtomicBool, Ordering};
+
 use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value as JsonValue;
 
-use crate::api::JsonResult;
-use crate::auth::Headers;
-use crate::db::DbConn;
-use crate::CONFIG;
+use crate::{
+    api::{EmptyResult, JsonResult},
+    auth::Headers,
+    db::DbConn,
+    Error, CONFIG,
+};
 
 pub fn routes() -> Vec<Route> {
     routes![negotiate, websockets_err]
 }
 
+static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true);
+
 #[get("/hub")]
-fn websockets_err() -> JsonResult {
-    err!("'/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the README for more info.")
+fn websockets_err() -> EmptyResult {
+    if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_and_swap(true, false, Ordering::Relaxed) {
+        err!("###########################################################
+    '/notifications/hub' should be proxied to the websocket server or notifications won't work.
+    Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false.
+    ###########################################################################################")
+    } else {
+        Err(Error::empty())
+    }
 }
 
 #[post("/hub/negotiate")]
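The `SHOW_WEBSOCKETS_MSG` flag above implements a warn-once gate: `compare_and_swap` returns the previous value, so only the first caller observes `true`. A condensed sketch (`compare_and_swap` was later deprecated in favor of `compare_exchange`, but it is what this diff uses):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

static SHOW_MSG: AtomicBool = AtomicBool::new(true);

fn warn_once() {
    // Atomically flips true -> false and returns the old value,
    // so the body runs at most once across all threads.
    if SHOW_MSG.compare_and_swap(true, false, Ordering::Relaxed) {
        eprintln!("'/notifications/hub' is not proxied; notifications won't work");
    }
}

fn main() {
    warn_once(); // prints
    warn_once(); // silent
}
```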
@@ -43,10 +55,11 @@ fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
 //
 // Websockets server
 //
+use std::io;
 use std::sync::Arc;
 use std::thread;
 
-use ws::{self, util::Token, Factory, Handler, Handshake, Message, Sender, WebSocket};
+use ws::{self, util::Token, Factory, Handler, Handshake, Message, Sender};
 
 use chashmap::CHashMap;
 use chrono::NaiveDateTime;
@@ -124,20 +137,56 @@ struct InitialMessage {
 const PING_MS: u64 = 15_000;
 const PING: Token = Token(1);
 
+const ID_KEY: &str = "id=";
+const ACCESS_TOKEN_KEY: &str = "access_token=";
+
+impl WSHandler {
+    fn err(&self, msg: &'static str) -> ws::Result<()> {
+        self.out.close(ws::CloseCode::Invalid)?;
+
+        // We need to specifically return an IO error so ws closes the connection
+        let io_error = io::Error::from(io::ErrorKind::InvalidData);
+        Err(ws::Error::new(ws::ErrorKind::Io(io_error), msg))
+    }
+}
+
 impl Handler for WSHandler {
     fn on_open(&mut self, hs: Handshake) -> ws::Result<()> {
-        // TODO: Improve this split
+        // Path == "/notifications/hub?id=<id>==&access_token=<access_token>"
+        //
+        // We don't use `id`, and as of around 2020-03-25, the official clients
+        // no longer seem to pass `id` (only `access_token`).
         let path = hs.request.resource();
-        let mut query_split: Vec<_> = path.split('?').nth(1).unwrap().split('&').collect();
-        query_split.sort();
-        let access_token = &query_split[0][13..];
-        let _id = &query_split[1][3..];
+
+        let (_id, access_token) = match path.split('?').nth(1) {
+            Some(params) => {
+                let params_iter = params.split('&').take(2);
+
+                let mut id = None;
+                let mut access_token = None;
+
+                for val in params_iter {
+                    if val.starts_with(ID_KEY) {
+                        id = Some(&val[ID_KEY.len()..]);
+                    } else if val.starts_with(ACCESS_TOKEN_KEY) {
+                        access_token = Some(&val[ACCESS_TOKEN_KEY.len()..]);
+                    }
+                }
+
+                match (id, access_token) {
+                    (Some(a), Some(b)) => (a, b),
+                    (None, Some(b)) => ("", b), // Ignore missing `id`.
+                    _ => return self.err("Missing access token"),
+                }
+            }
+            None => return self.err("Missing query parameters"),
+        };
 
         // Validate the user
         use crate::auth;
         let claims = match auth::decode_login(access_token) {
             Ok(claims) => claims,
-            Err(_) => return Err(ws::Error::new(ws::ErrorKind::Internal, "Invalid access token provided")),
+            Err(_) => return self.err("Invalid access token provided"),
         };
 
         // Assign the user to the handler
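Extracted from the `on_open` rewrite above, the parameter parsing behaves like this on sample paths (same constants and prefix matching, minus the websocket plumbing):

```rust
const ID_KEY: &str = "id=";
const ACCESS_TOKEN_KEY: &str = "access_token=";

fn parse(path: &str) -> (Option<&str>, Option<&str>) {
    let (mut id, mut access_token) = (None, None);
    if let Some(params) = path.split('?').nth(1) {
        for val in params.split('&').take(2) {
            if val.starts_with(ID_KEY) {
                id = Some(&val[ID_KEY.len()..]);
            } else if val.starts_with(ACCESS_TOKEN_KEY) {
                access_token = Some(&val[ACCESS_TOKEN_KEY.len()..]);
            }
        }
    }
    (id, access_token)
}

fn main() {
    // Parameter order no longer matters, and a missing `id` is tolerated.
    assert_eq!(parse("/notifications/hub?access_token=tok"), (None, Some("tok")));
    assert_eq!(parse("/notifications/hub?id=abc==&access_token=tok"), (Some("abc=="), Some("tok")));
}
```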
@@ -179,10 +228,7 @@ impl Handler for WSHandler {
             // reschedule the timeout
             self.out.timeout(PING_MS, PING)
         } else {
-            Err(ws::Error::new(
-                ws::ErrorKind::Internal,
-                "Invalid timeout token provided",
-            ))
+            Ok(())
         }
     }
 }
@@ -216,7 +262,9 @@ impl Factory for WSFactory {
         // Remove handler
         if let Some(user_uuid) = &handler.user_uuid {
             if let Some(mut user_conn) = self.users.map.get_mut(user_uuid) {
-                user_conn.remove_item(&handler.out);
+                if let Some(pos) = user_conn.iter().position(|x| x == &handler.out) {
+                    user_conn.remove(pos);
+                }
             }
         }
     }
@@ -351,7 +399,14 @@ pub fn start_notification_server() -> WebSocketUsers {
 
     if CONFIG.websocket_enabled() {
         thread::spawn(move || {
-            WebSocket::new(factory)
+            let mut settings = ws::Settings::default();
+            settings.max_connections = 500;
+            settings.queue_size = 2;
+            settings.panic_on_internal = false;
+
+            ws::Builder::new()
+                .with_settings(settings)
+                .build(factory)
                 .unwrap()
                 .listen((CONFIG.websocket_address().as_str(), CONFIG.websocket_port()))
                 .unwrap();
@@ -1,17 +1,14 @@
 use std::path::{Path, PathBuf};
 
-use rocket::http::ContentType;
-use rocket::response::content::Content;
-use rocket::response::NamedFile;
-use rocket::Route;
+use rocket::{http::ContentType, response::content::Content, response::NamedFile, Route};
 use rocket_contrib::json::Json;
 use serde_json::Value;
 
-use crate::util::Cached;
-use crate::error::Error;
-use crate::CONFIG;
+use crate::{error::Error, util::Cached, CONFIG};
 
 pub fn routes() -> Vec<Route> {
+    // If addding more routes here, consider also adding them to
+    // crate::utils::LOGGED_ROUTES to make sure they appear in the log
     if CONFIG.web_vault_enabled() {
         routes![web_index, app_id, web_files, attachments, alive, static_files]
     } else {
@@ -21,9 +18,7 @@ pub fn routes() -> Vec<Route> {
 
 #[get("/")]
 fn web_index() -> Cached<Option<NamedFile>> {
-    Cached::short(NamedFile::open(
-        Path::new(&CONFIG.web_vault_folder()).join("index.html"),
-    ).ok())
+    Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok())
 }
 
 #[get("/app-id.json")]
@@ -37,7 +32,17 @@ fn app_id() -> Cached<Content<Json<Value>>> {
     {
     "version": { "major": 1, "minor": 0 },
     "ids": [
-      &CONFIG.domain(),
+      // Per <https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-appid-and-facets-v2.0-id-20180227.html#determining-the-facetid-of-a-calling-application>:
+      //
+      // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
+      // of the web page triggering the FIDO operation, written as
+      // a URI with an empty path. Default ports are omitted and any
+      // path component is ignored."
+      //
+      // This leaves it unclear as to whether the path must be empty,
+      // or whether it can be non-empty and will be ignored. To be on
+      // the safe side, use a proper web origin (with empty path).
+      &CONFIG.domain_origin(),
       "ios:bundle-id:com.8bit.bitwarden",
       "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
     }]
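The facet list now uses `CONFIG.domain_origin()` instead of the full configured URL. A sketch of how a "web origin with empty path" can be derived with the `url` crate (an illustration only, not necessarily the repo's helper):

```rust
use url::Url;

fn origin_of(input: &str) -> String {
    // origin() drops path, query and fragment; ascii_serialization()
    // also omits the default port for the scheme.
    Url::parse(input).unwrap().origin().ascii_serialization()
}

fn main() {
    assert_eq!(origin_of("https://example.com:8443/path"), "https://example.com:8443");
    assert_eq!(origin_of("https://example.com:443/vault/"), "https://example.com");
}
```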
@@ -68,6 +73,7 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
     match filename.as_ref() {
         "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),
         "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))),
+        "shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))),
         "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))),
         "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
@@ -75,6 +81,6 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
         "bootstrap-native-v4.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native-v4.js"))),
         "md5.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/md5.js"))),
         "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
-        _ => err!("Image not found"),
+        _ => err!(format!("Static file not found: {}", filename)),
     }
 }
src/auth.rs (130 changed lines)

@@ -1,38 +1,40 @@
 //
 // JWT Handling
 //
-use crate::util::read_file;
 use chrono::{Duration, Utc};
+use num_traits::FromPrimitive;
+use once_cell::sync::Lazy;
 
-use jsonwebtoken::{self, Algorithm, Header};
+use jsonwebtoken::{self, Algorithm, DecodingKey, EncodingKey, Header};
 use serde::de::DeserializeOwned;
 use serde::ser::Serialize;
 
-use crate::error::{Error, MapResult};
-use crate::CONFIG;
+use crate::{
+    error::{Error, MapResult},
+    util::read_file,
+    CONFIG,
+};
 
 const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
 
-lazy_static! {
-    pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2);
-    static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM);
-    pub static ref JWT_LOGIN_ISSUER: String = format!("{}|login", CONFIG.domain());
-    pub static ref JWT_INVITE_ISSUER: String = format!("{}|invite", CONFIG.domain());
-    pub static ref JWT_DELETE_ISSUER: String = format!("{}|delete", CONFIG.domain());
-    pub static ref JWT_VERIFYEMAIL_ISSUER: String = format!("{}|verifyemail", CONFIG.domain());
-    pub static ref JWT_ADMIN_ISSUER: String = format!("{}|admin", CONFIG.domain());
-    static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key()) {
-        Ok(key) => key,
-        Err(e) => panic!("Error loading private RSA Key.\n Error: {}", e),
-    };
-    static ref PUBLIC_RSA_KEY: Vec<u8> = match read_file(&CONFIG.public_rsa_key()) {
-        Ok(key) => key,
-        Err(e) => panic!("Error loading public RSA Key.\n Error: {}", e),
-    };
-}
+pub static DEFAULT_VALIDITY: Lazy<Duration> = Lazy::new(|| Duration::hours(2));
+static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
+pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
+static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
+static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
+static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
+static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
+static PRIVATE_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.private_rsa_key()) {
+    Ok(key) => key,
+    Err(e) => panic!("Error loading private RSA Key.\n Error: {}", e),
+});
+static PUBLIC_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.public_rsa_key()) {
+    Ok(key) => key,
+    Err(e) => panic!("Error loading public RSA Key.\n Error: {}", e),
+});
 
 pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
-    match jsonwebtoken::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) {
+    match jsonwebtoken::encode(&JWT_HEADER, claims, &EncodingKey::from_rsa_der(&PRIVATE_RSA_KEY)) {
         Ok(token) => token,
         Err(e) => panic!("Error encoding jwt {}", e),
     }
@@ -51,7 +53,7 @@ fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Err
 
     let token = token.replace(char::is_whitespace, "");
 
-    jsonwebtoken::decode(&token, &PUBLIC_RSA_KEY, &validation)
+    jsonwebtoken::decode(&token, &DecodingKey::from_rsa_der(&PUBLIC_RSA_KEY), &validation)
         .map(|d| d.claims)
         .map_res("Error decoding JWT")
 }
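Note that `EncodingKey::from_rsa_der` above is rebuilt from the raw DER bytes on every `encode_jwt` call. One possible refinement, sketched here as an assumption rather than anything in this diff, would be to cache the constructed key in another `Lazy` (the file path is illustrative):

```rust
use jsonwebtoken::EncodingKey;
use once_cell::sync::Lazy;

// Hypothetical: raw DER bytes loaded once, as PRIVATE_RSA_KEY does above.
static PRIVATE_RSA_KEY_BYTES: Lazy<Vec<u8>> =
    Lazy::new(|| std::fs::read("data/rsa_key.der").expect("Error loading private RSA Key"));

// The parsed key is then also built only once, instead of once per encode call.
static PRIVATE_ENCODING_KEY: Lazy<EncodingKey> =
    Lazy::new(|| EncodingKey::from_rsa_der(&PRIVATE_RSA_KEY_BYTES));
```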
@@ -156,9 +158,7 @@ pub struct DeleteJWTClaims {
     pub sub: String,
 }
 
-pub fn generate_delete_claims(
-    uuid: String,
-) -> DeleteJWTClaims {
+pub fn generate_delete_claims(uuid: String) -> DeleteJWTClaims {
     let time_now = Utc::now().naive_utc();
     DeleteJWTClaims {
         nbf: time_now.timestamp(),
@@ -180,9 +180,7 @@ pub struct VerifyEmailJWTClaims {
     pub sub: String,
 }
 
-pub fn generate_verify_email_claims(
-    uuid: String,
-) -> DeleteJWTClaims {
+pub fn generate_verify_email_claims(uuid: String) -> DeleteJWTClaims {
     let time_now = Utc::now().naive_utc();
     DeleteJWTClaims {
         nbf: time_now.timestamp(),
@@ -217,11 +215,14 @@ pub fn generate_admin_claims() -> AdminJWTClaims {
 //
 // Bearer token authentication
 //
-use rocket::request::{self, FromRequest, Request};
-use rocket::Outcome;
+use rocket::{
+    request::{FromRequest, Request, Outcome},
+};
 
-use crate::db::models::{Device, User, UserOrgStatus, UserOrgType, UserOrganization};
-use crate::db::DbConn;
+use crate::db::{
+    models::{Device, User, UserOrgStatus, UserOrgType, UserOrganization},
+    DbConn,
+};
 
 pub struct Headers {
     pub host: String,
@@ -232,7 +233,7 @@ pub struct Headers {
 impl<'a, 'r> FromRequest<'a, 'r> for Headers {
     type Error = &'static str;
 
-    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
+    fn from_request(request: &'a Request<'r>) -> Outcome<Self, Self::Error> {
         let headers = request.headers();
 
         // Get host
@@ -311,17 +312,35 @@ pub struct OrgHeaders {
     pub org_user_type: UserOrgType,
 }
 
+// org_id is usually the second param ("/organizations/<org_id>")
+// But there are cases where it is located in a query value.
+// First check the param, if this is not a valid uuid, we will try the query value.
+fn get_org_id(request: &Request) -> Option<String> {
+    if let Some(Ok(org_id)) = request.get_param::<String>(1) {
+        if uuid::Uuid::parse_str(&org_id).is_ok() {
+            return Some(org_id);
+        }
+    }
+
+    if let Some(Ok(org_id)) = request.get_query_value::<String>("organizationId") {
+        if uuid::Uuid::parse_str(&org_id).is_ok() {
+            return Some(org_id);
+        }
+    }
+
+    None
+}
+
 impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
     type Error = &'static str;
 
-    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
+    fn from_request(request: &'a Request<'r>) -> Outcome<Self, Self::Error> {
         match request.guard::<Headers>() {
             Outcome::Forward(_) => Outcome::Forward(()),
             Outcome::Failure(f) => Outcome::Failure(f),
             Outcome::Success(headers) => {
-                // org_id is expected to be the second param ("/organizations/<org_id>")
-                match request.get_param::<String>(1) {
-                    Some(Ok(org_id)) => {
+                match get_org_id(request) {
+                    Some(org_id) => {
                         let conn = match request.guard::<DbConn>() {
                             Outcome::Success(conn) => conn,
                             _ => err_handler!("Error getting DB"),
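The UUID check in `get_org_id` above is what lets a non-UUID path segment fall through to the `organizationId` query value; `uuid::Uuid::parse_str` is strict about the format:

```rust
fn main() {
    assert!(uuid::Uuid::parse_str("3fa85f64-5717-4562-b3fc-2c963f66afa6").is_ok());
    assert!(uuid::Uuid::parse_str("not-an-org-id").is_err());
}
```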
@@ -370,7 +389,7 @@ pub struct AdminHeaders {
 impl<'a, 'r> FromRequest<'a, 'r> for AdminHeaders {
     type Error = &'static str;
 
-    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
+    fn from_request(request: &'a Request<'r>) -> Outcome<Self, Self::Error> {
         match request.guard::<OrgHeaders>() {
             Outcome::Forward(_) => Outcome::Forward(()),
             Outcome::Failure(f) => Outcome::Failure(f),
@@ -390,6 +409,16 @@ impl<'a, 'r> FromRequest<'a, 'r> for AdminHeaders {
     }
 }
 
+impl Into<Headers> for AdminHeaders {
+    fn into(self) -> Headers {
+        Headers {
+            host: self.host,
+            device: self.device,
+            user: self.user,
+        }
+    }
+}
+
 pub struct OwnerHeaders {
     pub host: String,
     pub device: Device,
@@ -399,7 +428,7 @@ pub struct OwnerHeaders {
 impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders {
     type Error = &'static str;
 
-    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
+    fn from_request(request: &'a Request<'r>) -> Outcome<Self, Self::Error> {
         match request.guard::<OrgHeaders>() {
             Outcome::Forward(_) => Outcome::Forward(()),
             Outcome::Failure(f) => Outcome::Failure(f),
@@ -430,12 +459,25 @@ pub struct ClientIp {
 impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
     type Error = ();
 
-    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
-        let ip = match request.client_ip() {
-            Some(addr) => addr,
-            None => "0.0.0.0".parse().unwrap(),
+    fn from_request(req: &'a Request<'r>) -> Outcome<Self, Self::Error> {
+        let ip = if CONFIG._ip_header_enabled() {
+            req.headers().get_one(&CONFIG.ip_header()).and_then(|ip| {
+                match ip.find(',') {
+                    Some(idx) => &ip[..idx],
+                    None => ip,
+                }
+                .parse()
+                .map_err(|_| warn!("'{}' header is malformed: {}", CONFIG.ip_header(), ip))
+                .ok()
+            })
+        } else {
+            None
         };
 
+        let ip = ip
+            .or_else(|| req.remote().map(|r| r.ip()))
+            .unwrap_or_else(|| "0.0.0.0".parse().unwrap());
+
         Outcome::Success(ClientIp { ip })
     }
 }
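The header branch of the new `ClientIp` guard reduces to this first-hop extraction: take everything before the first comma (headers such as `X-Forwarded-For` may carry a chain of addresses), then parse, yielding `None` on malformed input so the guard can fall back to the socket address:

```rust
use std::net::IpAddr;

fn first_hop(header_value: &str) -> Option<IpAddr> {
    let candidate = match header_value.find(',') {
        Some(idx) => &header_value[..idx],
        None => header_value,
    };
    candidate.parse().ok()
}

fn main() {
    assert_eq!(first_hop("203.0.113.7, 10.0.0.1"), "203.0.113.7".parse().ok());
    assert_eq!(first_hop("garbage"), None); // guard then uses the remote socket IP
}
```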
src/config.rs (273 changed lines)

@@ -1,19 +1,25 @@
 use std::process::exit;
 use std::sync::RwLock;
 
-use crate::error::Error;
-use crate::util::get_env;
+use once_cell::sync::Lazy;
+use reqwest::Url;
 
-lazy_static! {
-    pub static ref CONFIG: Config = Config::load().unwrap_or_else(|e| {
-        println!("Error loading config:\n\t{:?}\n", e);
-        exit(12)
-    });
-    pub static ref CONFIG_FILE: String = {
-        let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
-        get_env("CONFIG_FILE").unwrap_or_else(|| format!("{}/config.json", data_folder))
-    };
-}
+use crate::{
+    error::Error,
+    util::{get_env, get_env_bool},
+};
+
+static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
+    let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
+    get_env("CONFIG_FILE").unwrap_or_else(|| format!("{}/config.json", data_folder))
+});
+
+pub static CONFIG: Lazy<Config> = Lazy::new(|| {
+    Config::load().unwrap_or_else(|e| {
+        println!("Error loading config:\n\t{:?}\n", e);
+        exit(12)
+    })
+});
 
 pub type Pass = String;
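`get_env` and `get_env_bool` live in `src/util.rs`; plausible stand-ins for what the new `@getenv` dispatch calls (the exact truthy spellings accepted are an assumption here):

```rust
use std::env;
use std::str::FromStr;

fn get_env<V: FromStr>(key: &str) -> Option<V> {
    env::var(key).ok().and_then(|v| v.parse().ok())
}

fn get_env_bool(key: &str) -> Option<bool> {
    // More lenient than bool::from_str, which only accepts "true"/"false".
    match env::var(key).ok()?.to_lowercase().as_str() {
        "true" | "t" | "yes" | "y" | "1" => Some(true),
        "false" | "f" | "no" | "n" | "0" => Some(false),
        _ => None,
    }
}
```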
@@ -23,13 +29,13 @@ macro_rules! make_config {
     $group:ident $(: $group_enabled:ident)? {
     $(
         $(#[doc = $doc:literal])+
-        $name:ident : $ty:ty, $editable:literal, $none_action:ident $(, $default:expr)?;
+        $name:ident : $ty:ident, $editable:literal, $none_action:ident $(, $default:expr)?;
     )+},
     )+) => {
         pub struct Config { inner: RwLock<Inner> }
 
         struct Inner {
-            templates: Handlebars,
+            templates: Handlebars<'static>,
             config: ConfigItems,
 
             _env: ConfigBuilder,
@@ -50,7 +56,7 @@ macro_rules! make_config {
 
                 let mut builder = ConfigBuilder::default();
                 $($(
-                    builder.$name = get_env(&stringify!($name).to_uppercase());
+                    builder.$name = make_config! { @getenv &stringify!($name).to_uppercase(), $ty };
                 )+)+
 
                 builder
@@ -108,6 +114,8 @@ macro_rules! make_config {
                 )+)+
                 config.domain_set = _domain_set;
 
+                config.signups_domains_whitelist = config.signups_domains_whitelist.trim().to_lowercase();
+
                 config
             }
         }
@@ -129,7 +137,6 @@ macro_rules! make_config {
             (inner._env.build(), inner.config.clone())
         };
 
-
         fn _get_form_type(rust_type: &str) -> &'static str {
             match rust_type {
                 "Pass" => "password",
@@ -185,19 +192,28 @@ macro_rules! make_config {
             }
         }
     }};
+    ( @build $value:expr, $config:expr, gen, $default_fn:expr ) => {{
+        let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
+        f($config)
+    }};
+
+    ( @getenv $name:expr, bool ) => { get_env_bool($name) };
+    ( @getenv $name:expr, $ty:ident ) => { get_env($name) };
+
 }
 
 //STRUCTURE:
 // /// Short description (without this they won't appear on the list)
 // group {
 //   /// Friendly Name |> Description (Optional)
-//   name: type, is_editable, none_action, <default_value (Optional)>
+//   name: type, is_editable, action, <default_value (Optional)>
 // }
 //
-// Where none_action applied when the value wasn't provided and can be:
+// Where action applied when the value wasn't provided and can be:
 //  def:    Use a default value
 //  auto:   Value is auto generated based on other values
 //  option: Value is optional
+//  gen:    Value is always autogenerated and it's original value ignored
 make_config! {
     folders {
         /// Data folder |> Main data folder
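The `@getenv` arms work because the option type is now captured as `$ty:ident` rather than `$ty:ty`: an `ident` can be re-matched against the literal token `bool`, steering boolean options to the lenient parser. A toy demonstration of that dispatch:

```rust
macro_rules! getenv_demo {
    // The literal `bool` arm wins when the type token is exactly `bool`...
    ($name:expr, bool) => { ("get_env_bool", $name) };
    // ...otherwise any other type identifier falls through to the generic arm.
    ($name:expr, $ty:ident) => { ("get_env", $name) };
}

fn main() {
    assert_eq!(getenv_demo!("WEB_VAULT_ENABLED", bool).0, "get_env_bool");
    assert_eq!(getenv_demo!("DOMAIN", String).0, "get_env");
}
```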
@@ -231,17 +247,26 @@ make_config! {
     domain: String, true, def, "http://localhost".to_string();
     /// Domain Set |> Indicates if the domain is set by the admin. Otherwise the default will be used.
     domain_set: bool, false, def, false;
+    /// Domain origin |> Domain URL origin (in https://example.com:8443/path, https://example.com:8443 is the origin)
+    domain_origin: String, false, auto, |c| extract_url_origin(&c.domain);
+    /// Domain path |> Domain URL path (in https://example.com:8443/path, /path is the path)
+    domain_path: String, false, auto, |c| extract_url_path(&c.domain);
     /// Enable web vault
     web_vault_enabled: bool, false, def, true;
 
     /// HIBP Api Key |> HaveIBeenPwned API Key, request it here: https://haveibeenpwned.com/API/Key
     hibp_api_key: Pass, true, option;
 
+    /// Per-user attachment limit (KB) |> Limit in kilobytes for a users attachments, once the limit is exceeded it won't be possible to upload more
+    user_attachment_limit: i64, true, option;
+    /// Per-organization attachment limit (KB) |> Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more
+    org_attachment_limit: i64, true, option;
+
     /// Disable icon downloads |> Set to true to disable icon downloading, this would still serve icons from
     /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0,
     /// otherwise it will delete them and they won't be downloaded again.
     disable_icon_download: bool, true, def, false;
-    /// Allow new signups |> Controls if new users can register. Note that while this is disabled, users could still be invited
+    /// Allow new signups |> Controls whether new users can register. Users can be invited by the bitwarden_rs admin even if this is disabled
     signups_allowed: bool, true, def, true;
     /// Require email verification on signups. This will prevent logins from succeeding until the address has been verified
     signups_verify: bool, true, def, false;
@@ -249,9 +274,9 @@ make_config! {
     signups_verify_resend_time: u64, true, def, 3_600;
     /// If signups require email verification, limit how many emails are automatically sent when login is attempted (0 means no limit)
     signups_verify_resend_limit: u32, true, def, 6;
-    /// Allow signups only from this list of comma-separated domains
+    /// Email domain whitelist |> Allow signups only from this list of comma-separated domains, even when signups are otherwise disabled
     signups_domains_whitelist: String, true, def, "".to_string();
-    /// Allow invitations |> Controls whether users can be invited by organization admins, even when signups are disabled
+    /// Allow invitations |> Controls whether users can be invited by organization admins, even when signups are otherwise disabled
     invitations_allowed: bool, true, def, true;
     /// Password iterations |> Number of server-side passwords hashing iterations.
     /// The changes only apply when a user changes their password. Not recommended to lower the value
@@ -262,10 +287,18 @@ make_config! {
 
     /// Admin page token |> The token used to authenticate in this very same page. Changing it here won't deauthorize the current session
     admin_token: Pass, true, option;
+
+    /// Invitation organization name |> Name shown in the invitation emails that don't come from a specific organization
+    invitation_org_name: String, true, def, "Bitwarden_RS".to_string();
 },
 
 /// Advanced settings
 advanced {
+    /// Client IP header |> If not present, the remote IP is used.
+    /// Set to the string "none" (without quotes), to disable any headers and just use the remote IP
+    ip_header: String, true, def, "X-Real-IP".to_string();
+    /// Internal IP header property, used to avoid recomputing each time
+    _ip_header_enabled: bool, false, gen, |c| &c.ip_header.trim().to_lowercase() != "none";
     /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded
     icon_cache_ttl: u64, true, def, 2_592_000;
     /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again.
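The `gen` action introduced above recomputes a value from other settings and ignores any provided one; `_ip_header_enabled` is its first user. What that closure evaluates, as a standalone function:

```rust
fn ip_header_enabled(ip_header: &str) -> bool {
    // "none" (any case, surrounding whitespace ignored) disables header lookups.
    ip_header.trim().to_lowercase() != "none"
}

fn main() {
    assert!(ip_header_enabled("X-Real-IP"));
    assert!(!ip_header_enabled(" NONE "));
}
```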
@@ -285,7 +318,7 @@ make_config! {

         /// Disable authenticator time drifted codes to be valid |> Enabling this only allows the current TOTP code to be valid
         /// TOTP codes of the previous and next 30 seconds will be invalid.
         authenticator_disable_time_drift: bool, true, def, false;

         /// Require new device emails |> When a user logs in an email is required to be sent.
         /// If sending the email fails the login attempt will fail.
@@ -294,11 +327,10 @@ make_config! {
         /// Reload templates (Dev) |> When this is set to true, the templates get reloaded with every request.
         /// ONLY use this during development, as it can slow down the server
         reload_templates: bool, true, def, false;

-        /// Log routes at launch (Dev)
-        log_mounts: bool, true, def, false;
         /// Enable extended logging
         extended_logging: bool, false, def, true;
+        /// Log timestamp format
+        log_timestamp_format: String, true, def, "%Y-%m-%d %H:%M:%S.%3f".to_string();
         /// Enable the log to output to Syslog
         use_syslog: bool, false, def, false;
         /// Log file path
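The new `log_timestamp_format` default uses chrono's strftime syntax; `%3f` prints exactly three fractional-second digits after the literal dot. A quick check of what the default produces (assuming the chrono crate, which the project already depends on):

```rust
use chrono::Local;

fn main() {
    // Prints something like "2019-11-07 18:31:05.492".
    println!("{}", Local::now().format("%Y-%m-%d %H:%M:%S.%3f"));
}
```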
@@ -312,6 +344,9 @@ make_config! {

         /// Bypass admin page security (Know the risks!) |> Disables the Admin Token for the admin page so you may use your own auth in-front
         disable_admin_token: bool, true, def, false;

+        /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets
+        allowed_iframe_ancestors: String, true, def, String::new();
     },

     /// Yubikey settings
@@ -363,7 +398,9 @@ make_config! {
         /// Json form auth mechanism |> Defaults for ssl is "Plain" and "Login" and nothing for non-ssl connections. Possible values: ["Plain", "Login", "Xoauth2"]
         smtp_auth_mechanism: String, true, option;
         /// SMTP connection timeout |> Number of seconds when to stop trying to connect to the SMTP server
         smtp_timeout: u64, true, def, 15;
+        /// Server name sent during HELO |> By default this value should be is on the machine's hostname, but might need to be changed in case it trips some anti-spam filters
+        helo_name: String, true, option;
     },

     /// Email 2FA Settings
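`allowed_iframe_ancestors` feeds a Content-Security-Policy `frame-ancestors` directive so the listed origins may embed the web vault. A hedged sketch of how such a header value could be assembled (the exact directive list in the real response headers may differ):

```rust
/// Builds an illustrative CSP value: 'self' plus any extra ancestors taken
/// from ALLOWED_IFRAME_ANCESTORS (space-separated origins).
fn frame_ancestors(allowed: &str) -> String {
    let mut v = String::from("frame-ancestors 'self'");
    if !allowed.is_empty() {
        v.push(' ');
        v.push_str(allowed);
    }
    v
}

fn main() {
    assert_eq!(
        frame_ancestors("https://intranet.example.com"),
        "frame-ancestors 'self' https://intranet.example.com"
    );
}
```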
@@ -381,8 +418,9 @@ make_config! {

 fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
     let db_url = cfg.database_url.to_lowercase();
-    if cfg!(feature = "sqlite") && (db_url.starts_with("mysql:") || db_url.starts_with("postgresql:")) {
+    if cfg!(feature = "sqlite")
+        && (db_url.starts_with("mysql:") || db_url.starts_with("postgresql:") || db_url.starts_with("postgres:"))
+    {
         err!("`DATABASE_URL` is meant for MySQL or Postgres, while this server is meant for SQLite")
     }

@@ -390,13 +428,24 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
         err!("`DATABASE_URL` should start with mysql: when using the MySQL server")
     }

-    if cfg!(feature = "postgresql") && !db_url.starts_with("postgresql:") {
+    if cfg!(feature = "postgresql") && !(db_url.starts_with("postgresql:") || db_url.starts_with("postgres:")) {
         err!("`DATABASE_URL` should start with postgresql: when using the PostgreSQL server")
     }

+    let dom = cfg.domain.to_lowercase();
+    if !dom.starts_with("http://") && !dom.starts_with("https://") {
+        err!("DOMAIN variable needs to contain the protocol (http, https). Use 'http[s]://bw.example.com' instead of 'bw.example.com'");
+    }
+
+    let whitelist = &cfg.signups_domains_whitelist;
+    if !whitelist.is_empty() && whitelist.split(',').any(|d| d.trim().is_empty()) {
+        err!("`SIGNUPS_DOMAINS_WHITELIST` contains empty tokens");
+    }
+
     if let Some(ref token) = cfg.admin_token {
-        if token.trim().is_empty() {
-            err!("`ADMIN_TOKEN` is enabled but has an empty value. To enable the admin page without token, use `DISABLE_ADMIN_TOKEN`")
+        if token.trim().is_empty() && !cfg.disable_admin_token {
+            println!("[WARNING] `ADMIN_TOKEN` is enabled but has an empty value, so the admin page will be disabled.");
+            println!("[WARNING] To enable the admin page without a token, use `DISABLE_ADMIN_TOKEN`.");
         }
     }

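The two added checks are independent: DOMAIN must carry a protocol prefix, and a non-empty whitelist must not contain empty comma-separated tokens. The whitelist rule in isolation (a plain-function sketch, names assumed):

```rust
/// True when a SIGNUPS_DOMAINS_WHITELIST value is acceptable: either unset,
/// or free of empty tokens such as the middle one in "a.com,,b.com".
fn whitelist_is_valid(whitelist: &str) -> bool {
    whitelist.is_empty() || !whitelist.split(',').any(|d| d.trim().is_empty())
}

fn main() {
    assert!(whitelist_is_valid(""));
    assert!(whitelist_is_valid("example.com, example.org"));
    assert!(!whitelist_is_valid("example.com,,example.org"));
    assert!(!whitelist_is_valid("example.com,")); // trailing comma leaves an empty token
}
```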
@@ -436,6 +485,29 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
     Ok(())
 }

+/// Extracts an RFC 6454 web origin from a URL.
+fn extract_url_origin(url: &str) -> String {
+    match Url::parse(url) {
+        Ok(u) => u.origin().ascii_serialization(),
+        Err(e) => {
+            println!("Error validating domain: {}", e);
+            String::new()
+        }
+    }
+}
+
+/// Extracts the path from a URL.
+/// All trailing '/' chars are trimmed, even if the path is a lone '/'.
+fn extract_url_path(url: &str) -> String {
+    match Url::parse(url) {
+        Ok(u) => u.path().trim_end_matches('/').to_string(),
+        Err(_) => {
+            // We already print it in the method above, no need to do it again
+            String::new()
+        }
+    }
+}
+
 impl Config {
     pub fn load() -> Result<Self, Error> {
         // Loading from env and file
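A quick check of what these helpers return, using the `url` crate that `Url::parse` already comes from:

```rust
use url::Url;

fn main() {
    let u = Url::parse("https://bw.example.com:8443/vault/").unwrap();
    // RFC 6454 ASCII origin: scheme://host[:non-default port]
    assert_eq!(u.origin().ascii_serialization(), "https://bw.example.com:8443");
    // Path with trailing '/' trimmed, as extract_url_path does
    assert_eq!(u.path().trim_end_matches('/'), "/vault");
}
```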
@@ -450,12 +522,7 @@ impl Config {
         validate_config(&config)?;

         Ok(Config {
-            inner: RwLock::new(Inner {
-                templates: load_templates(&config.templates_folder),
-                config,
-                _env,
-                _usr,
-            }),
+            inner: RwLock::new(Inner { templates: load_templates(&config.templates_folder), config, _env, _usr }),
         })
     }

@@ -499,14 +566,30 @@ impl Config {
         self.update_config(builder)
     }

-    pub fn can_signup_user(&self, email: &str) -> bool {
+    /// Tests whether an email's domain is allowed. A domain is allowed if it
+    /// is in signups_domains_whitelist, or if no whitelist is set (so there
+    /// are no domain restrictions in effect).
+    pub fn is_email_domain_allowed(&self, email: &str) -> bool {
         let e: Vec<&str> = email.rsplitn(2, '@').collect();
         if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
             warn!("Failed to parse email address '{}'", email);
-            return false
+            return false;
+        }
+        let email_domain = e[0].to_lowercase();
+        let whitelist = self.signups_domains_whitelist();
+
+        whitelist.is_empty() || whitelist.split(',').any(|d| d.trim() == email_domain)
+    }
+
+    /// Tests whether signup is allowed for an email address, taking into
+    /// account the signups_allowed and signups_domains_whitelist settings.
+    pub fn is_signup_allowed(&self, email: &str) -> bool {
+        if !self.signups_domains_whitelist().is_empty() {
+            // The whitelist setting overrides the signups_allowed setting.
+            self.is_email_domain_allowed(email)
+        } else {
+            self.signups_allowed()
         }
-
-        self.signups_domains_whitelist().split(',').any(|d| d == e[0])
     }

     pub fn delete_user_config(&self) -> Result<(), Error> {
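The precedence introduced here is easy to miss: a non-empty whitelist overrides SIGNUPS_ALLOWED entirely. A self-contained sketch of the combined behavior (plain functions standing in for the Config methods):

```rust
/// Combined behavior of is_signup_allowed/is_email_domain_allowed above.
fn is_signup_allowed(email: &str, signups_allowed: bool, whitelist: &str) -> bool {
    // rsplitn splits from the right: the first item is everything after the last '@'.
    let domain = email.rsplitn(2, '@').next().unwrap_or("").to_lowercase();
    if !whitelist.is_empty() {
        whitelist.split(',').any(|d| d.trim() == domain)
    } else {
        signups_allowed
    }
}

fn main() {
    // With a whitelist set, signups_allowed no longer matters.
    assert!(is_signup_allowed("a@example.com", false, "example.com"));
    assert!(!is_signup_allowed("a@other.com", true, "example.com"));
    // Without one, signups_allowed decides.
    assert!(is_signup_allowed("a@other.com", true, ""));
}
```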
@@ -561,6 +644,13 @@ impl Config {
         }
     }

+    /// Tests whether the admin token is set to a non-empty value.
+    pub fn is_admin_token_set(&self) -> bool {
+        let token = self.admin_token();
+
+        token.is_some() && !token.unwrap().trim().is_empty()
+    }
+
     pub fn render_template<T: serde::ser::Serialize>(
         &self,
         name: &str,
@@ -568,7 +658,7 @@ impl Config {
     ) -> Result<String, crate::error::Error> {
         if CONFIG.reload_templates() {
             warn!("RELOADING TEMPLATES");
-            let hb = load_templates(CONFIG.templates_folder().as_ref());
+            let hb = load_templates(CONFIG.templates_folder());
             hb.render(name, data).map_err(Into::into)
         } else {
             let hb = &CONFIG.inner.read().unwrap().templates;
@@ -577,17 +667,18 @@ impl Config {
     }
 }

-use handlebars::{
-    Context, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext, RenderError, Renderable,
-};
+use handlebars::{Context, Handlebars, Helper, HelperResult, Output, RenderContext, RenderError, Renderable};

-fn load_templates(path: &str) -> Handlebars {
+fn load_templates<P>(path: P) -> Handlebars<'static>
+where
+    P: AsRef<std::path::Path>,
+{
     let mut hb = Handlebars::new();
     // Error on missing params
     hb.set_strict_mode(true);
     // Register helpers
-    hb.register_helper("case", Box::new(CaseHelper));
-    hb.register_helper("jsesc", Box::new(JsEscapeHelper));
+    hb.register_helper("case", Box::new(case_helper));
+    hb.register_helper("jsesc", Box::new(js_escape_helper));

     macro_rules! reg {
         ($name:expr) => {{
@@ -613,10 +704,14 @@ fn load_templates(path: &str) -> Handlebars {
     reg!("email/verify_email", ".html");
     reg!("email/welcome", ".html");
     reg!("email/welcome_must_verify", ".html");
+    reg!("email/smtp_test", ".html");

     reg!("admin/base");
     reg!("admin/login");
-    reg!("admin/page");
+    reg!("admin/settings");
+    reg!("admin/users");
+    reg!("admin/organizations");
+    reg!("admin/diagnostics");

     // And then load user templates to overwrite the defaults
     // Use .hbs extension for the files
@@ -626,54 +721,44 @@ fn load_templates(path: &str) -> Handlebars {
     hb
 }

-pub struct CaseHelper;
-
-impl HelperDef for CaseHelper {
-    fn call<'reg: 'rc, 'rc>(
-        &self,
-        h: &Helper<'reg, 'rc>,
-        r: &'reg Handlebars,
-        ctx: &Context,
-        rc: &mut RenderContext<'reg>,
-        out: &mut dyn Output,
-    ) -> HelperResult {
-        let param = h
-            .param(0)
-            .ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
-        let value = param.value().clone();
-
-        if h.params().iter().skip(1).any(|x| x.value() == &value) {
-            h.template().map(|t| t.render(r, ctx, rc, out)).unwrap_or(Ok(()))
-        } else {
-            Ok(())
-        }
-    }
-}
-
-pub struct JsEscapeHelper;
-
-impl HelperDef for JsEscapeHelper {
-    fn call<'reg: 'rc, 'rc>(
-        &self,
-        h: &Helper<'reg, 'rc>,
-        _: &'reg Handlebars,
-        _: &Context,
-        _: &mut RenderContext<'reg>,
-        out: &mut dyn Output,
-    ) -> HelperResult {
-        let param = h
-            .param(0)
-            .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
-
-        let value = param
-            .value()
-            .as_str()
-            .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
-
-        let escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
-        let quoted_value = format!("&quot;{}&quot;", escaped_value);
-
-        out.write(&quoted_value)?;
-        Ok(())
-    }
-}
+fn case_helper<'reg, 'rc>(
+    h: &Helper<'reg, 'rc>,
+    r: &'reg Handlebars,
+    ctx: &'rc Context,
+    rc: &mut RenderContext<'reg, 'rc>,
+    out: &mut dyn Output,
+) -> HelperResult {
+    let param = h
+        .param(0)
+        .ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;
+    let value = param.value().clone();
+
+    if h.params().iter().skip(1).any(|x| x.value() == &value) {
+        h.template().map(|t| t.render(r, ctx, rc, out)).unwrap_or(Ok(()))
+    } else {
+        Ok(())
+    }
+}
+
+fn js_escape_helper<'reg, 'rc>(
+    h: &Helper<'reg, 'rc>,
+    _r: &'reg Handlebars,
+    _ctx: &'rc Context,
+    _rc: &mut RenderContext<'reg, 'rc>,
+    out: &mut dyn Output,
+) -> HelperResult {
+    let param = h
+        .param(0)
+        .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;
+
+    let value = param
+        .value()
+        .as_str()
+        .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?;
+
+    let escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27");
+    let quoted_value = format!("&quot;{}&quot;", escaped_value);
+
+    out.write(&quoted_value)?;
+    Ok(())
+}
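The rewrite above relies on the handlebars crate's blanket HelperDef implementation for plain functions, so a unit struct plus a manual impl is no longer needed. An illustrative helper in the same style (the signature mirrors the diff; `upper_helper` itself is a made-up example, not part of the codebase):

```rust
use handlebars::{Context, Handlebars, Helper, HelperResult, Output, RenderContext, RenderError};

// Upper-cases its first argument: {{upper name}}
fn upper_helper<'reg, 'rc>(
    h: &Helper<'reg, 'rc>,
    _r: &'reg Handlebars,
    _ctx: &'rc Context,
    _rc: &mut RenderContext<'reg, 'rc>,
    out: &mut dyn Output,
) -> HelperResult {
    let value = h
        .param(0)
        .and_then(|p| p.value().as_str())
        .ok_or_else(|| RenderError::new("Param not found for helper \"upper\""))?;
    out.write(&value.to_uppercase())?;
    Ok(())
}

// Registration then looks exactly like the diff:
//     hb.register_helper("upper", Box::new(upper_helper));
```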
@@ -1,12 +1,13 @@
 //
 // PBKDF2 derivation
 //
+use std::num::NonZeroU32;
+
 use ring::{digest, hmac, pbkdf2};
-use std::num::NonZeroU32;
 use crate::error::Error;

-static DIGEST_ALG: &digest::Algorithm = &digest::SHA256;
+static DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
 const OUTPUT_LEN: usize = digest::SHA256_OUTPUT_LEN;

 pub fn hash_password(secret: &[u8], salt: &[u8], iterations: u32) -> Vec<u8> {
@@ -29,7 +30,7 @@ pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8], iterati
 pub fn hmac_sign(key: &str, data: &str) -> String {
     use data_encoding::HEXLOWER;

-    let key = hmac::SigningKey::new(&digest::SHA1, key.as_bytes());
+    let key = hmac::Key::new(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY, key.as_bytes());
     let signature = hmac::sign(&key, data.as_bytes());

     HEXLOWER.encode(signature.as_ref())
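This hunk tracks the ring 0.13 to 0.14+ API changes: the PBKDF2 algorithm is now a `pbkdf2::Algorithm` value and the iteration count a `NonZeroU32`. A minimal sketch of how `hash_password` presumably looks after the migration (the body is an assumption based on ring's documented `pbkdf2::derive` signature):

```rust
use std::num::NonZeroU32;
use ring::{digest, pbkdf2};

fn hash_password(secret: &[u8], salt: &[u8], iterations: u32) -> Vec<u8> {
    let iterations = NonZeroU32::new(iterations).expect("iterations can't be zero");
    let mut out = vec![0u8; digest::SHA256_OUTPUT_LEN];
    pbkdf2::derive(pbkdf2::PBKDF2_HMAC_SHA256, iterations, salt, secret, &mut out);
    out
}
```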
@@ -1,18 +1,14 @@
-use std::ops::Deref;
-
-use diesel::r2d2;
-use diesel::r2d2::ConnectionManager;
-use diesel::{Connection as DieselConnection, ConnectionError};
-
-use rocket::http::Status;
-use rocket::request::{self, FromRequest};
-use rocket::{Outcome, Request, State};
-
-use crate::error::Error;
-use chrono::prelude::*;
 use std::process::Command;

-use crate::CONFIG;
+use chrono::prelude::*;
+use diesel::{r2d2, r2d2::ConnectionManager, Connection as DieselConnection, ConnectionError};
+use rocket::{
+    http::Status,
+    request::{FromRequest, Outcome},
+    Request, State,
+};
+
+use crate::{error::Error, CONFIG};

 /// An alias to the database connection used
 #[cfg(feature = "sqlite")]
@@ -75,8 +71,9 @@ pub fn backup_database() -> Result<(), Error> {
 impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
     type Error = ();

-    fn from_request(request: &'a Request<'r>) -> request::Outcome<DbConn, ()> {
-        let pool = request.guard::<State<Pool>>()?;
+    fn from_request(request: &'a Request<'r>) -> Outcome<DbConn, ()> {
+        // https://github.com/SergioBenitez/Rocket/commit/e3c1a4ad3ab9b840482ec6de4200d30df43e357c
+        let pool = try_outcome!(request.guard::<State<Pool>>());
         match pool.get() {
             Ok(conn) => Outcome::Success(DbConn(conn)),
             Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())),
@@ -85,7 +82,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
 }

 // For the convenience of using an &DbConn as a &Database.
-impl Deref for DbConn {
+impl std::ops::Deref for DbConn {
     type Target = Connection;
     fn deref(&self) -> &Self::Target {
         &self.0
@@ -5,6 +5,7 @@ use crate::CONFIG;

 #[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "attachments"]
+#[changeset_options(treat_none_as_null="true")]
 #[belongs_to(Cipher, foreign_key = "cipher_uuid")]
 #[primary_key(id)]
 pub struct Attachment {
@@ -17,7 +18,7 @@ pub struct Attachment {

 /// Local methods
 impl Attachment {
-    pub fn new(id: String, cipher_uuid: String, file_name: String, file_size: i32) -> Self {
+    pub const fn new(id: String, cipher_uuid: String, file_name: String, file_size: i32) -> Self {
         Self {
             id,
             cipher_uuid,
@@ -49,9 +50,8 @@ impl Attachment {
     }
 }

-use crate::db::schema::attachments;
+use crate::db::schema::{attachments, ciphers};
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -118,4 +118,46 @@ impl Attachment {
             .load::<Self>(&**conn)
             .expect("Error loading attachments")
     }
+
+    pub fn size_by_user(user_uuid: &str, conn: &DbConn) -> i64 {
+        let result: Option<i64> = attachments::table
+            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
+            .filter(ciphers::user_uuid.eq(user_uuid))
+            .select(diesel::dsl::sum(attachments::file_size))
+            .first(&**conn)
+            .expect("Error loading user attachment total size");
+
+        result.unwrap_or(0)
+    }
+
+    pub fn count_by_user(user_uuid: &str, conn: &DbConn) -> i64 {
+        attachments::table
+            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
+            .filter(ciphers::user_uuid.eq(user_uuid))
+            .count()
+            .first::<i64>(&**conn)
+            .ok()
+            .unwrap_or(0)
+    }
+
+    pub fn size_by_org(org_uuid: &str, conn: &DbConn) -> i64 {
+        let result: Option<i64> = attachments::table
+            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
+            .filter(ciphers::organization_uuid.eq(org_uuid))
+            .select(diesel::dsl::sum(attachments::file_size))
+            .first(&**conn)
+            .expect("Error loading user attachment total size");
+
+        result.unwrap_or(0)
+    }
+
+    pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 {
+        attachments::table
+            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
+            .filter(ciphers::organization_uuid.eq(org_uuid))
+            .count()
+            .first(&**conn)
+            .ok()
+            .unwrap_or(0)
+    }
 }
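The Option dance in size_by_user/size_by_org exists because SQL SUM() over zero rows is NULL, which Diesel surfaces as Option<i64>; the helpers normalize that to 0. A plain-Rust analogue of the same normalization:

```rust
/// SUM() over an empty set is NULL (None); callers want 0 instead.
fn total_size(sizes: &[i64]) -> i64 {
    let sum: Option<i64> = if sizes.is_empty() { None } else { Some(sizes.iter().sum()) };
    sum.unwrap_or(0)
}

fn main() {
    assert_eq!(total_size(&[]), 0); // no attachments
    assert_eq!(total_size(&[10, 20]), 30);
}
```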
@@ -7,6 +7,7 @@ use super::{

 #[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "ciphers"]
+#[changeset_options(treat_none_as_null="true")]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[belongs_to(Organization, foreign_key = "organization_uuid")]
 #[primary_key(uuid)]
@@ -33,6 +34,7 @@ pub struct Cipher {

     pub favorite: bool,
     pub password_history: Option<String>,
+    pub deleted_at: Option<NaiveDateTime>,
 }

 /// Local methods
@@ -57,13 +59,13 @@ impl Cipher {

             data: String::new(),
             password_history: None,
+            deleted_at: None,
         }
     }
 }

 use crate::db::schema::*;
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -80,7 +82,28 @@ impl Cipher {
         let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
         let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);

-        let mut data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
+        let (read_only, hide_passwords) =
+            match self.get_access_restrictions(&user_uuid, &conn) {
+                Some((ro, hp)) => (ro, hp),
+                None => {
+                    error!("Cipher ownership assertion failure");
+                    (true, true)
+                },
+            };
+
+        // Get the data or a default empty value to avoid issues with the mobile apps
+        let mut data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({
+            "Fields":null,
+            "Name": self.name,
+            "Notes":null,
+            "Password":null,
+            "PasswordHistory":null,
+            "PasswordRevisionDate":null,
+            "Response":null,
+            "Totp":null,
+            "Uris":null,
+            "Username":null
+        }));

         // TODO: ******* Backwards compat start **********
         // To remove backwards compatibility, just remove this entire section
@@ -91,15 +114,26 @@ impl Cipher {
         }
         // TODO: ******* Backwards compat end **********

+        // There are three types of cipher response models in upstream
+        // Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order
+        // of increasing level of detail). bitwarden_rs currently only
+        // supports the "cipherDetails" type, though it seems like the
+        // Bitwarden clients will ignore extra fields.
+        //
+        // Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs
         let mut json_object = json!({
+            "Object": "cipherDetails",
             "Id": self.uuid,
             "Type": self.atype,
             "RevisionDate": format_date(&self.updated_at),
+            "DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
             "FolderId": self.get_folder_uuid(&user_uuid, &conn),
             "Favorite": self.favorite,
             "OrganizationId": self.organization_uuid,
             "Attachments": attachments_json,
             "OrganizationUseTotp": true,
+
+            // This field is specific to the cipherDetails type.
             "CollectionIds": self.get_collections(user_uuid, &conn),

             "Name": self.name,
@@ -108,8 +142,11 @@ impl Cipher {

             "Data": data_json,

-            "Object": "cipher",
-            "Edit": true,
+            // These values are true by default, but can be false if the
+            // cipher belongs to a collection where the org owner has enabled
+            // the "Read Only" or "Hide Passwords" restrictions for the user.
+            "Edit": !read_only,
+            "ViewPassword": !hide_passwords,

             "PasswordHistory": password_history_json,
         });
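The new DeletedDate field serializes the soft-delete timestamp, with None becoming JSON null. A sketch of that mapping in isolation (the exact format string is an assumption; the real code delegates to the crate's format_date helper):

```rust
use chrono::NaiveDateTime;
use serde_json::Value;

fn deleted_date(deleted_at: Option<NaiveDateTime>) -> Value {
    deleted_at.map_or(Value::Null, |d| {
        Value::String(d.format("%Y-%m-%dT%H:%M:%S%.6fZ").to_string())
    })
}

fn main() {
    assert_eq!(deleted_date(None), Value::Null); // not deleted, so null in the JSON
}
```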
@@ -226,64 +263,78 @@ impl Cipher {
         }
     }

-    pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-        ciphers::table
-            .filter(ciphers::uuid.eq(&self.uuid))
-            .left_join(
-                users_organizations::table.on(ciphers::organization_uuid
-                    .eq(users_organizations::org_uuid.nullable())
-                    .and(users_organizations::user_uuid.eq(user_uuid))),
-            )
-            .left_join(ciphers_collections::table)
-            .left_join(
-                users_collections::table
-                    .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)),
-            )
-            .filter(ciphers::user_uuid.eq(user_uuid).or(
-                // Cipher owner
-                users_organizations::access_all.eq(true).or(
-                    // access_all in Organization
-                    users_organizations::atype.le(UserOrgType::Admin as i32).or(
-                        // Org admin or owner
-                        users_collections::user_uuid.eq(user_uuid).and(
-                            users_collections::read_only.eq(false), //R/W access to collection
-                        ),
-                    ),
-                ),
-            ))
-            .select(ciphers::all_columns)
-            .first::<Self>(&**conn)
-            .ok()
-            .is_some()
+    /// Returns whether this cipher is directly owned by the user.
+    pub fn is_owned_by_user(&self, user_uuid: &str) -> bool {
+        self.user_uuid.is_some() && self.user_uuid.as_ref().unwrap() == user_uuid
+    }
+
+    /// Returns whether this cipher is owned by an org in which the user has full access.
+    pub fn is_in_full_access_org(&self, user_uuid: &str, conn: &DbConn) -> bool {
+        if let Some(ref org_uuid) = self.organization_uuid {
+            if let Some(user_org) = UserOrganization::find_by_user_and_org(&user_uuid, &org_uuid, &conn) {
+                return user_org.has_full_access();
+            }
+        }
+
+        false
+    }
+
+    /// Returns the user's access restrictions to this cipher. A return value
+    /// of None means that this cipher does not belong to the user, and is
+    /// not in any collection the user has access to. Otherwise, the user has
+    /// access to this cipher, and Some(read_only, hide_passwords) represents
+    /// the access restrictions.
+    pub fn get_access_restrictions(&self, user_uuid: &str, conn: &DbConn) -> Option<(bool, bool)> {
+        // Check whether this cipher is directly owned by the user, or is in
+        // a collection that the user has full access to. If so, there are no
+        // access restrictions.
+        if self.is_owned_by_user(&user_uuid) || self.is_in_full_access_org(&user_uuid, &conn) {
+            return Some((false, false));
+        }
+
+        // Check whether this cipher is in any collections accessible to the
+        // user. If so, retrieve the access flags for each collection.
+        let query = ciphers::table
+            .filter(ciphers::uuid.eq(&self.uuid))
+            .inner_join(ciphers_collections::table.on(
+                ciphers::uuid.eq(ciphers_collections::cipher_uuid)))
+            .inner_join(users_collections::table.on(
+                ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)
+                    .and(users_collections::user_uuid.eq(user_uuid))))
+            .select((users_collections::read_only, users_collections::hide_passwords));
+
+        // There's an edge case where a cipher can be in multiple collections
+        // with inconsistent access flags. For example, a cipher could be in
+        // one collection where the user has read-only access, but also in
+        // another collection where the user has read/write access. To handle
+        // this, we do a boolean OR of all values in each of the `read_only`
+        // and `hide_passwords` columns. This could ideally be done as part
+        // of the query, but Diesel doesn't support a max() or bool_or()
+        // function on booleans and this behavior isn't portable anyway.
+        if let Some(vec) = query.load::<(bool, bool)>(&**conn).ok() {
+            let mut read_only = false;
+            let mut hide_passwords = false;
+            for (ro, hp) in vec.iter() {
+                read_only |= ro;
+                hide_passwords |= hp;
+            }
+
+            Some((read_only, hide_passwords))
+        } else {
+            // This cipher isn't in any collections accessible to the user.
+            None
+        }
+    }
+
+    pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
+        match self.get_access_restrictions(&user_uuid, &conn) {
+            Some((read_only, _hide_passwords)) => !read_only,
+            None => false,
+        }
     }

     pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
-        ciphers::table
-            .filter(ciphers::uuid.eq(&self.uuid))
-            .left_join(
-                users_organizations::table.on(ciphers::organization_uuid
-                    .eq(users_organizations::org_uuid.nullable())
-                    .and(users_organizations::user_uuid.eq(user_uuid))),
-            )
-            .left_join(ciphers_collections::table)
-            .left_join(
-                users_collections::table
-                    .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)),
-            )
-            .filter(ciphers::user_uuid.eq(user_uuid).or(
-                // Cipher owner
-                users_organizations::access_all.eq(true).or(
-                    // access_all in Organization
-                    users_organizations::atype.le(UserOrgType::Admin as i32).or(
-                        // Org admin or owner
-                        users_collections::user_uuid.eq(user_uuid), // Access to Collection
-                    ),
-                ),
-            ))
-            .select(ciphers::all_columns)
-            .first::<Self>(&**conn)
-            .ok()
-            .is_some()
+        self.get_access_restrictions(&user_uuid, &conn).is_some()
     }

     pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> {
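The multi-collection edge case described in the comments, in isolation: flags from every collection the cipher is in are merged with a boolean OR, so a restriction seen in any one collection applies overall.

```rust
/// Folds per-collection (read_only, hide_passwords) rows exactly as the
/// loop in get_access_restrictions does.
fn merge_restrictions(rows: &[(bool, bool)]) -> (bool, bool) {
    let mut read_only = false;
    let mut hide_passwords = false;
    for &(ro, hp) in rows {
        read_only |= ro;
        hide_passwords |= hp;
    }
    (read_only, hide_passwords)
}

fn main() {
    // Read/write in one collection, read-only in another: read-only wins.
    assert_eq!(merge_restrictions(&[(false, false), (true, false)]), (true, false));
}
```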
@@ -340,12 +391,30 @@ impl Cipher {
             .load::<Self>(&**conn).expect("Error loading ciphers")
     }

+    pub fn count_owned_by_user(user_uuid: &str, conn: &DbConn) -> i64 {
+        ciphers::table
+            .filter(ciphers::user_uuid.eq(user_uuid))
+            .count()
+            .first::<i64>(&**conn)
+            .ok()
+            .unwrap_or(0)
+    }
+
     pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
         ciphers::table
             .filter(ciphers::organization_uuid.eq(org_uuid))
             .load::<Self>(&**conn).expect("Error loading ciphers")
     }

+    pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 {
+        ciphers::table
+            .filter(ciphers::organization_uuid.eq(org_uuid))
+            .count()
+            .first::<i64>(&**conn)
+            .ok()
+            .unwrap_or(0)
+    }
+
     pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec<Self> {
         folders_ciphers::table.inner_join(ciphers::table)
             .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
@@ -35,7 +35,6 @@ impl Collection {

 use crate::db::schema::*;
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -200,6 +199,7 @@ pub struct CollectionUser {
     pub user_uuid: String,
     pub collection_uuid: String,
     pub read_only: bool,
+    pub hide_passwords: bool,
 }

 /// Database methods
@@ -215,7 +215,7 @@ impl CollectionUser {
     }

     #[cfg(feature = "postgresql")]
-    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
+    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult {
         User::update_uuid_revision(&user_uuid, conn);

         diesel::insert_into(users_collections::table)
@@ -223,16 +223,20 @@ impl CollectionUser {
                 users_collections::user_uuid.eq(user_uuid),
                 users_collections::collection_uuid.eq(collection_uuid),
                 users_collections::read_only.eq(read_only),
+                users_collections::hide_passwords.eq(hide_passwords),
             ))
             .on_conflict((users_collections::user_uuid, users_collections::collection_uuid))
             .do_update()
-            .set(users_collections::read_only.eq(read_only))
+            .set((
+                users_collections::read_only.eq(read_only),
+                users_collections::hide_passwords.eq(hide_passwords),
+            ))
             .execute(&**conn)
             .map_res("Error adding user to collection")
     }

     #[cfg(not(feature = "postgresql"))]
-    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
+    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult {
         User::update_uuid_revision(&user_uuid, conn);

         diesel::replace_into(users_collections::table)
@@ -240,6 +244,7 @@ impl CollectionUser {
                 users_collections::user_uuid.eq(user_uuid),
                 users_collections::collection_uuid.eq(collection_uuid),
                 users_collections::read_only.eq(read_only),
+                users_collections::hide_passwords.eq(hide_passwords),
             ))
             .execute(&**conn)
             .map_res("Error adding user to collection")
@@ -5,6 +5,7 @@ use crate::CONFIG;

 #[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
 #[table_name = "devices"]
+#[changeset_options(treat_none_as_null="true")]
 #[belongs_to(User, foreign_key = "user_uuid")]
 #[primary_key(uuid)]
 pub struct Device {
@@ -76,7 +77,6 @@ impl Device {
         let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect();
         let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect();

-
         // Create the JWT claims struct, to send to the client
         use crate::auth::{encode_jwt, LoginJWTClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER};
         let claims = LoginJWTClaims {
@@ -107,7 +107,6 @@ impl Device {

 use crate::db::schema::devices;
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -63,7 +63,6 @@ impl FolderCipher {

 use crate::db::schema::{folders, folders_ciphers};
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -7,6 +7,7 @@ mod user;
 mod collection;
 mod organization;
 mod two_factor;
+mod org_policy;

 pub use self::attachment::Attachment;
 pub use self::cipher::Cipher;
@@ -17,3 +18,4 @@ pub use self::organization::Organization;
 pub use self::organization::{UserOrgStatus, UserOrgType, UserOrganization};
 pub use self::two_factor::{TwoFactor, TwoFactorType};
 pub use self::user::{Invitation, User};
+pub use self::org_policy::{OrgPolicy, OrgPolicyType};
src/db/models/org_policy.rs (new file, 141 lines)
@@ -0,0 +1,141 @@
+use diesel::prelude::*;
+use serde_json::Value;
+
+use crate::api::EmptyResult;
+use crate::db::schema::org_policies;
+use crate::db::DbConn;
+use crate::error::MapResult;
+
+use super::Organization;
+
+#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
+#[table_name = "org_policies"]
+#[belongs_to(Organization, foreign_key = "org_uuid")]
+#[primary_key(uuid)]
+pub struct OrgPolicy {
+    pub uuid: String,
+    pub org_uuid: String,
+    pub atype: i32,
+    pub enabled: bool,
+    pub data: String,
+}
+
+#[allow(dead_code)]
+#[derive(num_derive::FromPrimitive)]
+pub enum OrgPolicyType {
+    TwoFactorAuthentication = 0,
+    MasterPassword = 1,
+    PasswordGenerator = 2,
+}
+
+/// Local methods
+impl OrgPolicy {
+    pub fn new(org_uuid: String, atype: OrgPolicyType, data: String) -> Self {
+        Self {
+            uuid: crate::util::get_uuid(),
+            org_uuid,
+            atype: atype as i32,
+            enabled: false,
+            data,
+        }
+    }
+
+    pub fn to_json(&self) -> Value {
+        let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
+        json!({
+            "Id": self.uuid,
+            "OrganizationId": self.org_uuid,
+            "Type": self.atype,
+            "Data": data_json,
+            "Enabled": self.enabled,
+            "Object": "policy",
+        })
+    }
+}
+
+/// Database methods
+impl OrgPolicy {
+    #[cfg(feature = "postgresql")]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        // We need to make sure we're not going to violate the unique constraint on org_uuid and atype.
+        // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does
+        // not support multiple constraints on ON CONFLICT clauses.
+        diesel::delete(
+            org_policies::table
+                .filter(org_policies::org_uuid.eq(&self.org_uuid))
+                .filter(org_policies::atype.eq(&self.atype)),
+        )
+        .execute(&**conn)
+        .map_res("Error deleting org_policy for insert")?;
+
+        diesel::insert_into(org_policies::table)
+            .values(self)
+            .on_conflict(org_policies::uuid)
+            .do_update()
+            .set(self)
+            .execute(&**conn)
+            .map_res("Error saving org_policy")
+    }
+
+    #[cfg(not(feature = "postgresql"))]
+    pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        diesel::replace_into(org_policies::table)
+            .values(&*self)
+            .execute(&**conn)
+            .map_res("Error saving org_policy")
+    }
+
+    pub fn delete(self, conn: &DbConn) -> EmptyResult {
+        diesel::delete(org_policies::table.filter(org_policies::uuid.eq(self.uuid)))
+            .execute(&**conn)
+            .map_res("Error deleting org_policy")
+    }
+
+    pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
+        org_policies::table
+            .filter(org_policies::uuid.eq(uuid))
+            .first::<Self>(&**conn)
+            .ok()
+    }
+
+    pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
+        org_policies::table
+            .filter(org_policies::org_uuid.eq(org_uuid))
+            .load::<Self>(&**conn)
+            .expect("Error loading org_policy")
+    }
+
+    pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
+        use crate::db::schema::users_organizations;
+
+        org_policies::table
+            .left_join(
+                users_organizations::table.on(
+                    users_organizations::org_uuid.eq(org_policies::org_uuid)
+                        .and(users_organizations::user_uuid.eq(user_uuid)))
+            )
+            .select(org_policies::all_columns)
+            .load::<Self>(&**conn)
+            .expect("Error loading org_policy")
+    }
+
+    pub fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Option<Self> {
+        org_policies::table
+            .filter(org_policies::org_uuid.eq(org_uuid))
+            .filter(org_policies::atype.eq(atype))
+            .first::<Self>(&**conn)
+            .ok()
+    }
+
+    pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult {
+        diesel::delete(org_policies::table.filter(org_policies::org_uuid.eq(org_uuid)))
+            .execute(&**conn)
+            .map_res("Error deleting org_policy")
+    }
+
+    /*pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
+        diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
+            .execute(&**conn)
+            .map_res("Error deleting twofactors")
+    }*/
+}
@@ -1,7 +1,8 @@
 use serde_json::Value;
 use std::cmp::Ordering;
+use num_traits::FromPrimitive;

-use super::{CollectionUser, User};
+use super::{CollectionUser, User, OrgPolicy};

 #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
 #[table_name = "organizations"]
@@ -33,6 +34,7 @@ pub enum UserOrgStatus {
 }

 #[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(num_derive::FromPrimitive)]
 pub enum UserOrgType {
     Owner = 0,
     Admin = 1,
@@ -135,16 +137,6 @@ impl UserOrgType {
             _ => None,
         }
     }
-
-    pub fn from_i32(i: i32) -> Option<Self> {
-        match i {
-            0 => Some(UserOrgType::Owner),
-            1 => Some(UserOrgType::Admin),
-            2 => Some(UserOrgType::User),
-            3 => Some(UserOrgType::Manager),
-            _ => None,
-        }
-    }
 }

 /// Local methods
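The hand-written from_i32 removed above is replaced by the derived num_derive::FromPrimitive, with conversions going through the num_traits::FromPrimitive trait. A self-contained sketch (requires the num-derive and num-traits crates):

```rust
use num_traits::FromPrimitive;

#[derive(Debug, PartialEq, num_derive::FromPrimitive)]
enum UserOrgType {
    Owner = 0,
    Admin = 1,
    User = 2,
    Manager = 3,
}

fn main() {
    // The derive generates the primitive conversions the manual match used to provide.
    assert_eq!(UserOrgType::from_i32(1), Some(UserOrgType::Admin));
    assert_eq!(UserOrgType::from_i32(9), None);
}
```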
@@ -170,11 +162,12 @@ impl Organization {
             "UseEvents": false,
             "UseGroups": false,
             "UseTotp": true,
+            "UsePolicies": true,

             "BusinessName": null,
             "BusinessAddress1": null,
             "BusinessAddress2": null,
             "BusinessAddress3": null,
             "BusinessCountry": null,
             "BusinessTaxNumber": null,

@@ -205,7 +198,6 @@ impl UserOrganization {

 use crate::db::schema::{ciphers_collections, organizations, users_collections, users_organizations};
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -250,6 +242,7 @@ impl Organization {
         Cipher::delete_all_by_organization(&self.uuid, &conn)?;
         Collection::delete_all_by_organization(&self.uuid, &conn)?;
         UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
+        OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;

         diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
             .execute(&**conn)
@@ -262,12 +255,16 @@ impl Organization {
             .first::<Self>(&**conn)
             .ok()
     }
+
+    pub fn get_all(conn: &DbConn) -> Vec<Self> {
+        organizations::table.load::<Self>(&**conn).expect("Error loading organizations")
+    }
 }

 impl UserOrganization {
     pub fn to_json(&self, conn: &DbConn) -> Value {
         let org = Organization::find_by_uuid(&self.org_uuid, conn).unwrap();

         json!({
             "Id": self.org_uuid,
             "Name": org.name,
@@ -280,6 +277,9 @@ impl UserOrganization {
             "UseEvents": false,
             "UseGroups": false,
             "UseTotp": true,
+            "UsePolicies": true,
+            "UseApi": false,
+            "SelfHost": true,

             "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side

@@ -310,10 +310,11 @@ impl UserOrganization {
         })
     }

-    pub fn to_json_collection_user_details(&self, read_only: bool) -> Value {
+    pub fn to_json_user_access_restrictions(&self, col_user: &CollectionUser) -> Value {
         json!({
             "Id": self.uuid,
-            "ReadOnly": read_only
+            "ReadOnly": col_user.read_only,
+            "HidePasswords": col_user.hide_passwords,
         })
     }

@@ -324,7 +325,11 @@ impl UserOrganization {
         let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
         collections
             .iter()
-            .map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only}))
+            .map(|c| json!({
+                "Id": c.collection_uuid,
+                "ReadOnly": c.read_only,
+                "HidePasswords": c.hide_passwords,
+            }))
             .collect()
     };

@@ -388,8 +393,13 @@ impl UserOrganization {
         Ok(())
     }

+    pub fn has_status(self, status: UserOrgStatus) -> bool {
+        self.status == status as i32
+    }
+
     pub fn has_full_access(self) -> bool {
-        self.access_all || self.atype >= UserOrgType::Admin
+        (self.access_all || self.atype >= UserOrgType::Admin) &&
+            self.has_status(UserOrgStatus::Confirmed)
     }

     pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
@@ -437,6 +447,15 @@ impl UserOrganization {
             .expect("Error loading user organizations")
     }

+    pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 {
+        users_organizations::table
+            .filter(users_organizations::org_uuid.eq(org_uuid))
+            .count()
+            .first::<i64>(&**conn)
+            .ok()
+            .unwrap_or(0)
+    }
+
     pub fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Vec<Self> {
         users_organizations::table
             .filter(users_organizations::org_uuid.eq(org_uuid))
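has_full_access is tightened here: access_all or an Admin/Owner role is no longer enough, since the membership must also be Confirmed. A standalone sketch of the new predicate (the status values are as declared in this codebase; the real UserOrgType comparison goes through a custom PartialOrd that ranks Owner above Admin):

```rust
#[derive(Clone, Copy, PartialEq)]
enum UserOrgStatus { Invited = 0, Accepted = 1, Confirmed = 2 }

/// atype: 0 = Owner, 1 = Admin, so "Admin or better" is atype <= 1 here.
fn has_full_access(access_all: bool, atype: i32, status: UserOrgStatus) -> bool {
    (access_all || atype <= 1) && status == UserOrgStatus::Confirmed
}

fn main() {
    // An invited or merely accepted owner no longer has full access.
    assert!(!has_full_access(true, 0, UserOrgStatus::Accepted));
    assert!(has_full_access(true, 0, UserOrgStatus::Confirmed));
}
```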
@@ -1,4 +1,3 @@
-use diesel;
 use diesel::prelude::*;
 use serde_json::Value;

@@ -23,7 +22,7 @@ pub struct TwoFactor {
 }

 #[allow(dead_code)]
-#[derive(FromPrimitive)]
+#[derive(num_derive::FromPrimitive)]
 pub enum TwoFactorType {
     Authenticator = 0,
     Email = 1,
@@ -60,7 +59,7 @@ impl TwoFactor {
         })
     }

-    pub fn to_json_list(&self) -> Value {
+    pub fn to_json_provider(&self) -> Value {
         json!({
             "Enabled": self.enabled,
             "Type": self.atype,
@@ -73,6 +72,13 @@ impl TwoFactor {
 impl TwoFactor {
     #[cfg(feature = "postgresql")]
     pub fn save(&self, conn: &DbConn) -> EmptyResult {
+        // We need to make sure we're not going to violate the unique constraint on user_uuid and atype.
+        // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does
+        // not support multiple constraints on ON CONFLICT clauses.
+        diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(&self.user_uuid)).filter(twofactor::atype.eq(&self.atype)))
+            .execute(&**conn)
+            .map_res("Error deleting twofactor for insert")?;
+
         diesel::insert_into(twofactor::table)
             .values(self)
             .on_conflict(twofactor::uuid)
src/db/models/user.rs
@@ -6,6 +6,7 @@ use crate::CONFIG;

 #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
 #[table_name = "users"]
+#[changeset_options(treat_none_as_null="true")]
 #[primary_key(uuid)]
 pub struct User {
     pub uuid: String,
@@ -120,7 +121,6 @@ impl User {
 use super::{Cipher, Device, Folder, TwoFactor, UserOrgType, UserOrganization};
 use crate::db::schema::{invitations, users};
 use crate::db::DbConn;
-use diesel;
 use diesel::prelude::*;

 use crate::api::EmptyResult;
@@ -274,7 +274,7 @@ pub struct Invitation {
 }

 impl Invitation {
-    pub fn new(email: String) -> Self {
+    pub const fn new(email: String) -> Self {
         Self { email }
     }

@@ -319,10 +319,9 @@ impl Invitation {
     }

     pub fn take(mail: &str, conn: &DbConn) -> bool {
-        CONFIG.invitations_allowed()
-            && match Self::find_by_mail(mail, &conn) {
-                Some(invitation) => invitation.delete(&conn).is_ok(),
-                None => false,
-            }
+        match Self::find_by_mail(mail, &conn) {
+            Some(invitation) => invitation.delete(&conn).is_ok(),
+            None => false,
+        }
     }
 }
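Note: several changes in this compare just add `const` to trivial constructors such as Invitation::new. A minimal sketch of why that compiles: a const fn may move a non-Copy argument (the String) into the returned struct, so nothing in the body blocks const evaluation (names illustrative):

```rust
struct Invitation {
    email: String,
}

impl Invitation {
    // A const fn may move an owned, non-Copy value into the struct it
    // returns, so trivial constructors like this one can be marked const.
    const fn new(email: String) -> Self {
        Self { email }
    }
}

fn main() {
    let inv = Invitation::new(String::from("user@example.com"));
    println!("invited: {}", inv.email);
}
```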
src/db/schemas/mysql/schema.rs
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         favorite -> Bool,
         password_history -> Nullable<Text>,
+        deleted_at -> Nullable<Datetime>,
     }
 }

@@ -77,6 +78,16 @@ table! {
     }
 }

+table! {
+    org_policies (uuid) {
+        uuid -> Varchar,
+        org_uuid -> Varchar,
+        atype -> Integer,
+        enabled -> Bool,
+        data -> Text,
+    }
+}
+
 table! {
     organizations (uuid) {
         uuid -> Varchar,
@@ -130,6 +141,7 @@ table! {
         user_uuid -> Varchar,
         collection_uuid -> Varchar,
         read_only -> Bool,
+        hide_passwords -> Bool,
     }
 }

@@ -155,6 +167,7 @@ joinable!(devices -> users (user_uuid));
 joinable!(folders -> users (user_uuid));
 joinable!(folders_ciphers -> ciphers (cipher_uuid));
 joinable!(folders_ciphers -> folders (folder_uuid));
+joinable!(org_policies -> organizations (org_uuid));
 joinable!(twofactor -> users (user_uuid));
 joinable!(users_collections -> collections (collection_uuid));
 joinable!(users_collections -> users (user_uuid));
@@ -170,6 +183,7 @@ allow_tables_to_appear_in_same_query!(
     folders,
     folders_ciphers,
     invitations,
+    org_policies,
     organizations,
     twofactor,
     users,
src/db/schemas/postgresql/schema.rs
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         favorite -> Bool,
         password_history -> Nullable<Text>,
+        deleted_at -> Nullable<Timestamp>,
     }
 }

@@ -77,6 +78,16 @@ table! {
     }
 }

+table! {
+    org_policies (uuid) {
+        uuid -> Text,
+        org_uuid -> Text,
+        atype -> Integer,
+        enabled -> Bool,
+        data -> Text,
+    }
+}
+
 table! {
     organizations (uuid) {
         uuid -> Text,
@@ -130,6 +141,7 @@ table! {
         user_uuid -> Text,
         collection_uuid -> Text,
         read_only -> Bool,
+        hide_passwords -> Bool,
     }
 }

@@ -155,6 +167,7 @@ joinable!(devices -> users (user_uuid));
 joinable!(folders -> users (user_uuid));
 joinable!(folders_ciphers -> ciphers (cipher_uuid));
 joinable!(folders_ciphers -> folders (folder_uuid));
+joinable!(org_policies -> organizations (org_uuid));
 joinable!(twofactor -> users (user_uuid));
 joinable!(users_collections -> collections (collection_uuid));
 joinable!(users_collections -> users (user_uuid));
@@ -170,6 +183,7 @@ allow_tables_to_appear_in_same_query!(
     folders,
     folders_ciphers,
     invitations,
+    org_policies,
     organizations,
     twofactor,
     users,
src/db/schemas/sqlite/schema.rs
@@ -22,6 +22,7 @@ table! {
         data -> Text,
         favorite -> Bool,
         password_history -> Nullable<Text>,
+        deleted_at -> Nullable<Timestamp>,
     }
 }

@@ -77,6 +78,16 @@ table! {
     }
 }

+table! {
+    org_policies (uuid) {
+        uuid -> Text,
+        org_uuid -> Text,
+        atype -> Integer,
+        enabled -> Bool,
+        data -> Text,
+    }
+}
+
 table! {
     organizations (uuid) {
         uuid -> Text,
@@ -130,6 +141,7 @@ table! {
         user_uuid -> Text,
         collection_uuid -> Text,
         read_only -> Bool,
+        hide_passwords -> Bool,
     }
 }

@@ -155,6 +167,7 @@ joinable!(devices -> users (user_uuid));
 joinable!(folders -> users (user_uuid));
 joinable!(folders_ciphers -> ciphers (cipher_uuid));
 joinable!(folders_ciphers -> folders (folder_uuid));
+joinable!(org_policies -> organizations (org_uuid));
 joinable!(twofactor -> users (user_uuid));
 joinable!(users_collections -> collections (collection_uuid));
 joinable!(users_collections -> users (user_uuid));
@@ -170,6 +183,7 @@ allow_tables_to_appear_in_same_query!(
     folders,
     folders_ciphers,
     invitations,
+    org_policies,
     organizations,
     twofactor,
     users,
src/error.rs (68 lines changed)
@@ -7,7 +7,6 @@ macro_rules! make_error {
     ( $( $name:ident ( $ty:ty ): $src_fn:expr, $usr_msg_fun:expr ),+ $(,)? ) => {
         const BAD_REQUEST: u16 = 400;

-        #[derive(Display)]
         pub enum ErrorKind { $($name( $ty )),+ }
         pub struct Error { message: String, error: ErrorKind, error_code: u16 }

@@ -42,12 +41,16 @@ use reqwest::Error as ReqErr;
 use serde_json::{Error as SerdeErr, Value};
 use std::io::Error as IOErr;

-use std::option::NoneError as NoneErr;
 use std::time::SystemTimeError as TimeErr;
 use u2f::u2ferror::U2fError as U2fErr;
 use yubico::yubicoerror::YubicoError as YubiErr;

-#[derive(Display, Serialize)]
+use lettre::address::AddressError as AddrErr;
+use lettre::error::Error as LettreErr;
+use lettre::message::mime::FromStrError as FromStrErr;
+use lettre::transport::smtp::error::Error as SmtpErr;
+
+#[derive(Serialize)]
 pub struct Empty {}

 // Error struct
@@ -73,20 +76,29 @@ make_error! {
     ReqError(ReqErr): _has_source, _api_error,
     RegexError(RegexErr): _has_source, _api_error,
     YubiError(YubiErr): _has_source, _api_error,
-}

-// This is implemented by hand because NoneError doesn't implement neither Display nor Error
-impl From<NoneErr> for Error {
-    fn from(_: NoneErr) -> Self {
-        Error::from(("NoneError", String::new()))
-    }
+    LetreError(LettreErr): _has_source, _api_error,
+    AddressError(AddrErr): _has_source, _api_error,
+    SmtpError(SmtpErr): _has_source, _api_error,
+    FromStrError(FromStrErr): _has_source, _api_error,
 }

 impl std::fmt::Debug for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         match self.source() {
             Some(e) => write!(f, "{}.\n[CAUSE] {:#?}", self.message, e),
-            None => write!(f, "{}. {}", self.message, self.error),
+            None => match self.error {
+                ErrorKind::EmptyError(_) => Ok(()),
+                ErrorKind::SimpleError(ref s) => {
+                    if &self.message == s {
+                        write!(f, "{}", self.message)
+                    } else {
+                        write!(f, "{}. {}", self.message, s)
+                    }
+                }
+                ErrorKind::JsonError(_) => write!(f, "{}", self.message),
+                _ => unreachable!(),
+            },
         }
     }
 }
@@ -105,7 +117,7 @@ impl Error {
         self
     }

-    pub fn with_code(mut self, code: u16) -> Self {
+    pub const fn with_code(mut self, code: u16) -> Self {
         self.error_code = code;
         self
     }
@@ -133,7 +145,7 @@ impl<S> MapResult<S> for Option<S> {
     }
 }

-fn _has_source<T>(e: T) -> Option<T> {
+const fn _has_source<T>(e: T) -> Option<T> {
     Some(e)
 }
 fn _no_source<T, S>(_: T) -> Option<S> {
@@ -170,15 +182,17 @@ use rocket::response::{self, Responder, Response};

 impl<'r> Responder<'r> for Error {
     fn respond_to(self, _: &Request) -> response::Result<'r> {
-        let usr_msg = format!("{}", self);
-        error!("{:#?}", self);
+        match self.error {
+            ErrorKind::EmptyError(_) => {} // Don't print the error in this situation
+            _ => error!(target: "error", "{:#?}", self),
+        };

         let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);

         Response::build()
             .status(code)
             .header(ContentType::JSON)
-            .sized_body(Cursor::new(usr_msg))
+            .sized_body(Cursor::new(format!("{}", self)))
             .ok()
     }
 }
@@ -196,21 +210,33 @@ macro_rules! err {
     }};
 }

+#[macro_export]
+macro_rules! err_discard {
+    ($msg:expr, $data:expr) => {{
+        std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
+        return Err(crate::error::Error::new($msg, $msg));
+    }};
+    ($usr_msg:expr, $log_value:expr, $data:expr) => {{
+        std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
+        return Err(crate::error::Error::new($usr_msg, $log_value));
+    }};
+}
+
 #[macro_export]
 macro_rules! err_json {
-    ($expr:expr) => {{
-        return Err(crate::error::Error::from($expr));
+    ($expr:expr, $log_value:expr) => {{
+        return Err(($log_value, $expr).into());
     }};
 }

 #[macro_export]
 macro_rules! err_handler {
     ($expr:expr) => {{
-        error!("Unauthorized Error: {}", $expr);
-        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $expr));
+        error!(target: "auth", "Unauthorized Error: {}", $expr);
+        return ::rocket::request::Outcome::Failure((rocket::http::Status::Unauthorized, $expr));
     }};
     ($usr_msg:expr, $log_value:expr) => {{
-        error!("Unauthorized Error: {}. {}", $usr_msg, $log_value);
-        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $usr_msg));
+        error!(target: "auth", "Unauthorized Error: {}. {}", $usr_msg, $log_value);
+        return ::rocket::request::Outcome::Failure((rocket::http::Status::Unauthorized, $usr_msg));
     }};
 }
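Note: the new err_discard! macro drains the remaining request body into a sink before returning the error, so the connection can still be reused. A std-only sketch of that drain-then-fail pattern, with any Read standing in for Rocket's Data type (names illustrative):

```rust
use std::io::{self, Read};

// Drain-then-fail: consume whatever is left of a request body so the
// connection can be reused, then return the error.
fn drain_and_fail<R: Read>(mut body: R, msg: &str) -> Result<(), String> {
    // Errors while draining are deliberately ignored; the error we care
    // about is the one we are returning.
    io::copy(&mut body, &mut io::sink()).ok();
    Err(msg.to_string())
}

fn main() {
    let body = io::Cursor::new(vec![0u8; 1024]);
    println!("{:?}", drain_and_fail(body, "invalid payload"));
}
```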
src/mail.rs (200 lines changed)
@@ -1,17 +1,23 @@
-use lettre::smtp::authentication::Credentials;
-use lettre::smtp::authentication::Mechanism as SmtpAuthMechanism;
-use lettre::smtp::ConnectionReuseParameters;
-use lettre::{ClientSecurity, ClientTlsParameters, SmtpClient, SmtpTransport, Transport};
-use lettre_email::{EmailBuilder, MimeMultipartType, PartBuilder};
+use std::{env, str::FromStr};
+
+use chrono::{DateTime, Local};
+use chrono_tz::Tz;
+
 use native_tls::{Protocol, TlsConnector};
 use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
-use quoted_printable::encode_to_str;

-use crate::api::EmptyResult;
-use crate::auth::{encode_jwt, generate_invite_claims, generate_delete_claims, generate_verify_email_claims};
-use crate::error::Error;
-use crate::CONFIG;
-use chrono::NaiveDateTime;
+use lettre::{
+    message::{header, Mailbox, Message, MultiPart, SinglePart},
+    transport::smtp::authentication::{Credentials, Mechanism as SmtpAuthMechanism},
+    transport::smtp::extension::ClientId,
+    Address, SmtpTransport, Tls, TlsParameters, Transport,
+};
+
+use crate::{
+    api::EmptyResult,
+    auth::{encode_jwt, generate_delete_claims, generate_invite_claims, generate_verify_email_claims},
+    error::Error,
+    CONFIG,
+};

 fn mailer() -> SmtpTransport {
     let host = CONFIG.smtp_host().unwrap();
@@ -22,42 +28,45 @@ fn mailer() -> SmtpTransport {
             .build()
             .unwrap();

-        let params = ClientTlsParameters::new(host.clone(), tls);
+        let params = TlsParameters::new(host.clone(), tls);

         if CONFIG.smtp_explicit_tls() {
-            ClientSecurity::Wrapper(params)
+            Tls::Wrapper(params)
         } else {
-            ClientSecurity::Required(params)
+            Tls::Required(params)
        }
     } else {
-        ClientSecurity::None
+        Tls::None
     };

     use std::time::Duration;
-    let smtp_client = SmtpClient::new((host.as_str(), CONFIG.smtp_port()), client_security).unwrap();
+    let smtp_client = SmtpTransport::builder(host).port(CONFIG.smtp_port()).tls(client_security);

-    let smtp_client = match (&CONFIG.smtp_username(), &CONFIG.smtp_password()) {
-        (Some(user), Some(pass)) => smtp_client.credentials(Credentials::new(user.clone(), pass.clone())),
+    let smtp_client = match (CONFIG.smtp_username(), CONFIG.smtp_password()) {
+        (Some(user), Some(pass)) => smtp_client.credentials(Credentials::new(user, pass)),
         _ => smtp_client,
     };

-    let smtp_client = match &CONFIG.smtp_auth_mechanism() {
-        Some(auth_mechanism_json) => {
-            let auth_mechanism = serde_json::from_str::<SmtpAuthMechanism>(&auth_mechanism_json);
-            match auth_mechanism {
-                Ok(auth_mechanism) => smtp_client.authentication_mechanism(auth_mechanism),
+    let smtp_client = match CONFIG.helo_name() {
+        Some(helo_name) => smtp_client.hello_name(ClientId::new(helo_name)),
+        None => smtp_client,
+    };
+
+    let smtp_client = match CONFIG.smtp_auth_mechanism() {
+        Some(mechanism) => {
+            let correct_mechanism = format!("\"{}\"", crate::util::upcase_first(mechanism.trim_matches('"')));
+
+            // TODO: Allow more than one mechanism
+            match serde_json::from_str::<SmtpAuthMechanism>(&correct_mechanism) {
+                Ok(auth_mechanism) => smtp_client.authentication(vec![auth_mechanism]),
                 _ => panic!("Failure to parse mechanism. Is it proper Json? Eg. `\"Plain\"` not `Plain`"),
             }
         }
         _ => smtp_client,
     };

-    smtp_client
-        .smtp_utf8(true)
-        .timeout(Some(Duration::from_secs(CONFIG.smtp_timeout())))
-        .connection_reuse(ConnectionReuseParameters::NoReuse)
-        .transport()
+    smtp_client.timeout(Some(Duration::from_secs(CONFIG.smtp_timeout()))).build()
 }

 fn get_text(template_name: &'static str, data: serde_json::Value) -> Result<(String, String, String), Error> {
@@ -83,6 +92,22 @@ fn get_template(template_name: &str, data: &serde_json::Value) -> Result<(String
     Ok((subject, body))
 }

+pub fn format_datetime(dt: &DateTime<Local>) -> String {
+    let fmt = "%A, %B %_d, %Y at %r %Z";
+
+    // With a DateTime<Local>, `%Z` formats as the time zone's UTC offset
+    // (e.g., `+00:00`). If the `TZ` environment variable is set, try to
+    // format as a time zone abbreviation instead (e.g., `UTC`).
+    if let Ok(tz) = env::var("TZ") {
+        if let Ok(tz) = tz.parse::<Tz>() {
+            return dt.with_timezone(&tz).format(fmt).to_string();
+        }
+    }
+
+    // Otherwise, fall back to just displaying the UTC offset.
+    dt.format(fmt).to_string()
+}
+
 pub fn send_password_hint(address: &str, hint: Option<String>) -> EmptyResult {
     let template_name = if hint.is_some() {
         "email/pw_hint_some"
@@ -92,13 +117,11 @@ pub fn send_password_hint(address: &str, hint: Option<String>) -> EmptyResult {

     let (subject, body_html, body_text) = get_text(template_name, json!({ "hint": hint, "url": CONFIG.domain() }))?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_delete_account(address: &str, uuid: &str) -> EmptyResult {
-    let claims = generate_delete_claims(
-        uuid.to_string(),
-    );
+    let claims = generate_delete_claims(uuid.to_string());
     let delete_token = encode_jwt(&claims);

     let (subject, body_html, body_text) = get_text(
@@ -111,13 +134,11 @@ pub fn send_delete_account(address: &str, uuid: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_verify_email(address: &str, uuid: &str) -> EmptyResult {
-    let claims = generate_verify_email_claims(
-        uuid.to_string(),
-    );
+    let claims = generate_verify_email_claims(uuid.to_string());
     let verify_email_token = encode_jwt(&claims);

     let (subject, body_html, body_text) = get_text(
@@ -130,7 +151,7 @@ pub fn send_verify_email(address: &str, uuid: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_welcome(address: &str) -> EmptyResult {
@@ -141,13 +162,11 @@ pub fn send_welcome(address: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult {
-    let claims = generate_verify_email_claims(
-        uuid.to_string(),
-    );
+    let claims = generate_verify_email_claims(uuid.to_string());
     let verify_email_token = encode_jwt(&claims);

     let (subject, body_html, body_text) = get_text(
@@ -159,7 +178,7 @@ pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_invite(
@@ -191,7 +210,7 @@ pub fn send_invite(
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_invite_accepted(new_user_email: &str, address: &str, org_name: &str) -> EmptyResult {
@@ -204,7 +223,7 @@ pub fn send_invite_accepted(new_user_email: &str, address: &str, org_name: &str)
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult {
@@ -216,26 +235,24 @@ pub fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

-pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult {
+pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &DateTime<Local>, device: &str) -> EmptyResult {
     use crate::util::upcase_first;
     let device = upcase_first(device);

-    let datetime = dt.format("%A, %B %_d, %Y at %H:%M").to_string();
-
     let (subject, body_html, body_text) = get_text(
         "email/new_device_logged_in",
         json!({
             "url": CONFIG.domain(),
             "ip": ip,
             "device": device,
-            "datetime": datetime,
+            "datetime": format_datetime(dt),
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_token(address: &str, token: &str) -> EmptyResult {
@@ -247,7 +264,7 @@ pub fn send_token(address: &str, token: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
 }

 pub fn send_change_email(address: &str, token: &str) -> EmptyResult {
@@ -259,43 +276,62 @@ pub fn send_change_email(address: &str, token: &str) -> EmptyResult {
         }),
     )?;

-    send_email(&address, &subject, &body_html, &body_text)
+    send_email(address, &subject, &body_html, &body_text)
+}
+
+pub fn send_test(address: &str) -> EmptyResult {
+    let (subject, body_html, body_text) = get_text(
+        "email/smtp_test",
+        json!({
+            "url": CONFIG.domain(),
+        }),
+    )?;
+
+    send_email(address, &subject, &body_html, &body_text)
 }

 fn send_email(address: &str, subject: &str, body_html: &str, body_text: &str) -> EmptyResult {
-    let html = PartBuilder::new()
-        .body(encode_to_str(body_html))
-        .header(("Content-Type", "text/html; charset=utf-8"))
-        .header(("Content-Transfer-Encoding", "quoted-printable"))
-        .build();
-
-    let text = PartBuilder::new()
-        .body(encode_to_str(body_text))
-        .header(("Content-Type", "text/plain; charset=utf-8"))
-        .header(("Content-Transfer-Encoding", "quoted-printable"))
-        .build();
-
-    let alternative = PartBuilder::new()
-        .message_type(MimeMultipartType::Alternative)
-        .child(text)
-        .child(html);
-
-    let email = EmailBuilder::new()
-        .to(address)
-        .from((CONFIG.smtp_from().as_str(), CONFIG.smtp_from_name().as_str()))
+    let address_split: Vec<&str> = address.rsplitn(2, '@').collect();
+    if address_split.len() != 2 {
+        err!("Invalid email address (no @)");
+    }
+
+    let domain_puny = match idna::domain_to_ascii_strict(address_split[0]) {
+        Ok(d) => d,
+        Err(_) => err!("Can't convert email domain to ASCII representation"),
+    };
+
+    let address = format!("{}@{}", address_split[1], domain_puny);
+
+    let data = MultiPart::mixed()
+        .multipart(
+            MultiPart::alternative()
+                .singlepart(
+                    SinglePart::quoted_printable()
+                        .header(header::ContentType("text/plain; charset=utf-8".parse()?))
+                        .body(body_text),
+                )
+                .multipart(
+                    MultiPart::related().singlepart(
+                        SinglePart::quoted_printable()
+                            .header(header::ContentType("text/html; charset=utf-8".parse()?))
+                            .body(body_html),
+                    )
+                    // .singlepart(SinglePart::base64() -- Inline files would go here
+                ),
+        )
+        // .singlepart(SinglePart::base64() -- Attachments would go here
+    ;
+
+    let email = Message::builder()
+        .to(Mailbox::new(None, Address::from_str(&address)?))
+        .from(Mailbox::new(
+            Some(CONFIG.smtp_from_name()),
+            Address::from_str(&CONFIG.smtp_from())?,
+        ))
         .subject(subject)
-        .child(alternative.build())
-        .build()
-        .map_err(|e| Error::new("Error building email", e.to_string()))?;
-
-    let mut transport = mailer();
-
-    let result = transport
-        .send(email.into())
-        .map_err(|e| Error::new("Error sending email", e.to_string()))
-        .and(Ok(()));
-
-    // Explicitly close the connection, in case of error
-    transport.close();
-    result
+        .multipart(data)?;
+
+    let _ = mailer().send(&email)?;
+    Ok(())
 }
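Note: send_email() now punycodes the domain before handing the address to lettre, splitting on the last '@' with rsplitn so the domain comes out first. A minimal sketch of that normalization, assuming the idna crate used in the diff above; `normalize_address` itself is an illustrative name:

```rust
// Split on the last '@' (rsplitn yields the domain first), punycode the
// domain, and rebuild the address with the local part untouched.
fn normalize_address(address: &str) -> Option<String> {
    let mut parts = address.rsplitn(2, '@');
    let domain = parts.next()?;
    let local = parts.next()?; // None here means there was no '@' at all
    let domain_puny = idna::domain_to_ascii_strict(domain).ok()?;
    Some(format!("{}@{}", local, domain_puny))
}

fn main() {
    // Non-ASCII domains become their xn-- punycode form.
    println!("{:?}", normalize_address("user@bücher.example"));
    println!("{:?}", normalize_address("no-at-sign")); // None
}
```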
src/main.rs (198 lines changed)
@@ -1,7 +1,7 @@
-#![feature(proc_macro_hygiene, vec_remove_item, try_trait, ip)]
+#![forbid(unsafe_code)]
+#![cfg_attr(feature = "unstable", feature(ip))]
 #![recursion_limit = "256"]

-#[cfg(feature = "openssl")]
 extern crate openssl;
 #[macro_use]
 extern crate rocket;
@@ -15,17 +15,15 @@ extern crate log;
 extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate derive_more;
-#[macro_use]
-extern crate num_derive;

 use std::{
+    fmt, // For panic logging
+    fs::create_dir_all,
+    panic,
     path::Path,
     process::{exit, Command},
-    fs::create_dir_all,
+    str::FromStr,
+    thread,
 };

 #[macro_use]
@@ -41,12 +39,38 @@ mod util;
 pub use config::CONFIG;
 pub use error::{Error, MapResult};

+use structopt::StructOpt;
+
+// Used for catching panics and log them to file instead of stderr
+use backtrace::Backtrace;
+struct Shim(Backtrace);
+
+impl fmt::Debug for Shim {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        write!(fmt, "\n{:?}", self.0)
+    }
+}
+
+#[derive(Debug, StructOpt)]
+#[structopt(name = "bitwarden_rs", about = "A Bitwarden API server written in Rust")]
+struct Opt {
+    /// Prints the app version
+    #[structopt(short, long)]
+    version: bool,
+}
+
 fn main() {
+    parse_args();
     launch_info();

-    if CONFIG.extended_logging() {
-        init_logging().ok();
-    }
+    use log::LevelFilter as LF;
+    let level = LF::from_str(&CONFIG.log_level()).expect("Valid log level");
+    init_logging(level).ok();
+
+    let extra_debug = match level {
+        LF::Trace | LF::Debug => true,
+        _ => false,
+    };

     check_db();
     check_rsa_keys();
@@ -55,43 +79,68 @@ fn main() {
     create_icon_cache_folder();

-    launch_rocket();
+    launch_rocket(extra_debug);
+}
+
+fn parse_args() {
+    let opt = Opt::from_args();
+    if opt.version {
+        if let Some(version) = option_env!("BWRS_VERSION") {
+            println!("bitwarden_rs {}", version);
+        } else {
+            println!("bitwarden_rs (Version info from Git not present)");
+        }
+        exit(0);
+    }
 }

 fn launch_info() {
     println!("/--------------------------------------------------------------------\\");
     println!("|                       Starting Bitwarden_RS                        |");

-    if let Some(version) = option_env!("GIT_VERSION") {
+    if let Some(version) = option_env!("BWRS_VERSION") {
         println!("|{:^68}|", format!("Version {}", version));
     }

     println!("|--------------------------------------------------------------------|");
     println!("| This is an *unofficial* Bitwarden implementation, DO NOT use the   |");
     println!("| official channels to report bugs/features, regardless of client.   |");
-    println!("| Report URL: https://github.com/dani-garcia/bitwarden_rs/issues/new |");
+    println!("| Send usage/configuration questions or feature requests to:         |");
+    println!("|   https://bitwardenrs.discourse.group/                              |");
+    println!("| Report suspected bugs/issues in the software itself at:            |");
+    println!("|   https://github.com/dani-garcia/bitwarden_rs/issues/new           |");
     println!("\\--------------------------------------------------------------------/\n");
 }

-fn init_logging() -> Result<(), fern::InitError> {
-    use std::str::FromStr;
+fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
     let mut logger = fern::Dispatch::new()
-        .format(|out, message, record| {
-            out.finish(format_args!(
-                "{}[{}][{}] {}",
-                chrono::Local::now().format("[%Y-%m-%d %H:%M:%S]"),
-                record.target(),
-                record.level(),
-                message
-            ))
-        })
-        .level(log::LevelFilter::from_str(&CONFIG.log_level()).expect("Valid log level"))
+        .level(level)
         // Hide unknown certificate errors if using self-signed
         .level_for("rustls::session", log::LevelFilter::Off)
         // Hide failed to close stream messages
         .level_for("hyper::server", log::LevelFilter::Warn)
+        // Silence rocket logs
+        .level_for("_", log::LevelFilter::Off)
+        .level_for("launch", log::LevelFilter::Off)
+        .level_for("launch_", log::LevelFilter::Off)
+        .level_for("rocket::rocket", log::LevelFilter::Off)
+        .level_for("rocket::fairing", log::LevelFilter::Off)
         .chain(std::io::stdout());

+    if CONFIG.extended_logging() {
+        logger = logger.format(|out, message, record| {
+            out.finish(format_args!(
+                "[{}][{}][{}] {}",
+                chrono::Local::now().format(&CONFIG.log_timestamp_format()),
+                record.target(),
+                record.level(),
+                message
+            ))
+        });
+    } else {
+        logger = logger.format(|out, message, _| out.finish(format_args!("{}", message)));
+    }
+
     if let Some(log_file) = CONFIG.log_file() {
         logger = logger.chain(fern::log_file(log_file)?);
     }
@@ -105,6 +154,42 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {

     logger.apply()?;

+    // Catch panics and log them instead of default output to StdErr
+    panic::set_hook(Box::new(|info| {
+        let backtrace = Backtrace::new();
+
+        let thread = thread::current();
+        let thread = thread.name().unwrap_or("unnamed");
+
+        let msg = match info.payload().downcast_ref::<&'static str>() {
+            Some(s) => *s,
+            None => match info.payload().downcast_ref::<String>() {
+                Some(s) => &**s,
+                None => "Box<Any>",
+            },
+        };
+
+        match info.location() {
+            Some(location) => {
+                error!(
+                    target: "panic", "thread '{}' panicked at '{}': {}:{}{:?}",
+                    thread,
+                    msg,
+                    location.file(),
+                    location.line(),
+                    Shim(backtrace)
+                );
+            }
+            None => error!(
+                target: "panic",
+                "thread '{}' panicked at '{}'{:?}",
+                thread,
+                msg,
+                Shim(backtrace)
+            ),
+        }
+    }));
+
     Ok(())
 }

@@ -141,7 +226,7 @@ fn check_db() {
     // Turn on WAL in SQLite
     if CONFIG.enable_db_wal() {
         use diesel::RunQueryDsl;
-        let connection = db::get_connection().expect("Can't conect to DB");
+        let connection = db::get_connection().expect("Can't connect to DB");
         diesel::sql_query("PRAGMA journal_mode=wal")
             .execute(&connection)
             .expect("Failed to turn on WAL");
@@ -161,7 +246,9 @@ fn check_rsa_keys() {
     info!("JWT keys don't exist, checking if OpenSSL is available...");

     Command::new("openssl").arg("version").status().unwrap_or_else(|_| {
-        info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
+        info!(
+            "Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH"
+        );
         exit(1);
     });

@@ -209,7 +296,9 @@ fn check_web_vault() {
     let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");

     if !index_path.exists() {
-        error!("Web vault is not found. To install it, please follow the steps in https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
+        error!("Web vault is not found. To install it, please follow the steps in: ");
+        error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
+        error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
         exit(1);
     }
 }
@@ -232,39 +321,40 @@ mod migrations {
     let connection = crate::db::get_connection().expect("Can't connect to DB");

     use std::io::stdout;

+    // Disable Foreign Key Checks during migration
+    use diesel::RunQueryDsl;
+    #[cfg(feature = "postgres")]
+    diesel::sql_query("SET CONSTRAINTS ALL DEFERRED").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
+    #[cfg(feature = "mysql")]
+    diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
+    #[cfg(feature = "sqlite")]
+    diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
+
     embedded_migrations::run_with_output(&connection, &mut stdout()).expect("Can't run migrations");
 }

-fn launch_rocket() {
-    // Create Rocket object, this stores current log level and sets it's own
-    let rocket = rocket::ignite();
-
-    // If we aren't logging the mounts, we force the logging level down
-    if !CONFIG.log_mounts() {
-        log::set_max_level(log::LevelFilter::Warn);
-    }
-
-    let rocket = rocket
-        .mount("/", api::web_routes())
-        .mount("/api", api::core_routes())
-        .mount("/admin", api::admin_routes())
-        .mount("/identity", api::identity_routes())
-        .mount("/icons", api::icons_routes())
-        .mount("/notifications", api::notifications_routes());
-
-    // Force the level up for the fairings, managed state and lauch
-    if !CONFIG.log_mounts() {
-        log::set_max_level(log::LevelFilter::max());
-    }
-
-    let rocket = rocket
+fn launch_rocket(extra_debug: bool) {
+    let basepath = &CONFIG.domain_path();
+
+    // If adding more paths here, consider also adding them to
+    // crate::utils::LOGGED_ROUTES to make sure they appear in the log
+    let result = rocket::ignite()
+        .mount(&[basepath, "/"].concat(), api::web_routes())
+        .mount(&[basepath, "/api"].concat(), api::core_routes())
+        .mount(&[basepath, "/admin"].concat(), api::admin_routes())
+        .mount(&[basepath, "/identity"].concat(), api::identity_routes())
+        .mount(&[basepath, "/icons"].concat(), api::icons_routes())
+        .mount(&[basepath, "/notifications"].concat(), api::notifications_routes())
         .manage(db::init_pool())
         .manage(api::start_notification_server())
         .attach(util::AppHeaders())
-        .attach(util::CORS());
+        .attach(util::CORS())
+        .attach(util::BetterLogging(extra_debug))
+        .launch();

     // Launch and print error if there is one
     // The launch will restore the original logging level
-    error!("Launch error {:#?}", rocket.launch());
+    error!("Launch error {:#?}", result);
 }
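Note: the new panic hook relies on the fact that a panic payload is a &'static str for literal panic messages and a String for formatted ones, so both downcasts have to be tried. A std-only sketch of that payload-recovery pattern:

```rust
use std::panic;

fn main() {
    // Payload recovery as in the hook above: try &'static str first
    // (literal messages), then String (formatted messages).
    panic::set_hook(Box::new(|info| {
        let msg = match info.payload().downcast_ref::<&'static str>() {
            Some(s) => *s,
            None => match info.payload().downcast_ref::<String>() {
                Some(s) => s.as_str(),
                None => "Box<Any>",
            },
        };
        eprintln!("caught panic: {}", msg);
    }));

    // A formatted panic carries a String payload; the hook recovers it.
    let _ = panic::catch_unwind(|| panic!("formatted: {}", 42));
}
```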
src/static/global_domains.json
@@ -108,7 +108,9 @@
       "microsoftonline.com",
       "office365.com",
       "microsoftstore.com",
-      "xbox.com"
+      "xbox.com",
+      "azure.com",
+      "windowsazure.com"
     ],
     "Excluded": false
   },
@@ -126,8 +128,7 @@
     "Type": 12,
     "Domains": [
       "overture.com",
-      "yahoo.com",
-      "flickr.com"
+      "yahoo.com"
     ],
     "Excluded": false
   },
@@ -192,7 +193,6 @@
       "amazon.it",
       "amazon.com.au",
       "amazon.co.nz",
-      "amazon.co.jp",
       "amazon.in"
     ],
     "Excluded": false
@@ -775,5 +775,71 @@
       "customercontrolpanel.de"
     ],
     "Excluded": false
+  },
+  {
+    "Type": 76,
+    "Domains": [
+      "docusign.com",
+      "docusign.net"
+    ],
+    "Excluded": false
+  },
+  {
+    "Type": 77,
+    "Domains": [
+      "envato.com",
+      "themeforest.net",
+      "codecanyon.net",
+      "videohive.net",
+      "audiojungle.net",
+      "graphicriver.net",
+      "photodune.net",
+      "3docean.net"
+    ],
+    "Excluded": false
+  },
+  {
+    "Type": 78,
+    "Domains": [
+      "x10hosting.com",
+      "x10premium.com"
+    ],
+    "Excluded": false
+  },
+  {
+    "Type": 79,
+    "Domains": [
+      "dnsomatic.com",
+      "opendns.com",
+      "umbrella.com"
+    ],
+    "Excluded": false
+  },
+  {
+    "Type": 80,
+    "Domains": [
+      "cagreatamerica.com",
+      "canadaswonderland.com",
+      "carowinds.com",
+      "cedarfair.com",
+      "cedarpoint.com",
+      "dorneypark.com",
+      "kingsdominion.com",
+      "knotts.com",
+      "miadventure.com",
+      "schlitterbahn.com",
+      "valleyfair.com",
+      "visitkingsisland.com",
+      "worldsoffun.com"
+    ],
+    "Excluded": false
+  },
+  {
+    "Type": 81,
+    "Domains": [
+      "ubnt.com",
+      "ui.com"
+    ],
+    "Excluded": false
   }
 ]
Binary image changed, not shown (before: 7.4 KiB, after: 5.8 KiB)
src/static/images/shield-white.png: new binary file, not shown (1.9 KiB)
src/static/scripts/bootstrap-native-v4.js (vendored): 3513 lines changed, diff suppressed (too large)
src/static/scripts/bootstrap.css (vendored): 746 lines changed, diff suppressed (too large)
src/static/templates/admin/base.hbs
@@ -1,56 +1,127 @@
 <!DOCTYPE html>
 <html lang="en">

 <head>
-    <meta http-equiv="content-type" content="text/html; charset=UTF-8">
-    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
+    <meta http-equiv="content-type" content="text/html; charset=UTF-8" />
+    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no" />
+    <meta name="robots" content="noindex,nofollow" />
     <title>Bitwarden_rs Admin Panel</title>
-    <link rel="stylesheet" href="/bwrs_static/bootstrap.css" />
-    <script src="/bwrs_static/bootstrap-native-v4.js"></script>
-    <script src="/bwrs_static/md5.js"></script>
-    <script src="/bwrs_static/identicon.js"></script>
+    <link rel="stylesheet" href="{{urlpath}}/bwrs_static/bootstrap.css" />
     <style>
         body {
-            padding-top: 70px;
+            padding-top: 75px;
         }
-
-        @media (max-width:768px) {
-            body {
-                padding-top: 190px;
-            }
-
-            .container {
-                max-width: 100%;
-            }
-        }
-
         img {
             width: 48px;
             height: 48px;
         }
+        .navbar img {
+            height: 24px;
+            width: auto;
+        }
     </style>
+    <script src="{{urlpath}}/bwrs_static/md5.js"></script>
+    <script src="{{urlpath}}/bwrs_static/identicon.js"></script>
+    <script>
+        function reload() { window.location.reload(); }
+        function msg(text, reload_page = true) {
+            text && alert(text);
+            reload_page && reload();
+        }
+        function identicon(email) {
+            const data = new Identicon(md5(email), { size: 48, format: 'svg' });
+            return "data:image/svg+xml;base64," + data.toString();
+        }
+        function toggleVis(input_id) {
+            const elem = document.getElementById(input_id);
+            const type = elem.getAttribute("type");
+            if (type === "text") {
+                elem.setAttribute("type", "password");
+            } else {
+                elem.setAttribute("type", "text");
+            }
+            return false;
+        }
+        function _post(url, successMsg, errMsg, body, reload_page = true) {
+            fetch(url, {
+                method: 'POST',
+                body: body,
+                mode: "same-origin",
+                credentials: "same-origin",
+                headers: { "Content-Type": "application/json" }
+            }).then( resp => {
+                if (resp.ok) { msg(successMsg, reload_page); return Promise.reject({error: false}); }
+                respStatus = resp.status;
+                respStatusText = resp.statusText;
+                return resp.text();
+            }).then( respText => {
+                try {
+                    const respJson = JSON.parse(respText);
+                    return respJson ? respJson.ErrorModel.Message : "Unknown error";
+                } catch (e) {
+                    return Promise.reject({body:respStatus + ' - ' + respStatusText, error: true});
+                }
+            }).then( apiMsg => {
+                msg(errMsg + "\n" + apiMsg, reload_page);
+            }).catch( e => {
+                if (e.error === false) { return true; }
+                else { msg(errMsg + "\n" + e.body, reload_page); }
+            });
+        }
+    </script>

 </head>

 <body class="bg-light">
-    <nav class="navbar navbar-expand-md navbar-dark bg-dark fixed-top shadow">
-        <a class="navbar-brand" href="#">Bitwarden_rs</a>
-        <div class="navbar-collapse">
-            <ul class="navbar-nav">
-                <li class="nav-item active">
-                    <a class="nav-link" href="/admin">Admin Panel</a>
-                </li>
-                <li class="nav-item">
-                    <a class="nav-link" href="/">Vault</a>
-                </li>
-            </ul>
+    <nav class="navbar navbar-expand-md navbar-dark bg-dark mb-4 shadow fixed-top">
+        <div class="container">
+            <a class="navbar-brand" href="{{urlpath}}/admin"><img class="pr-1" src="{{urlpath}}/bwrs_static/shield-white.png">Bitwarden_rs Admin</a>
+            <button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarCollapse"
+                aria-controls="navbarCollapse" aria-expanded="false" aria-label="Toggle navigation">
+                <span class="navbar-toggler-icon"></span>
+            </button>
+            <div class="collapse navbar-collapse" id="navbarCollapse">
+                <ul class="navbar-nav mr-auto">
+                    {{#if logged_in}}
+                    <li class="nav-item">
+                        <a class="nav-link" href="{{urlpath}}/admin">Settings</a>
+                    </li>
+                    <li class="nav-item">
+                        <a class="nav-link" href="{{urlpath}}/admin/users/overview">Users</a>
+                    </li>
+                    <li class="nav-item">
+                        <a class="nav-link" href="{{urlpath}}/admin/organizations/overview">Organizations</a>
+                    </li>
+                    <li class="nav-item">
+                        <a class="nav-link" href="{{urlpath}}/admin/diagnostics">Diagnostics</a>
+                    </li>
+                    {{/if}}
+                    <li class="nav-item">
+                        <a class="nav-link" href="{{urlpath}}/">Vault</a>
+                    </li>
+                </ul>
+
+                {{#if logged_in}}
+                <a class="btn btn-sm btn-secondary" href="{{urlpath}}/admin/logout">Log Out</a>
+                {{/if}}
+            </div>
         </div>
-        {{#if version}}
-        <div class="navbar-text">Version: {{version}}</div>
-        {{/if}}
     </nav>

     {{> (page_content) }}
-</body>
+
+    <!-- This script needs to be at the bottom, else it will fail! -->
+    <script>
+        // get current URL path and assign 'active' class to the correct nav-item
+        (function () {
+            var pathname = window.location.pathname;
+            if (pathname === "") return;
+            var navItem = document.querySelectorAll('.navbar-nav .nav-item a[href="'+pathname+'"]');
+            if (navItem.length === 1) {
+                navItem[0].parentElement.className = navItem[0].parentElement.className + ' active';
+            }
+        })();
+    </script>
+    <!-- This script needs to be at the bottom, else it will fail! -->
+    <script src="{{urlpath}}/bwrs_static/bootstrap-native-v4.js"></script>
+</body>
 </html>
150
src/static/templates/admin/diagnostics.hbs
Normal file
150
src/static/templates/admin/diagnostics.hbs
Normal file
@@ -0,0 +1,150 @@
<main class="container">
    <div id="diagnostics-block" class="my-3 p-3 bg-white rounded shadow">
        <h6 class="border-bottom pb-2 mb-2">Diagnostics</h6>

        <h3>Version</h3>
        <div class="row">
            <div class="col-md">
                <dl class="row">
                    <dt class="col-sm-5">Server Installed
                        <span class="badge badge-success d-none" id="server-success" title="Latest version is installed.">Ok</span>
                        <span class="badge badge-warning d-none" id="server-warning" title="There seems to be an update available.">Update</span>
                        <span class="badge badge-info d-none" id="server-branch" title="This is a branched version.">Branched</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="server-installed">{{version}}</span>
                    </dd>
                    <dt class="col-sm-5">Server Latest
                        <span class="badge badge-danger d-none" id="server-failed" title="Unable to determine latest version.">Unknown</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="server-latest">{{diagnostics.latest_release}}<span id="server-latest-commit" class="d-none">-{{diagnostics.latest_commit}}</span></span>
                    </dd>
                    <dt class="col-sm-5">Web Installed
                        <span class="badge badge-success d-none" id="web-success" title="Latest version is installed.">Ok</span>
                        <span class="badge badge-warning d-none" id="web-warning" title="There seems to be an update available.">Update</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="web-installed">{{diagnostics.web_vault_version}}</span>
                    </dd>
                    <dt class="col-sm-5">Web Latest
                        <span class="badge badge-danger d-none" id="web-failed" title="Unable to determine latest version.">Unknown</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="web-latest">{{diagnostics.latest_web_build}}</span>
                    </dd>
                </dl>
            </div>
        </div>

        <h3>Checks</h3>
        <div class="row">
            <div class="col-md">
                <dl class="row">
                    <dt class="col-sm-5">DNS (github.com)
                        <span class="badge badge-success d-none" id="dns-success" title="DNS resolving works!">Ok</span>
                        <span class="badge badge-danger d-none" id="dns-warning" title="DNS resolving failed. Please fix.">Error</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="dns-resolved">{{diagnostics.dns_resolved}}</span>
                    </dd>

                    <dt class="col-sm-5">Date & Time (UTC)
                        <span class="badge badge-success d-none" id="time-success" title="Time offsets seem to be correct.">Ok</span>
                        <span class="badge badge-danger d-none" id="time-warning" title="Time offsets have drifted too much.">Error</span>
                    </dt>
                    <dd class="col-sm-7">
                        <span id="time-server" class="d-block"><b>Server:</b> <span id="time-server-string">{{diagnostics.server_time}}</span></span>
                        <span id="time-browser" class="d-block"><b>Browser:</b> <span id="time-browser-string"></span></span>
                    </dd>
                </dl>
            </div>
        </div>
    </div>
</main>

<script>
    (() => {
        // Render the browser's current time as a UTC "YYYY-MM-DD HH:MM:SS" string.
        const d = new Date();
        const year = d.getUTCFullYear();
        const month = String(d.getUTCMonth() + 1).padStart(2, '0');
        const day = String(d.getUTCDate()).padStart(2, '0');
        const hour = String(d.getUTCHours()).padStart(2, '0');
        const minute = String(d.getUTCMinutes()).padStart(2, '0');
        const seconds = String(d.getUTCSeconds()).padStart(2, '0');
        const browserUTC = year + '-' + month + '-' + day + ' ' + hour + ':' + minute + ':' + seconds;
        document.getElementById("time-browser-string").innerText = browserUTC;

        // Flag a warning when server and browser clocks differ by more than 30 seconds.
        const serverUTC = document.getElementById("time-server-string").innerText;
        const timeDrift = (Date.parse(serverUTC) - Date.parse(browserUTC)) / 1000;
        if (timeDrift > 30 || timeDrift < -30) {
            document.getElementById('time-warning').classList.remove('d-none');
        } else {
            document.getElementById('time-success').classList.remove('d-none');
        }

        // Check if the DNS output is a valid IPv4 address.
        const isValidIp = value => /^(?:(?:^|\.)(?:2(?:5[0-5]|[0-4]\d)|1?\d?\d)){4}$/.test(value);
        if (isValidIp(document.getElementById('dns-resolved').innerText)) {
            document.getElementById('dns-success').classList.remove('d-none');
        } else {
            document.getElementById('dns-warning').classList.remove('d-none');
        }

        const serverInstalled = document.getElementById('server-installed').innerText;
        const serverLatest = document.getElementById('server-latest').innerText;
        const serverLatestCommit = document.getElementById('server-latest-commit').innerText.replace('-', '');
        if (serverInstalled.indexOf('-') !== -1 && serverLatest !== '-' && serverLatestCommit !== '-') {
            document.getElementById('server-latest-commit').classList.remove('d-none');
        }

        const webInstalled = document.getElementById('web-installed').innerText;
        const webLatest = document.getElementById('web-latest').innerText;

        checkVersions('server', serverInstalled, serverLatest, serverLatestCommit);
        checkVersions('web', webInstalled, webLatest);

        function checkVersions(platform, installed, latest, commit = null) {
            if (installed === '-' || latest === '-') {
                document.getElementById(platform + '-failed').classList.remove('d-none');
                return;
            }

            // Only compare plain versions when there is no commit revision.
            if (commit === null || installed.indexOf('-') === -1) {
                if (installed !== latest) {
                    document.getElementById(platform + '-warning').classList.remove('d-none');
                } else {
                    document.getElementById(platform + '-success').classList.remove('d-none');
                }
            } else {
                // Check whether this is a branched version.
                const branchRegex = /(?:\s)\((.*?)\)/;
                const branchMatch = installed.match(branchRegex);
                if (branchMatch !== null) {
                    document.getElementById(platform + '-branch').classList.remove('d-none');
                }

                // Strip the branch info and check whether there is a commit hash.
                const installedRegex = /(\d+\.\d+\.\d+)-(\w+)/;
                const instMatch = installed.match(installedRegex);

                // A newly tagged version can still point at the same commit hash;
                // in that case only the version number differs while the code is identical.
                if (instMatch !== null && instMatch[2] === commit) {
                    // The commit hashes match, so the latest version is installed.
                    document.getElementById(platform + '-success').classList.remove('d-none');
                    return;
                }

                if (installed === latest) {
                    document.getElementById(platform + '-success').classList.remove('d-none');
                } else {
                    document.getElementById(platform + '-warning').classList.remove('d-none');
                }
            }
        }
    })();
</script>
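The version check above mixes DOM access with the comparison rules, which makes the decision table hard to read. A hypothetical, DOM-free restatement of the same rules (the helper name and sample version strings are illustrative; the separate "Branched" badge is left out since it is shown in addition to, not instead of, these outcomes):

```javascript
// Hypothetical restatement of checkVersions() above; returns the badge id suffix.
function versionBadge(installed, latest, commit = null) {
    if (installed === '-' || latest === '-') return 'failed';             // latest unknown
    if (commit === null || installed.indexOf('-') === -1) {               // plain releases
        return installed === latest ? 'success' : 'warning';
    }
    const instMatch = installed.match(/(\d+\.\d+\.\d+)-(\w+)/);
    if (instMatch !== null && instMatch[2] === commit) return 'success';  // same commit as latest
    return installed === latest ? 'success' : 'warning';
}

// Sample outcomes (illustrative version strings):
console.log(versionBadge('1.14.2', '1.14.2'));                    // "success"
console.log(versionBadge('1.14.1', '1.14.2'));                    // "warning"
console.log(versionBadge('1.14.2-abc1234', '1.14.2', 'abc1234')); // "success": tag points at same commit
console.log(versionBadge('-', '1.14.2'));                         // "failed": installed version unknown
```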
@@ -14,7 +14,7 @@
 
         <form class="form-inline" method="post">
             <input type="password" class="form-control w-50 mr-2" name="token" placeholder="Enter admin token">
-            <button type="submit" class="btn btn-primary">Save</button>
+            <button type="submit" class="btn btn-primary">Enter</button>
         </form>
     </div>
 </div>
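Since the form above uses `method="post"` with no explicit `action`, it submits the token form-encoded back to the page it was served from. A hedged sketch of an equivalent request (the `/admin` path is an assumption here, not confirmed by this hunk):

```javascript
// Hypothetical equivalent of submitting the admin-token form above.
// Assumes the admin login page lives at /admin and accepts form-encoded POSTs.
fetch('/admin', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ token: 'your-admin-token' }),
}).then(resp => console.log(resp.status));
```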
Some files were not shown because too many files have changed in this diff.