Mirror of https://github.com/dani-garcia/vaultwarden.git
Synced 2025-09-10 18:55:57 +03:00
Compare commits
260 Commits
Commit SHAs:
94341f9f3f ff19fb3426 baac8d9627 669b101e6a 935f38692f d2d9fb08cc b85d548879 35f30088b2
dce054e632 ba725e1c25 b837348b25 7d9c7017c9 d6b9b8bf0c bd09fe1a3d bcbe6177b8 9b1d07365e
37b212427c 078234d8b3 3ce0c3d1a5 2ee07ea1d8 40c339db9b 402c1cd06c 819f340f39 1b4b40c95d
afd9f4e278 47a9461f39 c6f64d8368 edabf19ddf a30d5f4cf9 3fa78e7bb1 a8a7e4f9a5 5d3b765a23
70f3ab8ec3 b6612e90ca 161cccca30 84dc2eda1f 390d10d656 1f775f4414 cc404b4edc 536672ac1b
e41e7c07db f1d3b03c60 2ebff958a4 edfdda86ae 97fb7b5b96 f6de144cbb 5a974c7b94 5f61607419
7439aeb63e cd8907542a 8a5450e830 ad9f2b2d8e 2f4a9865e1 0a3008e753 29a0795219 63459c5f72
916e96b143 325039c316 c5b97f4146 03233429f4 0a72c4b6db 8867626de8 f5916ec396 ebb36235a7
def174a517 2798f623d4 480ba933fa 3d1ee9ef62 5352321fe1 c4101162d6 632d55265b e277f7d1c1
ff7b4a3d38 d212dfe735 84ed185579 c0ba3406ef e196ba6e86 76743aee48 9ebca99290 a734ad2d36
baf7d1be4e 31bcd1bf7c a3b30ed65a 59e50b03bd 0a88f020e1 c058a1d63c 96a189deb9 8c229920ad
d592323e39 402c857d17 def858854b f6761ac30e f8e49ea3f4 f6a4a2127b 446fc3f1f8 146525db91
1698b43f9b 078b21db85 43adcde094 7a0bb18dcf 47a5a4e1fc 0f0e5876ae 43aa75dc89 95dd1cd7ad
36ae946655 24edc94f9d 4deae76347 8280d200ea 8ee0c57224 cb6f392774 f250c54813 5c6081c4e2
88c56de97b e274af6e3d a0ece3754b 0bcc2ae7ab bdb90460c4 824137a02c 2edc699eac 8e79366076
c1e39b182f 13eb276085 4cec502f7b 2545469713 f09996a21d 5cabf4d040 a03db6d224 8d1b72b951
912e1f93b7 a5aa4d9b54 e777be3dde b5441f6b77 dbbd63e519 adc443ea80 0d32179d07 b45b02b37e
12928b832c 1e224220a8 3471e2660f 924ba153aa bd1e8be328 cf5a985b31 607521c88f 486c7d8c56
4b71197c97 8b8839d049 b209c1bc4d 2b8d08a3f4 cbadf00941 c5b7447dac 64d6f72e6c a19a6fb016
b889e5185e cd83a9e7b2 748c825202 204993568a 70be2d93ce f5638716d2 fbc2fad9c9 3f39e35123
3f6809bcdf 9ff577a7b4 c52adef919 cbb92bcbc0 948798a84f 2ffc3eac4d 0ff7fd939e ca7c5129b2
07e0fdbd2a b4dfc24040 85dbf4e16c efc65b93f8 9a0fe6f617 3442eb1b9d e449912f05 72a46fb386
d29b6bee28 e2e3712921 00a11b1b78 77b78f0991 ee550be80c 97d41c2686 fccc0a4b05 57b1d3f850
77d40833d9 7814218208 95a7ffdf6b ebc47dc161 cd8acc2e8c 3b7a5bd102 d3054d4f83 5ac66b05e3
83fd44eeef 2edecf34ff 18bc8331f9 7d956c5117 603a964579 dc515b83f3 9466f02696 d3bd2774dc
f482585d7c 2cde814aaa d989a19f76 d292269ea0 ebf40099f2 0586c00285 bb9ddd5680 cb1663fc12
45d9d8db94 edc482c8ea 6e5c03cc78 881c1978eb 662bc27523 b4b62c22a4 05569147af 99a635d327
e6b763026e c182583e09 d821389c2e be2916333b 9124d8a3fb 7b1da527a6 e7b8602e1f d6e9af909b
acdd42935b 8367d1d715 56f12dc982 4c07f05b3a b73ff886c3 2e7bd62353 1264eb640a 3a90364b32
f5f9861a78 f9408a00c6 ae8bf954c1 c656f2f694 eea3f13bb3 df8114f8be dda244edd8 cce3ce816c
65c0d1064b 5a2f968d7a 16d88402cb 7dcf18151d e3404dd322 bfc517ee80 4a7d2a1e28 66a68f6d22
469318bcbd c07c9995ea 5d50b1ee3c c99df1c310 591ae10144 ad2225b6e5 5609103a97 6d460b44b0
efd8d9f528 29aedd388e 27e0e41835 0b60f20eb3
@@ -21,6 +21,10 @@
## Automatically reload the templates for every request, slow, use only for development
# RELOAD_TEMPLATES=false

## Client IP Header, used to identify the IP of the client, defaults to "X-Client-IP"
## Set to the string "none" (without quotes), to disable any headers and just use the remote IP
# IP_HEADER=X-Client-IP
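
As an illustration of how such a header-based lookup typically behaves (a minimal sketch with hypothetical names, not the server's actual code): the configured header is consulted first, the socket's remote address is the fallback, and the literal value "none" disables the header entirely.

    use std::net::{IpAddr, SocketAddr};

    // Resolve the client IP: prefer the configured header, fall back to the
    // socket's remote address. Setting ip_header to "none" skips the header.
    // Hypothetical helper for illustration only.
    fn client_ip(ip_header: &str, header_value: Option<&str>, remote: SocketAddr) -> IpAddr {
        if ip_header != "none" {
            if let Some(ip) = header_value.and_then(|v| v.parse::<IpAddr>().ok()) {
                return ip;
            }
        }
        remote.ip()
    }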

## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
# ICON_CACHE_TTL=2592000
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
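
The "0 is forever" convention amounts to: an entry is stale only when a nonzero TTL has elapsed. A minimal sketch of that check (hypothetical helper, assuming the TTL is already parsed into seconds):

    use std::time::{Duration, SystemTime};

    // An icon cached at `cached_at` is stale only if a nonzero TTL has elapsed;
    // a TTL of 0 means "cache forever". Illustrative sketch, not project code.
    fn is_stale(cached_at: SystemTime, ttl_secs: u64) -> bool {
        if ttl_secs == 0 {
            return false; // 0 is "forever"
        }
        match cached_at.elapsed() {
            Ok(age) => age > Duration::from_secs(ttl_secs),
            Err(_) => false, // clock went backwards; treat as fresh
        }
    }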
@@ -37,14 +41,10 @@
# WEBSOCKET_ADDRESS=0.0.0.0
# WEBSOCKET_PORT=3012

## Enable extended logging
## This shows timestamps and allows logging to file and to syslog
### To enable logging to file, use the LOG_FILE env variable
### To enable syslog, use the USE_SYSLOG env variable
## Enable extended logging, which shows timestamps and targets in the logs
# EXTENDED_LOGGING=true

## Logging to file
## This requires extended logging
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
# LOG_FILE=/path/to/log

@@ -56,7 +56,8 @@
## Log level
## Change the verbosity of the log output
## Valid values are "trace", "debug", "info", "warn", "error" and "off"
## This requires extended logging
## Setting it to "trace" or "debug" would also show logs for mounted
## routes and static file, websocket and alive requests
# LOG_LEVEL=Info

## Enable WAL for the DB
@@ -83,6 +84,10 @@
## Useful to hide other servers in the local network. Check the WIKI for more details
# ICON_BLACKLIST_REGEX=192\.168\.1\.[0-9].*^

## Any IP which is not defined as a global IP will be blacklisted.
## Useful to secure your internal environment: see https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
# ICON_BLACKLIST_NON_GLOBAL_IPS=true
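
For IPv4, a "non-global" test roughly means checking the reserved ranges by hand (Rust's IpAddr::is_global is unstable). A hedged approximation, not the server's exact rule set:

    use std::net::Ipv4Addr;

    // Rough IPv4 "non-global" test covering the common reserved ranges:
    // loopback, RFC 1918 private, link-local, unspecified and broadcast.
    // The full list in RFC 6890 is longer; this is an illustration only.
    fn is_non_global(ip: Ipv4Addr) -> bool {
        ip.is_loopback()
            || ip.is_private()
            || ip.is_link_local()
            || ip.is_unspecified()
            || ip.is_broadcast()
    }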

## Disable 2FA remember
## Enabling this would force the users to use a second factor to login every time.
## Note that the checkbox would still be present, but ignored.
@@ -91,10 +96,31 @@
## Controls if new users can register
# SIGNUPS_ALLOWED=true

## Controls if new users need to verify their email address upon registration
## Note that setting this option to true prevents logins until the email address has been verified!
## The welcome email will include a verification link, and login attempts will periodically
## trigger another verification email to be sent.
# SIGNUPS_VERIFY=false

## If SIGNUPS_VERIFY is set to true, this limits how many seconds must pass after the last
## verification email was sent before another verification email will be sent
# SIGNUPS_VERIFY_RESEND_TIME=3600

## If SIGNUPS_VERIFY is set to true, this limits how many times an email verification
## email will be re-sent upon an attempted login.
# SIGNUPS_VERIFY_RESEND_LIMIT=6
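
Taken together, the two settings describe a simple throttle: resend only when enough time has passed since the last verification email and the send count is still under the limit. A sketch of that gate (field and function names are hypothetical, not the project's actual code):

    use std::time::{Duration, SystemTime};

    // Decide whether another verification email may be sent on a login attempt,
    // honoring SIGNUPS_VERIFY_RESEND_TIME (seconds) and SIGNUPS_VERIFY_RESEND_LIMIT.
    fn may_resend(last_sent: SystemTime, sends_so_far: u32, resend_time_secs: u64, resend_limit: u32) -> bool {
        let waited_long_enough = last_sent
            .elapsed()
            .map(|age| age >= Duration::from_secs(resend_time_secs))
            .unwrap_or(false);
        waited_long_enough && sends_so_far < resend_limit
    }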

## Controls if new users from a list of comma-separated domains can register
## even if SIGNUPS_ALLOWED is set to false
# SIGNUPS_DOMAINS_WHITELIST=example.com,example.net,example.org
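
Checking a signup address against such a comma-separated whitelist reduces to comparing the email's domain part with each entry; a self-contained sketch (hypothetical helper):

    // True if the email's domain appears in the comma-separated whitelist,
    // e.g. whitelist = "example.com,example.net,example.org".
    // Illustrative sketch, not the server's actual implementation.
    fn domain_whitelisted(email: &str, whitelist: &str) -> bool {
        match email.rsplit_once('@') {
            Some((_, domain)) => whitelist
                .split(',')
                .any(|d| d.trim().eq_ignore_ascii_case(domain)),
            None => false,
        }
    }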

## Token for the admin interface, preferably use a long random string
## One option is to use 'openssl rand -base64 48'
## If not set, the admin panel is disabled
# ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp

## Enable this to bypass the admin panel security. This option is only
## meant to be used with a separate auth layer in front
# DISABLE_ADMIN_TOKEN=false

## Allows org admins to invite users, even when signups are disabled
@@ -133,6 +159,18 @@
## After that, you should be able to follow the rest of the guide linked above,
## ignoring the fields that ask for the values that you already configured beforehand.

## Authenticator Settings
## Disable authenticator time drifted codes to be valid.
## TOTP codes of the previous and next 30 seconds will be invalid
##
## According to RFC 6238 (https://tools.ietf.org/html/rfc6238),
## we allow by default the TOTP code which was valid one step back and one in the future.
## This can however allow attackers to be a bit luckier with their attempts, because there are 3 valid codes.
## You can disable this, so that only the current TOTP code is allowed.
## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid.
## In any case, if a code has been used it cannot be used again; codes which predate it will also be invalid.
# AUTHENTICATOR_DISABLE_TIME_DRIFT = false
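
The drift window described above boils down to testing the submitted code against up to three step counters, t-1, t and t+1 with t = unix_time / 30, and against only t when the option is enabled. A sketch of that selection logic (the HOTP computation itself is stubbed out; this is not the project's actual implementation):

    // Placeholder for the RFC 4226 HOTP computation (HMAC over the big-endian
    // step counter); a real implementation would use an HMAC crate.
    fn totp_code(_secret: &[u8], _step: u64) -> u32 {
        unimplemented!("HMAC computation elided for brevity")
    }

    // Accept the current 30-second step and, unless drift tolerance is
    // disabled, also one step back and one step forward (RFC 6238 window).
    fn totp_valid(secret: &[u8], submitted: u32, now_unix: u64, disable_drift: bool) -> bool {
        let step = now_unix / 30;
        let candidates = if disable_drift {
            vec![step]
        } else {
            vec![step.wrapping_sub(1), step, step + 1]
        };
        candidates.into_iter().any(|s| totp_code(secret, s) == submitted)
    }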

## Rocket specific settings, check Rocket documentation to learn more
# ROCKET_ENV=staging
# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
@@ -150,3 +188,6 @@
# SMTP_USERNAME=username
# SMTP_PASSWORD=password
# SMTP_AUTH_MECHANISM="Plain"
# SMTP_TIMEOUT=15

# vim: syntax=ini
.github/FUNDING.yml (vendored, new file, +2)
@@ -0,0 +1,2 @@
github: dani-garcia
custom: ["https://paypal.me/DaniGG"]
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, +42)
@@ -0,0 +1,42 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

<!--
Please fill out the following template to make solving your problem easier and faster for us.
This is only a guideline. If you think that parts are unnecessary for your issue, feel free to remove them.

Remember to hide/obfuscate personal and confidential information,
such as names, global IP/DNS addresses and especially passwords, if necessary.
-->

### Subject of the issue
<!-- Describe your issue here. -->

### Your environment
<!-- The version number, obtained from the logs or the admin page -->
* Bitwarden_rs version:
<!-- How the server was installed: Docker image / package / built from source -->
* Install method:
* Clients used: <!-- if applicable -->
* Reverse proxy and version: <!-- if applicable -->
* Version of mysql/postgresql: <!-- if applicable -->
* Other relevant information:

### Steps to reproduce
<!-- Tell us how to reproduce this issue. What parameters did you set (differently from the defaults)
and how did you start bitwarden_rs? -->

### Expected behaviour
<!-- Tell us what should happen -->

### Actual behaviour
<!-- Tell us what happens instead -->

### Relevant logs
<!-- Share some logfiles, screenshots or output of relevant programs with us. -->
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, +11)
@@ -0,0 +1,11 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: better for forum
assignees: ''

---

# Please submit all your feature requests to the forum
Link: https://bitwardenrs.discourse.group/c/feature-requests
.github/ISSUE_TEMPLATE/help-with-installation-configuration.md (vendored, new file, +11)
@@ -0,0 +1,11 @@
---
name: Help with installation/configuration
about: Any questions about the setup of bitwarden_rs
title: ''
labels: better for forum
assignees: ''

---

# Please submit all your third party help requests to the forum
Link: https://bitwardenrs.discourse.group/c/help
.github/ISSUE_TEMPLATE/help-with-proxy-database-nas-setup.md (vendored, new file, +11)
@@ -0,0 +1,11 @@
---
name: Help with proxy/database/NAS setup
about: Any questions about third party software
title: ''
labels: better for forum
assignees: ''

---

# Please submit all your third party help requests to the forum
Link: https://bitwardenrs.discourse.group/c/third-party-help
.github/workflows/workspace.yml (vendored, new file, +148)
@@ -0,0 +1,148 @@
name: Workflow

on:
  push:
    paths-ignore:
      - "**.md"
  #pull_request:
  #  paths-ignore:
  #    - "**.md"

jobs:
  build:
    name: Build
    strategy:
      fail-fast: false
      matrix:
        db-backend: [sqlite, mysql, postgresql]
        target:
          - x86_64-unknown-linux-gnu
          # - x86_64-unknown-linux-musl
          # - x86_64-apple-darwin
          # - x86_64-pc-windows-msvc
        include:
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-latest
            ext:
          # - target: x86_64-unknown-linux-musl
          #   os: ubuntu-latest
          #   ext:
          # - target: x86_64-apple-darwin
          #   os: macOS-latest
          #   ext:
          # - target: x86_64-pc-windows-msvc
          #   os: windows-latest
          #   ext: .exe
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v1

      # - name: Cache choco cache
      #   uses: actions/cache@v1.0.3
      #   if: matrix.os == 'windows-latest'
      #   with:
      #     path: ~\AppData\Local\Temp\chocolatey
      #     key: ${{ runner.os }}-choco-cache-${{ matrix.db-backend }}

      - name: Cache vcpkg installed
        uses: actions/cache@v1.0.3
        if: matrix.os == 'windows-latest'
        with:
          path: $VCPKG_ROOT/installed
          key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
        env:
          VCPKG_ROOT: 'C:\vcpkg'

      - name: Cache vcpkg downloads
        uses: actions/cache@v1.0.3
        if: matrix.os == 'windows-latest'
        with:
          path: $VCPKG_ROOT/downloads
          key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
        env:
          VCPKG_ROOT: 'C:\vcpkg'

      # - name: Cache homebrew
      #   uses: actions/cache@v1.0.3
      #   if: matrix.os == 'macOS-latest'
      #   with:
      #     path: ~/Library/Caches/Homebrew
      #     key: ${{ runner.os }}-brew-cache

      # - name: Cache apt
      #   uses: actions/cache@v1.0.3
      #   if: matrix.os == 'ubuntu-latest'
      #   with:
      #     path: /var/cache/apt/archives
      #     key: ${{ runner.os }}-apt-cache

      # Install dependencies
      - name: Install dependencies macOS
        run: brew update; brew install openssl sqlite libpq mysql
        if: matrix.os == 'macOS-latest'

      - name: Install dependencies Ubuntu
        run: sudo apt-get update && sudo apt-get install --no-install-recommends openssl sqlite libpq-dev libmysql++-dev
        if: matrix.os == 'ubuntu-latest'

      - name: Install dependencies Windows
        run: vcpkg integrate install; vcpkg install sqlite3:x64-windows openssl:x64-windows libpq:x64-windows libmysql:x64-windows
        if: matrix.os == 'windows-latest'
        env:
          VCPKG_ROOT: 'C:\vcpkg'
      # End Install dependencies

      # Install rust nightly toolchain
      - name: Cache cargo registry
        uses: actions/cache@v1.0.3
        with:
          path: ~/.cargo/registry
          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
      - name: Cache cargo index
        uses: actions/cache@v1.0.3
        with:
          path: ~/.cargo/git
          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
      - name: Cache cargo build
        uses: actions/cache@v1.0.3
        with:
          path: target
          key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

      - name: Install latest nightly
        uses: actions-rs/toolchain@v1.0.5
        with:
          # Uses rust-toolchain to determine version
          profile: minimal
          target: ${{ matrix.target }}

      # Build
      - name: Build Win
        if: matrix.os == 'windows-latest'
        run: cargo.exe build --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
        env:
          RUSTFLAGS: -Ctarget-feature=+crt-static
          VCPKG_ROOT: 'C:\vcpkg'

      - name: Build macOS / Ubuntu
        if: matrix.os == 'macOS-latest' || matrix.os == 'ubuntu-latest'
        run: cargo build --verbose --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}

      # Test
      - name: Run tests
        run: cargo test --features ${{ matrix.db-backend }}

      # Upload & Release
      - name: Upload artifact
        uses: actions/upload-artifact@v1.0.0
        with:
          name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
          path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}

      - name: Release
        uses: Shopify/upload-to-release@1.0.0
        if: startsWith(github.ref, 'refs/tags/')
        with:
          name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
          path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
          repo-token: ${{ secrets.GITHUB_TOKEN }}
@@ -11,10 +11,11 @@ cache: cargo
before_install:
- sudo curl -L https://github.com/hadolint/hadolint/releases/download/v$HADOLINT_VERSION/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint
- sudo chmod +rx /usr/local/bin/hadolint
- rustup set profile minimal

# Nothing to install
install: true
script:
- git ls-files --exclude='Dockerfile*' --ignored | xargs --max-lines=1 hadolint
- cargo build --features "sqlite"
- cargo build --features "mysql"
- cargo test --features "sqlite"
- cargo test --features "mysql"
Cargo.lock (generated, 3286 changed lines)
File diff suppressed because it is too large.
Cargo.toml (86 changed lines)
@@ -14,6 +14,7 @@ build = "build.rs"
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
enable_syslog = []
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]

[target."cfg(not(windows))".dependencies]
@@ -25,93 +26,108 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
rocket_contrib = "0.5.0-dev"

# HTTP client
reqwest = "0.9.19"
reqwest = { version = "0.10.4", features = ["blocking", "json"] }

# multipart/form-data support
multipart = { version = "0.16.1", features = ["server"], default-features = false }

# WebSockets library
ws = "0.9.0"
ws = "0.9.1"

# MessagePack library
rmpv = "0.4.0"
rmpv = "0.4.4"

# Concurrent hashmap implementation
chashmap = "2.2.2"

# A generic serialization/deserialization framework
serde = "1.0.99"
serde_derive = "1.0.99"
serde_json = "1.0.40"
serde = "1.0.104"
serde_derive = "1.0.104"
serde_json = "1.0.48"

# Logging
log = "0.4.8"
fern = { version = "0.5.8", features = ["syslog-4"] }
fern = { version = "0.6.0", features = ["syslog-4"] }

# A safe, extensible ORM and Query builder
diesel = { version = "1.4.2", features = [ "chrono", "r2d2"] }
diesel = { version = "1.4.3", features = [ "chrono", "r2d2"] }
diesel_migrations = "1.4.0"

# Bundled SQLite
libsqlite3-sys = { version = "0.12.0", features = ["bundled"], optional = true }
libsqlite3-sys = { version = "0.16.0", features = ["bundled"], optional = true }

# Crypto library
ring = "0.14.6"
ring = "0.16.11"

# UUID generation
uuid = { version = "0.7.4", features = ["v4"] }
uuid = { version = "0.8.1", features = ["v4"] }

# Date and time library for Rust
chrono = "0.4.7"
chrono = "0.4.11"
time = "0.2.9"

# TOTP library
oath = "0.10.2"

# Data encoding library
data-encoding = "2.1.2"
data-encoding = "2.2.0"

# JWT library
jsonwebtoken = "6.0.1"
jsonwebtoken = "7.1.0"

# U2F library
u2f = "0.1.6"
u2f = "0.2.0"

# Yubico Library
yubico = { version = "0.6.1", features = ["online", "online-tokio"], default-features = false }
yubico = { version = "0.9.0", features = ["online-tokio"], default-features = false }

# A `dotenv` implementation for Rust
dotenv = { version = "0.14.1", default-features = false }
dotenv = { version = "0.15.0", default-features = false }

# Lazy static macro
lazy_static = "1.3.0"
# Lazy initialization
once_cell = "1.3.1"

# More derives
derive_more = "0.15.0"
derive_more = "0.99.3"

# Numerical libraries
num-traits = "0.2.8"
num-derive = "0.2.5"
num-traits = "0.2.11"
num-derive = "0.3.0"

# Email libraries
lettre = "0.9.2"
lettre_email = "0.9.2"
native-tls = "0.2.3"
quoted_printable = "0.4.1"
lettre = "0.10.0-pre"
native-tls = "0.2.4"
quoted_printable = "0.4.2"

# Template library
handlebars = "2.0.1"
handlebars = { version = "3.0.1", features = ["dir_source"] }

# For favicon extraction from main website
soup = "0.4.1"
regex = "1.2.1"
soup = "0.5.0"
regex = "1.3.4"
data-url = "0.1.0"

# Used by U2F, JWT and Postgres
openssl = "0.10.28"

# URL encoding library
percent-encoding = "2.1.0"
# Punycode conversion
idna = "0.2.0"

# CLI argument parsing
structopt = "0.3.11"

# Logging panics to logfile instead of stderr only
backtrace = "0.3.45"

[patch.crates-io]
# Add support for Timestamp type
rmp = { git = 'https://github.com/dani-garcia/msgpack-rust' }

# Use newest ring
rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'dbcb0a75b9556763ac3ab708f40c8f8ed75f1a1e' }
rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'dbcb0a75b9556763ac3ab708f40c8f8ed75f1a1e' }
rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'dfc9e9aab01d349da32c52db393e35b7fffea63c' }
rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'dfc9e9aab01d349da32c52db393e35b7fffea63c' }

# Use git version for timeout fix #706
lettre = { git = 'https://github.com/lettre/lettre', rev = '245c600c82ee18b766e8729f005ff453a55dce34' }

# For favicon extraction from main website
data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '7f1bd6ce1c2fde599a757302a843a60e714c5f72' }
README.md (14 changed lines)
@@ -13,7 +13,7 @@ Image is based on [Rust implementation of Bitwarden API](https://github.com/dani

**This project is not associated with the [Bitwarden](https://bitwarden.com/) project nor 8bit Solutions LLC.**

#### ⚠️**IMPORTANT**⚠️: When using this server, please report any Bitwarden related bug-reports or suggestions [here](https://github.com/dani-garcia/bitwarden_rs/issues/new), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official support channels.
#### ⚠️**IMPORTANT**⚠️: When using this server, please report any bugs or suggestions to us directly (look at the bottom of this page for ways to get in touch), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official support channels.

---

@@ -21,14 +21,14 @@ Image is based on [Rust implementation of Bitwarden API](https://github.com/dani

Basically full implementation of Bitwarden API is provided including:

* Basic single user functionality
* Single user functionality
* Organizations support
* Attachments
* Vault API support
* Serving the static files for Vault interface
* Website icons API
* Authenticator and U2F support
* YubiKey OTP
* YubiKey and Duo support

## Installation
Pull the docker image and mount a volume from the host for persistent storage:
@@ -49,7 +49,13 @@ If you have an available domain name, you can get HTTPS certificates with [Let's
See the [bitwarden_rs wiki](https://github.com/dani-garcia/bitwarden_rs/wiki) for more information on how to configure and run the bitwarden_rs server.

## Get in touch
To ask a question, offer suggestions or new features or to get help configuring or installing the software, please [use the forum](https://bitwardenrs.discourse.group/).

To ask an question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine, also please report any bugs spotted here.
If you spot any bugs or crashes with bitwarden_rs itself, please [create an issue](https://github.com/dani-garcia/bitwarden_rs/issues/). Make sure there aren't any similar issues open, though!

If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us!

### Sponsors
Thanks for your contribution to the project!

- [@ChonoN](https://github.com/ChonoN)
@@ -4,7 +4,7 @@ pool:
steps:
- script: |
    ls -la
    curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain)
    curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain) --profile=minimal
    echo "##vso[task.prependpath]$HOME/.cargo/bin"
  displayName: 'Install Rust'

@@ -18,8 +18,8 @@ steps:
    cargo -V
  displayName: Query rust and cargo versions

- script : cargo build --features "sqlite"
  displayName: 'Build project with sqlite backend'
- script : cargo test --features "sqlite"
  displayName: 'Test project with sqlite backend'

- script : cargo build --features "mysql"
  displayName: 'Build project with mysql backend'
- script : cargo test --features "mysql"
  displayName: 'Test project with mysql backend'
build.rs (8 changed lines)
@@ -2,9 +2,13 @@ use std::process::Command;

fn main() {
    #[cfg(all(feature = "sqlite", feature = "mysql"))]
    compile_error!("Can't enable both backends");
    compile_error!("Can't enable both sqlite and mysql at the same time");
    #[cfg(all(feature = "sqlite", feature = "postgresql"))]
    compile_error!("Can't enable both sqlite and postgresql at the same time");
    #[cfg(all(feature = "mysql", feature = "postgresql"))]
    compile_error!("Can't enable both mysql and postgresql at the same time");

    #[cfg(not(any(feature = "sqlite", feature = "mysql")))]
    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");

    read_git_info().ok();
docker/Dockerfile.j2 (new file, +296)
@@ -0,0 +1,296 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

{% set build_stage_base_image = "rust:1.40" %}
{% if "alpine" in target_file %}
{% set build_stage_base_image = "clux/muslrust:nightly-2020-03-09" %}
{% set runtime_stage_base_image = "alpine:3.11" %}
{% set package_arch_name = "" %}
{% elif "amd64" in target_file %}
{% set runtime_stage_base_image = "debian:buster-slim" %}
{% set package_arch_name = "" %}
{% elif "aarch64" in target_file %}
{% set runtime_stage_base_image = "balenalib/aarch64-debian:buster" %}
{% set package_arch_name = "arm64" %}
{% elif "armv6" in target_file %}
{% set runtime_stage_base_image = "balenalib/rpi-debian:buster" %}
{% set package_arch_name = "armel" %}
{% elif "armv7" in target_file %}
{% set runtime_stage_base_image = "balenalib/armv7hf-debian:buster" %}
{% set package_arch_name = "armhf" %}
{% endif %}
{% set package_arch_prefix = ":" + package_arch_name %}
{% if package_arch_name == "" %}
{% set package_arch_prefix = "" %}
{% endif %}
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
{% set vault_image_hash = "sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c" %}
{% raw %}
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#   docker pull bitwardenrs/web-vault:v2.12.0e
#   docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#   docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
{% endraw %}
FROM bitwardenrs/web-vault@{{ vault_image_hash }} as vault

########################## BUILD IMAGE ##########################
{% if "musl" in build_stage_base_image %}
# Musl build image for statically compiled binary
{% else %}
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
{% endif %}
FROM {{ build_stage_base_image }} as build

{% if "sqlite" in target_file %}
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

{% elif "mysql" in target_file %}
# set mysql backend
ARG DB=mysql

{% elif "postgresql" in target_file %}
# set postgresql backend
ARG DB=postgresql

{% endif %}
# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

{% if "alpine" in target_file %}
ENV USER "root"
ENV RUSTFLAGS='-C link-arg=-s'

{% elif "aarch64" in target_file or "armv" in target_file %}
# Install required build libs for {{ package_arch_name }} architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture {{ package_arch_name }} \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev{{ package_arch_prefix }} \
        libc6-dev{{ package_arch_prefix }}

{% endif -%}
{% if "aarch64" in target_file %}
RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        gcc-aarch64-linux-gnu \
    && mkdir -p ~/.cargo \
    && echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
    && echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

{% elif "armv6" in target_file %}
RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        gcc-arm-linux-gnueabi \
    && mkdir -p ~/.cargo \
    && echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
    && echo 'linker = "arm-linux-gnueabi-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

{% elif "armv7" in target_file %}
RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        gcc-arm-linux-gnueabihf \
    && mkdir -p ~/.cargo \
    && echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
    && echo 'linker = "arm-linux-gnueabihf-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

{% endif %}
{% if "mysql" in target_file %}
# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
{% if "musl" in build_stage_base_image %}
    libmysqlclient-dev{{ package_arch_prefix }} \
{% else %}
    libmariadb-dev{{ package_arch_prefix }} \
{% endif %}
    && rm -rf /var/lib/apt/lists/*

{% elif "postgresql" in target_file %}
# Install PostgreSQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libpq-dev{{ package_arch_prefix }} \
    && rm -rf /var/lib/apt/lists/*

{% endif %}
# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

{% if "aarch64" in target_file %}
ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
{% elif "armv6" in target_file %}
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
{% elif "armv7" in target_file %}
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
{% endif -%}

{% if "alpine" in target_file %}
RUN rustup target add x86_64-unknown-linux-musl

{% elif "aarch64" in target_file %}
RUN rustup target add aarch64-unknown-linux-gnu

{% elif "armv6" in target_file %}
RUN rustup target add arm-unknown-linux-gnueabi

{% elif "armv7" in target_file %}
RUN rustup target add armv7-unknown-linux-gnueabihf
{% endif %}
# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
{% if "amd64" in target_file %}
RUN cargo build --features ${DB} --release
{% elif "aarch64" in target_file %}
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
{% elif "armv6" in target_file %}
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
{% elif "armv7" in target_file %}
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
{% endif %}

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM {{ runtime_stage_base_image }}

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
{% if "alpine" in runtime_stage_base_image %}
ENV SSL_CERT_DIR=/etc/ssl/certs
{% endif %}

{% if "amd64" not in target_file %}
RUN [ "cross-build-start" ]

{% endif %}
# Install needed libraries
{% if "alpine" in runtime_stage_base_image %}
RUN apk add --no-cache \
    openssl \
    curl \
{% if "sqlite" in target_file %}
    sqlite \
{% elif "mysql" in target_file %}
    mariadb-connector-c \
{% elif "postgresql" in target_file %}
    postgresql-libs \
{% endif %}
    ca-certificates
{% else %}
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
{% if "sqlite" in target_file %}
    sqlite3 \
{% elif "mysql" in target_file %}
    libmariadbclient-dev \
{% elif "postgresql" in target_file %}
    libpq5 \
{% endif %}
    && rm -rf /var/lib/apt/lists/*
{% endif %}

RUN mkdir /data
{% if "amd64" not in target_file %}

RUN [ "cross-build-end" ]

{% endif %}
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
{% if "alpine" in target_file %}
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
{% elif "aarch64" in target_file %}
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .
{% elif "armv6" in target_file %}
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .
{% elif "armv7" in target_file %}
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .
{% else %}
COPY --from=build app/target/release/bitwarden_rs .
{% endif %}

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/Makefile (new file, +9)
@@ -0,0 +1,9 @@
OBJECTS := $(shell find -mindepth 2 -name 'Dockerfile*')

all: $(OBJECTS)

%/Dockerfile: Dockerfile.j2 render_template
	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"

%/Dockerfile.alpine: Dockerfile.j2 render_template
	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#   docker pull bitwardenrs/web-vault:v2.12.0e
#   docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#   docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for arm64 architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64

RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
@@ -39,36 +52,48 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmariadb-dev:arm64 \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl arm64 libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64 \
        libmariadb-dev:arm64
# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
RUN rustup target add aarch64-unknown-linux-gnu

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add aarch64-unknown-linux-gnu
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-debian:stretch
FROM balenalib/aarch64-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -81,6 +106,7 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

@@ -90,6 +116,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -97,5 +124,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#   docker pull bitwardenrs/web-vault:v2.12.0e
#   docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#   docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward comaptibility
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for arm64 architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64

RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
@@ -39,36 +52,42 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl arm64 libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64 \
        libmariadb-dev:arm64
# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
RUN rustup target add aarch64-unknown-linux-gnu

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add aarch64-unknown-linux-gnu
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-debian:stretch
FROM balenalib/aarch64-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -81,7 +100,8 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    libmariadbclient-dev \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
@@ -90,6 +110,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -97,5 +118,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,38 +1,35 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Using bundled SQLite, no need to install it
# RUN apt-get update && apt-get install -y \
#     --no-install-recommends \
#     sqlite3 \
#     && rm -rf /var/lib/apt/lists/*
# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install MySQL package
RUN apt-get update && apt-get install -y \
@@ -41,7 +38,7 @@ RUN apt-get update && apt-get install -y \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
@@ -69,7 +66,7 @@ RUN cargo build --features ${DB} --release
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:stretch-slim
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -80,6 +77,7 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

@@ -94,5 +92,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
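The digest comments above describe a manual lookup; the same round-trip can be scripted. A minimal sketch, using only `docker` itself (the tag is the example one from the comments, not a pinned recommendation):

#!/usr/bin/env sh
# Resolve the immutable digest behind a web-vault tag, then print a pinned FROM line.
TAG="bitwardenrs/web-vault:v2.12.0e"
docker pull "$TAG" >/dev/null
DIGEST=$(docker image inspect --format '{{index .RepoDigests 0}}' "$TAG")
echo "FROM $DIGEST as vault"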
@@ -1,58 +1,75 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2019-07-08 as build
FROM clux/muslrust:nightly-2019-12-19 as build

# set mysql backend
ARG DB=mysql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

# Install needed libraries
# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN rustup target add x86_64-unknown-linux-musl

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

RUN rustup target add x86_64-unknown-linux-musl

# Make sure that we actually build the project
RUN touch src/main.rs

# Build
# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.10
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -62,6 +79,7 @@ ENV SSL_CERT_DIR=/etc/ssl/certs
# Install needed libraries
RUN apk add --no-cache \
    openssl \
    curl \
    mariadb-connector-c \
    ca-certificates

@@ -76,5 +94,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
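The two-pass build above is the standard cargo layer-caching trick: dependencies are compiled against the dummy crate first, so that layer only invalidates when the manifests change, and `touch src/main.rs` forces cargo to rebuild the real crate after `COPY . .` (which may carry a timestamp older than the cached artifacts). A condensed sketch of the pattern, with generic names rather than this project's exact file set:

FROM rust:1.40 as build
RUN USER=root cargo new --bin /app          # dummy crate used only to pre-build dependencies
WORKDIR /app
COPY ./Cargo.* ./
RUN cargo build --release                   # cached while Cargo.toml/Cargo.lock are unchanged
RUN find . -not -path "./target*" -delete   # drop the dummy sources, keep compiled deps
COPY . .
RUN touch src/main.rs                       # ensure the real main.rs is newer than the cache
RUN cargo build --release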
docker/amd64/postgresql/Dockerfile (new file, 101 lines)
@@ -0,0 +1,101 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################

# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set postgresql backend
ARG DB=postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install PostgreSQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libpq5 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/amd64/postgresql/Dockerfile.alpine (new file, 103 lines)
@@ -0,0 +1,103 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################

# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2019-12-19 as build

# set postgresql backend
ARG DB=postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

# Install PostgreSQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN rustup target add x86_64-unknown-linux-musl

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# Install needed libraries
RUN apk add --no-cache \
    openssl \
    curl \
    postgresql-libs \
    ca-certificates

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,47 +1,38 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward comaptibility
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Using bundled SQLite, no need to install it
# RUN apt-get update && apt-get install -y \
#     --no-install-recommends \
#     sqlite3 \
#     && rm -rf /var/lib/apt/lists/*
# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmariadb-dev \
    && rm -rf /var/lib/apt/lists/*
# Don't download rust docs
RUN rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
@@ -69,7 +60,7 @@ RUN cargo build --features ${DB} --release
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:stretch-slim
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -80,7 +71,8 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    libmariadbclient-dev \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
@@ -94,5 +86,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,58 +1,69 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2019-07-08 as build
FROM clux/muslrust:nightly-2019-12-19 as build

# set sqlite as default for DB ARG for backward comaptibility
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN rustup target add x86_64-unknown-linux-musl

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

RUN rustup target add x86_64-unknown-linux-musl

# Make sure that we actually build the project
RUN touch src/main.rs

# Build
# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.10
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -62,7 +73,8 @@ ENV SSL_CERT_DIR=/etc/ssl/certs
# Install needed libraries
RUN apk add --no-cache \
    openssl \
    mariadb-connector-c \
    curl \
    sqlite \
    ca-certificates

RUN mkdir /data
@@ -76,5 +88,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for armel architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
@@ -39,36 +52,48 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmariadb-dev:armel \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl armel libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel \
    libmariadb-dev:armel
# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
RUN rustup target add arm-unknown-linux-gnueabi

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add arm-unknown-linux-gnueabi
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-debian:stretch
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -81,6 +106,7 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

@@ -90,6 +116,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -97,5 +124,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
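As a quick sanity check on a cross-build like the one above, the architecture of the produced binary can be inspected from the build stage. A sketch; the path matches the `--target` directory used here:

# Should report an ARM (EABI) ELF executable rather than x86-64.
file target/arm-unknown-linux-gnueabi/release/bitwarden_rs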
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward comaptibility
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for armel architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
@@ -39,36 +52,42 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl armel libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel \
    libmariadb-dev:armel
# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
RUN rustup target add arm-unknown-linux-gnueabi

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add arm-unknown-linux-gnueabi
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-debian:stretch
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -81,7 +100,8 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    libmariadbclient-dev \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
@@ -90,6 +110,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -97,5 +118,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for armhf architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
@@ -39,37 +52,47 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmariadb-dev:armhf \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl armhf libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf \
    libmariadb-dev:armhf

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
RUN rustup target add armv7-unknown-linux-gnueabihf
# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add armv7-unknown-linux-gnueabihf
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-debian:stretch
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -82,6 +105,7 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

@@ -91,6 +115,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -98,5 +123,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -1,33 +1,46 @@
# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#     https://docs.docker.com/develop/develop-images/multistage-build/
#     https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.10 as vault

ENV VAULT_VERSION "v2.11.0"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls
# This hash is extracted from the docker web-vault builds and it's preferred over a simple tag because it's immutable.
# It can be viewed in multiple ways:
# - From the https://hub.docker.com/repository/docker/bitwardenrs/web-vault/tags page, click the tag name and the digest should be there.
# - From the console, with the following commands:
#     docker pull bitwardenrs/web-vault:v2.12.0e
#     docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.12.0e
#
# - To do the opposite, and get the tag from the hash, you can do:
#     docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c
FROM bitwardenrs/web-vault@sha256:feb3f46d15738191b9043be4cdb1be2c0078ed411e7b7be73a2f4fcbca01e13c as vault

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.36 as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward comaptibility
# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive LANG=C.UTF-8 TZ=UTC TERM=xterm-256color

# Don't download rust docs
RUN rustup set profile minimal

# Install required build libs for armhf architecture.
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
@@ -39,36 +52,41 @@ RUN apt-get update \
ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Prepare openssl armhf libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf \
    libmariadb-dev:armhf
# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
RUN rustup target add armv7-unknown-linux-gnueabihf
# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add armv7-unknown-linux-gnueabihf
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf -v
# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-debian:stretch
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -81,7 +99,8 @@ RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    libmariadbclient-dev \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
@@ -90,6 +109,7 @@ RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
@@ -97,5 +117,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ["./bitwarden_rs"]
WORKDIR /
CMD ["/bitwarden_rs"]
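The `RUN [ "cross-build-end" ]` steps referenced in the hunks above are a balenalib base-image feature: together with the matching `cross-build-start`, they toggle QEMU user-mode emulation so that ARM `RUN` steps can execute on an x86 build host. A minimal sketch of how a runtime stage uses the pair (assumed behavior of the balenalib images, not shown in full in this diff):

FROM balenalib/armv7hf-debian:buster
RUN [ "cross-build-start" ]    # subsequent RUN steps execute under QEMU emulation
RUN apt-get update && apt-get install -y --no-install-recommends openssl
RUN [ "cross-build-end" ]      # back to native execution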
docker/healthcheck.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env sh

if [ -z "$ROCKET_TLS" ]
then
    curl --fail http://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
else
    curl --insecure --fail https://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
fi
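With the HEALTHCHECK instructions in the Dockerfiles wired to this script, the container's health state can be read back with standard Docker tooling (the container name here is a placeholder):

# Prints "starting", "healthy" or "unhealthy".
docker inspect --format '{{.State.Health.Status}}' bitwarden_rs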
docker/render_template (new executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env python3

import os, argparse, json

import jinja2

args_parser = argparse.ArgumentParser()
args_parser.add_argument('template_file', help='Jinja2 template file to render.')
args_parser.add_argument('render_vars', help='JSON-encoded data to pass to the templating engine.')
cli_args = args_parser.parse_args()

render_vars = json.loads(cli_args.render_vars)
environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.getcwd()),
    trim_blocks=True,
)
print(environment.get_template(cli_args.template_file).render(render_vars))
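For reference, an invocation would look something like the following; the JSON keys are illustrative only, since the real variables are whatever `Dockerfile.j2` expects, and `make` normally drives this script:

./docker/render_template docker/Dockerfile.j2 '{"target": "alpine", "db": "mysql"}' > docker/amd64/mysql/Dockerfile.alpine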
@@ -4,4 +4,4 @@ ALTER TABLE users

ALTER TABLE users
ADD COLUMN
    client_kdf_iter INTEGER NOT NULL DEFAULT 5000;
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;

@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

@@ -0,0 +1,5 @@
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
@@ -0,0 +1 @@
DROP TABLE org_policies;
@@ -0,0 +1,9 @@
CREATE TABLE org_policies (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
    atype INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,

    UNIQUE (org_uuid, atype)
);
@@ -0,0 +1,13 @@
DROP TABLE devices;
DROP TABLE attachments;
DROP TABLE users_collections;
DROP TABLE users_organizations;
DROP TABLE folders_ciphers;
DROP TABLE ciphers_collections;
DROP TABLE twofactor;
DROP TABLE invitations;
DROP TABLE collections;
DROP TABLE folders;
DROP TABLE ciphers;
DROP TABLE users;
DROP TABLE organizations;
migrations/postgresql/2019-09-12-100000_create_tables/up.sql (new file, 121 lines)
@@ -0,0 +1,121 @@
CREATE TABLE users (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    email VARCHAR(255) NOT NULL UNIQUE,
    name TEXT NOT NULL,
    password_hash BYTEA NOT NULL,
    salt BYTEA NOT NULL,
    password_iterations INTEGER NOT NULL,
    password_hint TEXT,
    akey TEXT NOT NULL,
    private_key TEXT,
    public_key TEXT,
    totp_secret TEXT,
    totp_recover TEXT,
    security_stamp TEXT NOT NULL,
    equivalent_domains TEXT NOT NULL,
    excluded_globals TEXT NOT NULL,
    client_kdf_type INTEGER NOT NULL DEFAULT 0,
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000
);

CREATE TABLE devices (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL,
    atype INTEGER NOT NULL,
    push_token TEXT,
    refresh_token TEXT NOT NULL,
    twofactor_remember TEXT
);

CREATE TABLE organizations (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    billing_email TEXT NOT NULL
);

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) REFERENCES users (uuid),
    organization_uuid CHAR(36) REFERENCES organizations (uuid),
    atype INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL,
    password_history TEXT
);

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL,
    akey TEXT
);

CREATE TABLE folders (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL
);

CREATE TABLE collections (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
    name TEXT NOT NULL
);

CREATE TABLE users_collections (
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    read_only BOOLEAN NOT NULL DEFAULT false,
    PRIMARY KEY (user_uuid, collection_uuid)
);

CREATE TABLE users_organizations (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),

    access_all BOOLEAN NOT NULL,
    akey TEXT NOT NULL,
    status INTEGER NOT NULL,
    atype INTEGER NOT NULL,

    UNIQUE (user_uuid, org_uuid)
);

CREATE TABLE folders_ciphers (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),
    PRIMARY KEY (cipher_uuid, folder_uuid)
);

CREATE TABLE ciphers_collections (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (cipher_uuid, collection_uuid)
);

CREATE TABLE twofactor (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    atype INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,
    UNIQUE (user_uuid, atype)
);

CREATE TABLE invitations (
    email VARCHAR(255) NOT NULL PRIMARY KEY
);
@@ -0,0 +1,26 @@
ALTER TABLE attachments ALTER COLUMN id TYPE CHAR(36);
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE users ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE users ALTER COLUMN email TYPE VARCHAR(255);
ALTER TABLE devices ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE devices ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE organizations ALTER COLUMN uuid TYPE CHAR(40);
ALTER TABLE ciphers ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE CHAR(36);
ALTER TABLE folders ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE folders ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE collections ALTER COLUMN uuid TYPE CHAR(40);
ALTER TABLE collections ALTER COLUMN org_uuid TYPE CHAR(40);
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE CHAR(36);
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE CHAR(36);
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
ALTER TABLE twofactor ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE invitations ALTER COLUMN email TYPE VARCHAR(255);
@@ -0,0 +1,27 @@
-- Switch from CHAR() types to VARCHAR() types to avoid padding issues.
ALTER TABLE attachments ALTER COLUMN id TYPE TEXT;
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE users ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE users ALTER COLUMN email TYPE TEXT;
ALTER TABLE devices ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE devices ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE organizations ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE VARCHAR(40);
ALTER TABLE folders ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE folders ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE collections ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE collections ALTER COLUMN org_uuid TYPE VARCHAR(40);
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE VARCHAR(40);
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
ALTER TABLE twofactor ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE invitations ALTER COLUMN email TYPE TEXT;
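A minimal illustration of the padding issue the comment above refers to (PostgreSQL semantics; the literals are arbitrary):

-- CHAR(n) values are stored space-padded. Two bpchar values compare with the
-- padding ignored, but once a value is coerced to TEXT (e.g. compared against
-- a TEXT column), the trailing spaces are kept and the match fails.
SELECT 'abc'::CHAR(6) = 'abc';        -- true:  bpchar comparison trims padding
SELECT 'abc'::CHAR(6)::TEXT = 'abc';  -- false: 'abc   ' <> 'abc'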
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

@@ -0,0 +1,5 @@
ALTER TABLE users ADD COLUMN verified_at TIMESTAMP DEFAULT NULL;
ALTER TABLE users ADD COLUMN last_verifying_at TIMESTAMP DEFAULT NULL;
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
@@ -0,0 +1 @@
DROP TABLE org_policies;
@@ -0,0 +1,9 @@
CREATE TABLE org_policies (
  uuid     CHAR(36) NOT NULL PRIMARY KEY,
  org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
  atype    INTEGER  NOT NULL,
  enabled  BOOLEAN  NOT NULL,
  data     TEXT     NOT NULL,

  UNIQUE (org_uuid, atype)
);
@@ -4,4 +4,4 @@ ALTER TABLE users

 ALTER TABLE users
     ADD COLUMN
-    client_kdf_iter INTEGER NOT NULL DEFAULT 5000;
+    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

@@ -0,0 +1,5 @@
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
ALTER TABLE users ADD COLUMN email_new TEXT DEFAULT NULL;
ALTER TABLE users ADD COLUMN email_new_token TEXT DEFAULT NULL;
@@ -0,0 +1 @@
DROP TABLE org_policies;
@@ -0,0 +1,9 @@
CREATE TABLE org_policies (
  uuid     TEXT NOT NULL PRIMARY KEY,
  org_uuid TEXT NOT NULL REFERENCES organizations (uuid),
  atype    INTEGER NOT NULL,
  enabled  BOOLEAN NOT NULL,
  data     TEXT NOT NULL,

  UNIQUE (org_uuid, atype)
);
@@ -1 +1 @@
-nightly-2019-08-18
+nightly-2020-03-09
@@ -1 +1,2 @@
+version = "Two"
 max_width = 120
@@ -1,3 +1,4 @@
+use once_cell::sync::Lazy;
 use serde_json::Value;
 use std::process::Command;

@@ -26,6 +27,7 @@ pub fn routes() -> Vec<Route> {
        post_admin_login,
        admin_page,
        invite_user,
+        logout,
        delete_user,
        deauth_user,
        remove_2fa,
@@ -33,12 +35,12 @@ pub fn routes() -> Vec<Route> {
        post_config,
        delete_config,
        backup_db,
+        test_smtp,
    ]
}

-lazy_static! {
-    static ref CAN_BACKUP: bool = cfg!(feature = "sqlite") && Command::new("sqlite").arg("-version").status().is_ok();
-}
+static CAN_BACKUP: Lazy<bool> =
+    Lazy::new(|| cfg!(feature = "sqlite") && Command::new("sqlite3").arg("-version").status().is_ok());

#[get("/")]
fn admin_disabled() -> &'static str {
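The CAN_BACKUP change above swaps the lazy_static! macro for once_cell's Lazy, which gives the same initialize-once semantics as a plain generic type (it also probes for the sqlite3 binary rather than the misnamed sqlite). A standalone sketch of the pattern, with an illustrative name:

use once_cell::sync::Lazy;
use std::process::Command;

// Initialized on first access, then cached for the life of the process.
static SQLITE3_AVAILABLE: Lazy<bool> =
    Lazy::new(|| Command::new("sqlite3").arg("-version").status().is_ok());

fn main() {
    // Dereferencing triggers (or reuses) the one-time initialization.
    println!("can back up: {}", *SQLITE3_AVAILABLE);
}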
@@ -51,11 +53,15 @@ const ADMIN_PATH: &str = "/admin";
const BASE_TEMPLATE: &str = "admin/base";
const VERSION: Option<&str> = option_env!("GIT_VERSION");

+fn admin_path() -> String {
+    format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
+}

#[get("/", rank = 2)]
fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
    // If there is an error, show it
    let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
-    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg});
+    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});

    // Return the page
    let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
@@ -75,7 +81,7 @@ fn post_admin_login(data: Form<LoginForm>, mut cookies: Cookies, ip: ClientIp) -
    if !_validate_token(&data.token) {
        error!("Invalid admin token. IP: {}", ip.ip);
        Err(Flash::error(
-            Redirect::to(ADMIN_PATH),
+            Redirect::to(admin_path()),
            "Invalid admin token, please try again.",
        ))
    } else {
@@ -84,14 +90,14 @@ fn post_admin_login(data: Form<LoginForm>, mut cookies: Cookies, ip: ClientIp) -
        let jwt = encode_jwt(&claims);

        let cookie = Cookie::build(COOKIE_NAME, jwt)
-            .path(ADMIN_PATH)
-            .max_age(chrono::Duration::minutes(20))
+            .path(admin_path())
+            .max_age(time::Duration::minutes(20))
            .same_site(SameSite::Strict)
            .http_only(true)
            .finish();

        cookies.add(cookie);
-        Ok(Redirect::to(ADMIN_PATH))
+        Ok(Redirect::to(admin_path()))
    }
}

@@ -109,6 +115,8 @@ struct AdminTemplateData {
    users: Vec<Value>,
    config: Value,
    can_backup: bool,
+    logged_in: bool,
+    urlpath: String,
}

impl AdminTemplateData {
@@ -119,6 +127,8 @@ impl AdminTemplateData {
            users,
            config: CONFIG.prepare_json(),
            can_backup: *CAN_BACKUP,
+            logged_in: true,
+            urlpath: CONFIG.domain_path(),
        }
    }

@@ -150,22 +160,35 @@ fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> Empt
        err!("User already exists")
    }

    if !CONFIG.invitations_allowed() {
        err!("Invitations are not allowed")
    }

    let mut user = User::new(email);
    user.save(&conn)?;

    if CONFIG.mail_enabled() {
-        let org_name = "bitwarden_rs";
-        mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None)
+        mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None)
    } else {
        let invitation = Invitation::new(data.email);
        invitation.save(&conn)
    }
}

+#[post("/test/smtp", data = "<data>")]
+fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyResult {
+    let data: InviteData = data.into_inner();
+    let email = data.email.clone();
+
+    if CONFIG.mail_enabled() {
+        mail::send_test(&email)
+    } else {
+        err!("Mail is not enabled")
+    }
+}

+#[get("/logout")]
+fn logout(mut cookies: Cookies) -> Result<Redirect, ()> {
+    cookies.remove(Cookie::named(COOKIE_NAME));
+    Ok(Redirect::to(admin_path()))
+}

#[get("/users")]
fn get_users(_token: AdminToken, conn: DbConn) -> JsonResult {
    let users = User::get_all(&conn);

@@ -1,10 +1,12 @@
+use chrono::Utc;
 use rocket_contrib::json::Json;

 use crate::db::models::*;
 use crate::db::DbConn;

 use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
-use crate::auth::{decode_invite, Headers};
+use crate::auth::{decode_delete, decode_invite, decode_verify_email, Headers};
 use crate::crypto;
 use crate::mail;

 use crate::CONFIG;
@@ -25,6 +27,10 @@ pub fn routes() -> Vec<Route> {
        post_sstamp,
        post_email_token,
        post_email,
+        post_verify_email,
+        post_verify_email_token,
+        post_delete_recover,
+        post_delete_recover_token,
        delete_account,
        post_delete_account,
        revision_date,
@@ -62,7 +68,11 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
    let mut user = match User::find_by_mail(&data.Email, &conn) {
        Some(user) => {
            if !user.password_hash.is_empty() {
+                if CONFIG.signups_allowed() {
                    err!("User already exists")
+                } else {
+                    err!("Registration not allowed or user already exists")
+                }
            }

            if let Some(token) = data.Token {
@@ -82,14 +92,14 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
            } else if CONFIG.signups_allowed() {
                err!("Account with this email already exists")
            } else {
-                err!("Registration not allowed")
+                err!("Registration not allowed or user already exists")
            }
        }
        None => {
-            if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) {
+            if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) || CONFIG.can_signup_user(&data.Email) {
                User::new(data.Email.clone())
            } else {
-                err!("Registration not allowed")
+                err!("Registration not allowed or user already exists")
            }
        }
    };
@@ -122,6 +132,20 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
        user.public_key = Some(keys.PublicKey);
    }

+    if CONFIG.mail_enabled() {
+        if CONFIG.signups_verify() {
+            if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid) {
+                error!("Error sending welcome email: {:#?}", e);
+            }
+
+            user.last_verifying_at = Some(user.created_at);
+        } else {
+            if let Err(e) = mail::send_welcome(&user.email) {
+                error!("Error sending welcome email: {:#?}", e);
+            }
+        }
+    }

    user.save(&conn)
}

@@ -337,8 +361,9 @@ struct EmailTokenData {
#[post("/accounts/email-token", data = "<data>")]
fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
    let data: EmailTokenData = data.into_inner().data;
+    let mut user = headers.user;

-    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
+    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password")
    }

@@ -346,7 +371,21 @@ fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: Db
        err!("Email already in use");
    }

-    Ok(())
+    if !CONFIG.signups_allowed() && !CONFIG.can_signup_user(&data.NewEmail) {
+        err!("Email cannot be changed to this address");
+    }
+
+    let token = crypto::generate_token(6)?;
+
+    if CONFIG.mail_enabled() {
+        if let Err(e) = mail::send_change_email(&data.NewEmail, &token) {
+            error!("Error sending change-email email: {:#?}", e);
+        }
+    }
+
+    user.email_new = Some(data.NewEmail);
+    user.email_new_token = Some(token);
+    user.save(&conn)
}

#[derive(Deserialize)]
@@ -357,8 +396,7 @@ struct ChangeEmailData {

    Key: String,
    NewMasterPasswordHash: String,
-    #[serde(rename = "Token")]
-    _Token: NumberOrString,
+    Token: NumberOrString,
}

#[post("/accounts/email", data = "<data>")]
@@ -374,7 +412,33 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
        err!("Email already in use");
    }

+    match user.email_new {
+        Some(ref val) => {
+            if val != &data.NewEmail {
+                err!("Email change mismatch");
+            }
+        }
+        None => err!("No email change pending"),
+    }
+
+    if CONFIG.mail_enabled() {
+        // Only check the token if we sent out an email...
+        match user.email_new_token {
+            Some(ref val) => {
+                if *val != data.Token.into_string() {
+                    err!("Token mismatch");
+                }
+            }
+            None => err!("No email change pending"),
+        }
+        user.verified_at = Some(Utc::now().naive_utc());
+    } else {
+        user.verified_at = None;
+    }

    user.email = data.NewEmail;
+    user.email_new = None;
+    user.email_new_token = None;

    user.set_password(&data.NewMasterPasswordHash);
    user.akey = data.Key;
@@ -382,6 +446,108 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
    user.save(&conn)
}

#[post("/accounts/verify-email")]
fn post_verify_email(headers: Headers, _conn: DbConn) -> EmptyResult {
    let user = headers.user;

    if !CONFIG.mail_enabled() {
        err!("Cannot verify email address");
    }

    if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) {
        error!("Error sending verify email: {:#?}", e);
    }

    Ok(())
}

#[derive(Deserialize)]
#[allow(non_snake_case)]
struct VerifyEmailTokenData {
    UserId: String,
    Token: String,
}

#[post("/accounts/verify-email-token", data = "<data>")]
fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult {
    let data: VerifyEmailTokenData = data.into_inner().data;

    let mut user = match User::find_by_uuid(&data.UserId, &conn) {
        Some(user) => user,
        None => err!("User doesn't exist"),
    };

    let claims = match decode_verify_email(&data.Token) {
        Ok(claims) => claims,
        Err(_) => err!("Invalid claim"),
    };
    if claims.sub != user.uuid {
        err!("Invalid claim");
    }
    user.verified_at = Some(Utc::now().naive_utc());
    user.last_verifying_at = None;
    user.login_verify_count = 0;
    if let Err(e) = user.save(&conn) {
        error!("Error saving email verification: {:#?}", e);
    }

    Ok(())
}

#[derive(Deserialize)]
#[allow(non_snake_case)]
struct DeleteRecoverData {
    Email: String,
}

#[post("/accounts/delete-recover", data = "<data>")]
fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
    let data: DeleteRecoverData = data.into_inner().data;

    let user = User::find_by_mail(&data.Email, &conn);

    if CONFIG.mail_enabled() {
        if let Some(user) = user {
            if let Err(e) = mail::send_delete_account(&user.email, &user.uuid) {
                error!("Error sending delete account email: {:#?}", e);
            }
        }
        Ok(())
    } else {
        // We don't support sending emails, but we shouldn't allow anybody
        // to delete accounts without at least logging in... And if the user
        // cannot remember their password then they will need to contact
        // the administrator to delete it...
        err!("Please contact the administrator to delete your account");
    }
}

#[derive(Deserialize)]
#[allow(non_snake_case)]
struct DeleteRecoverTokenData {
    UserId: String,
    Token: String,
}

#[post("/accounts/delete-recover-token", data = "<data>")]
fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
    let data: DeleteRecoverTokenData = data.into_inner().data;

    let user = match User::find_by_uuid(&data.UserId, &conn) {
        Some(user) => user,
        None => err!("User doesn't exist"),
    };

    let claims = match decode_delete(&data.Token) {
        Ok(claims) => claims,
        Err(_) => err!("Invalid claim"),
    };
    if claims.sub != user.uuid {
        err!("Invalid claim");
    }
    user.delete(&conn)
}

#[post("/accounts/delete", data = "<data>")]
fn post_delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
    delete_account(data, headers, conn)

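The new account endpoints above turn the email change into a two-step protocol: /accounts/email-token records the requested address and mails a short token to it, and /accounts/email only applies the change when the client echoes both back. A condensed sketch of the state carried on the user row (the struct is a stand-in for the relevant User fields, not the real model):

struct EmailChangeState {
    email_new: Option<String>,
    email_new_token: Option<String>,
}

// Step 1: /accounts/email-token stores the pending address and the mailed token.
fn request_change(state: &mut EmailChangeState, new_email: &str, token: &str) {
    state.email_new = Some(new_email.to_owned());
    state.email_new_token = Some(token.to_owned());
}

// Step 2: /accounts/email succeeds only if both the address and the token match.
fn confirm_change(state: &EmailChangeState, new_email: &str, token: &str) -> bool {
    state.email_new.as_deref() == Some(new_email)
        && state.email_new_token.as_deref() == Some(token)
}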
@@ -79,6 +79,9 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
    let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
    let collections_json: Vec<Value> = collections.iter().map(Collection::to_json).collect();

+    let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
+    let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();

    let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
    let ciphers_json: Vec<Value> = ciphers
        .iter()
@@ -88,13 +91,14 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
    let domains_json = if data.exclude_domains {
        Value::Null
    } else {
-        api::core::get_eq_domains(headers).unwrap().into_inner()
+        api::core::_get_eq_domains(headers, true).unwrap().into_inner()
    };

    Ok(Json(json!({
        "Profile": user_json,
        "Folders": folders_json,
        "Collections": collections_json,
+        "Policies": policies_json,
        "Ciphers": ciphers_json,
        "Domains": domains_json,
        "Object": "sync"
@@ -599,10 +603,14 @@ fn share_cipher_by_uuid(
        None => err!("Cipher doesn't exist"),
    };

-    match data.Cipher.OrganizationId.clone() {
-        None => err!("Organization id not provided"),
-        Some(organization_uuid) => {
    let mut shared_to_collection = false;

+    match data.Cipher.OrganizationId.clone() {
+        // If we don't get an organization ID, we don't do anything
+        // No error because this is used when using the Clone functionality
+        None => {},
+        Some(organization_uuid) => {

            for uuid in &data.CollectionIds {
                match Collection::find_by_uuid_and_org(uuid, &organization_uuid, &conn) {
                    None => err!("Invalid collection ID provided"),
@@ -616,6 +624,9 @@ fn share_cipher_by_uuid(
                    }
                }
            }
        }
    };

    update_cipher_from_data(
        &mut cipher,
        data.Cipher,
@@ -627,8 +638,6 @@ fn share_cipher_by_uuid(
    )?;

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
-        }
-    }
}

#[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
@@ -642,20 +651,49 @@ fn post_attachment(
) -> JsonResult {
    let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
        Some(cipher) => cipher,
-        None => err!("Cipher doesn't exist"),
+        None => err_discard!("Cipher doesn't exist", data),
    };

    if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
-        err!("Cipher is not write accessible")
+        err_discard!("Cipher is not write accessible", data)
    }

    let mut params = content_type.params();
    let boundary_pair = params.next().expect("No boundary provided");
    let boundary = boundary_pair.1;

+    let size_limit = if let Some(ref user_uuid) = cipher.user_uuid {
+        match CONFIG.user_attachment_limit() {
+            Some(0) => err_discard!("Attachments are disabled", data),
+            Some(limit) => {
+                let left = limit - Attachment::size_by_user(user_uuid, &conn);
+                if left <= 0 {
+                    err_discard!("Attachment size limit reached! Delete some files to open space", data)
+                }
+                Some(left as u64)
+            }
+            None => None,
+        }
+    } else if let Some(ref org_uuid) = cipher.organization_uuid {
+        match CONFIG.org_attachment_limit() {
+            Some(0) => err_discard!("Attachments are disabled", data),
+            Some(limit) => {
+                let left = limit - Attachment::size_by_org(org_uuid, &conn);
+                if left <= 0 {
+                    err_discard!("Attachment size limit reached! Delete some files to open space", data)
+                }
+                Some(left as u64)
+            }
+            None => None,
+        }
+    } else {
+        err_discard!("Cipher is neither owned by a user nor an organization", data);
+    };

    let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher.uuid);

    let mut attachment_key = None;
+    let mut error = None;

    Multipart::with_body(data.open(), boundary)
        .foreach_entry(|mut field| {
@@ -674,18 +712,21 @@ fn post_attachment(
            let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
            let path = base_path.join(&file_name);

-            let size = match field.data.save().memory_threshold(0).size_limit(None).with_path(path) {
+            let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
                SaveResult::Full(SavedData::File(_, size)) => size as i32,
                SaveResult::Full(other) => {
-                    error!("Attachment is not a file: {:?}", other);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Attachment is not a file: {:?}", other));
                    return;
                }
                SaveResult::Partial(_, reason) => {
-                    error!("Partial result: {:?}", reason);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason));
                    return;
                }
                SaveResult::Error(e) => {
-                    error!("Error: {:?}", e);
+                    std::fs::remove_file(path).ok();
+                    error = Some(format!("Error: {:?}", e));
                    return;
                }
            };
@@ -699,6 +740,10 @@ fn post_attachment(
        })
        .expect("Error processing multipart data");

+    if let Some(ref e) = error {
+        err!(e);
+    }

    nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));

    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))

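The size_limit computation above applies one pattern to both user- and org-owned ciphers: a configured limit of 0 disables attachments outright, a positive limit is reduced by the space already used, and no configured limit means unbounded uploads. A restatement with the branching isolated (simplified types, illustrative only):

fn remaining_quota(limit: Option<i64>, used: i64) -> Result<Option<u64>, &'static str> {
    match limit {
        Some(0) => Err("Attachments are disabled"),
        Some(limit) => {
            let left = limit - used;
            if left <= 0 {
                Err("Attachment size limit reached")
            } else {
                Ok(Some(left as u64)) // handed to size_limit(...) on the multipart saver
            }
        }
        None => Ok(None), // no limit configured
    }
}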
@@ -59,7 +59,7 @@ pub struct FolderData {
fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
    let data: FolderData = data.into_inner().data;

-    let mut folder = Folder::new(headers.user.uuid.clone(), data.Name);
+    let mut folder = Folder::new(headers.user.uuid, data.Name);

    folder.save(&conn)?;
    nt.send_folder_update(UpdateType::FolderCreate, &folder);

@@ -81,6 +81,10 @@ const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");

#[get("/settings/domains")]
fn get_eq_domains(headers: Headers) -> JsonResult {
+    _get_eq_domains(headers, false)
+}
+
+fn _get_eq_domains(headers: Headers, no_excluded: bool) -> JsonResult {
    let user = headers.user;
    use serde_json::from_str;

@@ -93,6 +97,10 @@ fn get_eq_domains(headers: Headers) -> JsonResult {
        global.Excluded = excluded_globals.contains(&global.Type);
    }

+    if no_excluded {
+        globals.retain(|g| !g.Excluded);
+    }

    Ok(Json(json!({
        "EquivalentDomains": equivalent_domains,
        "GlobalEquivalentDomains": globals,
@@ -138,10 +146,12 @@ fn hibp_breach(username: String) -> JsonResult {
        username
    );

-    use reqwest::{header::USER_AGENT, Client};
+    use reqwest::{header::USER_AGENT, blocking::Client};

    if let Some(api_key) = crate::CONFIG.hibp_api_key() {
-        let res = Client::new()
+        let hibp_client = Client::builder().build()?;
+
+        let res = hibp_client
            .get(&url)
            .header(USER_AGENT, user_agent)
            .header("hibp-api-key", api_key)
@@ -156,9 +166,17 @@ fn hibp_breach(username: String) -> JsonResult {
        Ok(Json(value))
    } else {
        Ok(Json(json!([{
-            "title": "--- Error! ---",
-            "description": "HaveIBeenPwned API key not set! Go to https://haveibeenpwned.com/API/Key",
-            "logopath": "/bwrs_images/error-x.svg"
+            "Name": "HaveIBeenPwned",
+            "Title": "Manual HIBP Check",
+            "Domain": "haveibeenpwned.com",
+            "BreachDate": "2019-08-18T00:00:00Z",
+            "AddedDate": "2019-08-18T00:00:00Z",
+            "Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{account}\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/account/{account}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>", account=username),
+            "LogoPath": "bwrs_static/hibp.png",
+            "PwnCount": 0,
+            "DataClasses": [
+                "Error - No API key set!"
+            ]
        }])))
    }
}

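The use statement change above tracks reqwest 0.10, where the default Client became async and the synchronous API moved under the blocking module. A minimal sketch of the new call shape (URL and agent string are placeholders):

use reqwest::{blocking::Client, header::USER_AGENT};

fn fetch(url: &str) -> Result<String, reqwest::Error> {
    let client = Client::builder().build()?;
    client.get(url).header(USER_AGENT, "example-agent").send()?.text()
}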
@@ -2,6 +2,7 @@ use rocket::request::Form;
 use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;
+use num_traits::FromPrimitive;

 use crate::api::{
     EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType,
@@ -45,6 +46,10 @@ pub fn routes() -> Vec<Route> {
        delete_user,
        post_delete_user,
        post_org_import,
+        list_policies,
+        list_policies_token,
+        get_policy,
+        put_policy,
    ]
}

@@ -77,7 +82,7 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
    let data: OrgData = data.into_inner().data;

    let org = Organization::new(data.Name, data.BillingEmail);
-    let mut user_org = UserOrganization::new(headers.user.uuid.clone(), org.uuid.clone());
+    let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone());
    let collection = Collection::new(org.uuid.clone(), data.CollectionName);

    user_org.akey = data.Key;
@@ -221,7 +226,7 @@ fn post_organization_collections(
        None => err!("Can't find organization details"),
    };

-    let collection = Collection::new(org.uuid.clone(), data.Name);
+    let collection = Collection::new(org.uuid, data.Name);
    collection.save(&conn)?;

    Ok(Json(collection.to_json()))
@@ -262,7 +267,7 @@ fn post_organization_collection_update(
        err!("Collection is not owned by organization");
    }

-    collection.name = data.Name.clone();
+    collection.name = data.Name;
    collection.save(&conn)?;

    Ok(Json(collection.to_json()))
@@ -581,7 +586,7 @@ fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn:
            Some(headers.user.email),
        )?;
    } else {
-        let invitation = Invitation::new(user.email.clone());
+        let invitation = Invitation::new(user.email);
        invitation.save(&conn)?;
    }

@@ -830,22 +835,13 @@ struct RelationsData {
fn post_org_import(
    query: Form<OrgIdData>,
    data: JsonUpcase<ImportData>,
-    headers: Headers,
+    headers: AdminHeaders,
    conn: DbConn,
    nt: Notify,
) -> EmptyResult {
    let data: ImportData = data.into_inner().data;
    let org_id = query.into_inner().organization_id;

-    let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
-        Some(user) => user,
-        None => err!("User is not part of the organization"),
-    };
-
-    if org_user.atype < UserOrgType::Admin {
-        err!("Only admins or owners can import into an organization")
-    }
-
    // Read and create the collections
    let collections: Vec<_> = data
        .Collections
@@ -866,6 +862,8 @@ fn post_org_import(
        relations.push((relation.Key, relation.Value));
    }

+    let headers: Headers = headers.into();

    // Read and create the ciphers
    let ciphers: Vec<_> = data
        .Ciphers
@@ -901,3 +899,83 @@ fn post_org_import(
    let mut user = headers.user;
    user.update_revision(&conn)
}

#[get("/organizations/<org_id>/policies")]
fn list_policies(org_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
    let policies = OrgPolicy::find_by_org(&org_id, &conn);
    let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();

    Ok(Json(json!({
        "Data": policies_json,
        "Object": "list",
        "ContinuationToken": null
    })))
}

#[get("/organizations/<org_id>/policies/token?<token>")]
fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResult {
    let invite = crate::auth::decode_invite(&token)?;

    let invite_org_id = match invite.org_id {
        Some(invite_org_id) => invite_org_id,
        None => err!("Invalid token"),
    };

    if invite_org_id != org_id {
        err!("Token doesn't match request organization");
    }

    // TODO: We receive the invite token as ?token=<>, validate it contains the org id
    let policies = OrgPolicy::find_by_org(&org_id, &conn);
    let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();

    Ok(Json(json!({
        "Data": policies_json,
        "Object": "list",
        "ContinuationToken": null
    })))
}

#[get("/organizations/<org_id>/policies/<pol_type>")]
fn get_policy(org_id: String, pol_type: i32, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
    let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
        Some(pt) => pt,
        None => err!("Invalid policy type"),
    };

    let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) {
        Some(p) => p,
        None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()),
    };

    Ok(Json(policy.to_json()))
}

#[derive(Deserialize)]
struct PolicyData {
    enabled: bool,
    #[serde(rename = "type")]
    _type: i32,
    data: Value,
}

#[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
    let data: PolicyData = data.into_inner();

    let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
        Some(pt) => pt,
        None => err!("Invalid policy type"),
    };

    let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) {
        Some(p) => p,
        None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()),
    };

    policy.enabled = data.enabled;
    policy.data = serde_json::to_string(&data.data)?;
    policy.save(&conn)?;

    Ok(Json(policy.to_json()))
}
File diff suppressed because it is too large

src/api/core/two_factor/authenticator.rs (new file, 153 lines)
@@ -0,0 +1,153 @@
use data_encoding::BASE32;
use rocket::Route;
use rocket_contrib::json::Json;

use crate::api::core::two_factor::_generate_recover_code;
use crate::api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
use crate::auth::Headers;
use crate::crypto;
use crate::db::{
    models::{TwoFactor, TwoFactorType},
    DbConn,
};

pub use crate::config::CONFIG;

pub fn routes() -> Vec<Route> {
    routes![
        generate_authenticator,
        activate_authenticator,
        activate_authenticator_put,
    ]
}

#[post("/two-factor/get-authenticator", data = "<data>")]
fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: PasswordData = data.into_inner().data;
    let user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let type_ = TwoFactorType::Authenticator as i32;
    let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn);

    let (enabled, key) = match twofactor {
        Some(tf) => (true, tf.data),
        _ => (false, BASE32.encode(&crypto::get_random(vec![0u8; 20]))),
    };

    Ok(Json(json!({
        "Enabled": enabled,
        "Key": key,
        "Object": "twoFactorAuthenticator"
    })))
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct EnableAuthenticatorData {
    MasterPasswordHash: String,
    Key: String,
    Token: NumberOrString,
}

#[post("/two-factor/authenticator", data = "<data>")]
fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableAuthenticatorData = data.into_inner().data;
    let password_hash = data.MasterPasswordHash;
    let key = data.Key;
    let token = data.Token.into_i32()? as u64;

    let mut user = headers.user;

    if !user.check_valid_password(&password_hash) {
        err!("Invalid password");
    }

    // Validate key as base32 and 20 bytes length
    let decoded_key: Vec<u8> = match BASE32.decode(key.as_bytes()) {
        Ok(decoded) => decoded,
        _ => err!("Invalid totp secret"),
    };

    if decoded_key.len() != 20 {
        err!("Invalid key length")
    }

    // Validate the token provided with the key, and save new twofactor
    validate_totp_code(&user.uuid, token, &key.to_uppercase(), &conn)?;

    _generate_recover_code(&mut user, &conn);

    Ok(Json(json!({
        "Enabled": true,
        "Key": key,
        "Object": "twoFactorAuthenticator"
    })))
}

#[put("/two-factor/authenticator", data = "<data>")]
fn activate_authenticator_put(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_authenticator(data, headers, conn)
}

pub fn validate_totp_code_str(user_uuid: &str, totp_code: &str, secret: &str, conn: &DbConn) -> EmptyResult {
    let totp_code: u64 = match totp_code.parse() {
        Ok(code) => code,
        _ => err!("TOTP code is not a number"),
    };

    validate_totp_code(user_uuid, totp_code, secret, &conn)
}

pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, conn: &DbConn) -> EmptyResult {
    use oath::{totp_raw_custom_time, HashType};

    let decoded_secret = match BASE32.decode(secret.as_bytes()) {
        Ok(s) => s,
        Err(_) => err!("Invalid TOTP secret"),
    };

    let mut twofactor = match TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Authenticator as i32, &conn) {
        Some(tf) => tf,
        _ => TwoFactor::new(user_uuid.to_string(), TwoFactorType::Authenticator, secret.to_string()),
    };

    // Get the current system time in UNIX Epoch (UTC)
    let current_time = chrono::Utc::now();
    let current_timestamp = current_time.timestamp();

    // The amount of steps back and forward in time.
    // Also check if we need to disable time drifted TOTP codes.
    // If that is the case, we set the steps to 0 so only the current TOTP is valid.
    let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 };

    for step in -steps..=steps {
        let time_step = current_timestamp / 30i64 + step;
        // We need to calculate the time offset and cast it as a u64,
        // else we can't do math with it on a default u64 variable.
        let time = (current_timestamp + step * 30i64) as u64;
        let generated = totp_raw_custom_time(&decoded_secret, 6, 0, 30, time, &HashType::SHA1);

        // Check that the given code equals the generated one, and that the time_step is larger than the one last used.
        if generated == totp_code && time_step > twofactor.last_used as i64 {
            // If the step does not equal 0, the time has drifted on either the server or the client side.
            if step != 0 {
                info!("TOTP Time drift detected. The step offset is {}", step);
            }

            // Save the last used time step so only totp time steps higher than this one are allowed.
            // This will also save a newly created twofactor if the code is correct.
            twofactor.last_used = time_step as i32;
            twofactor.save(&conn)?;
            return Ok(());
        } else if generated == totp_code && time_step <= twofactor.last_used as i64 {
            warn!("This or a TOTP code within {} steps back and forward has already been used!", steps);
            err!(format!("Invalid TOTP code! Server time: {}", current_time.format("%F %T UTC")));
        }
    }

    // Else no valid code was received; deny access
    err!(format!("Invalid TOTP code! Server time: {}", current_time.format("%F %T UTC")));
}
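The validation loop above accepts the current 30-second TOTP step plus one step in each direction (unless time drift is disabled), and persists the highest matching step so the same code can never be replayed. A sketch of just the window arithmetic (illustrative helpers, not in the file):

// Candidate TOTP time steps for a given UNIX timestamp.
fn candidate_steps(now: i64, disable_time_drift: bool) -> Vec<i64> {
    let steps: i64 = if disable_time_drift { 0 } else { 1 };
    (-steps..=steps).map(|step| now / 30 + step).collect()
}

// A code is accepted only for a step strictly greater than `last_used`,
// which is what makes a previously seen code unusable a second time.
fn replay_safe(step: i64, last_used: i64) -> bool {
    step > last_used
}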
src/api/core/two_factor/duo.rs (new file, 346 lines)
@@ -0,0 +1,346 @@
use chrono::Utc;
use data_encoding::BASE64;
use rocket::Route;
use rocket_contrib::json::Json;
use serde_json;

use crate::api::core::two_factor::_generate_recover_code;
use crate::api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, PasswordData};
use crate::auth::Headers;
use crate::crypto;
use crate::db::{
    models::{TwoFactor, TwoFactorType, User},
    DbConn,
};
use crate::error::MapResult;
use crate::CONFIG;

pub fn routes() -> Vec<Route> {
    routes![get_duo, activate_duo, activate_duo_put,]
}

#[derive(Serialize, Deserialize)]
struct DuoData {
    host: String,
    ik: String,
    sk: String,
}

impl DuoData {
    fn global() -> Option<Self> {
        match (CONFIG._enable_duo(), CONFIG.duo_host()) {
            (true, Some(host)) => Some(Self {
                host,
                ik: CONFIG.duo_ikey().unwrap(),
                sk: CONFIG.duo_skey().unwrap(),
            }),
            _ => None,
        }
    }
    fn msg(s: &str) -> Self {
        Self {
            host: s.into(),
            ik: s.into(),
            sk: s.into(),
        }
    }
    fn secret() -> Self {
        Self::msg("<global_secret>")
    }
    fn obscure(self) -> Self {
        let mut host = self.host;
        let mut ik = self.ik;
        let mut sk = self.sk;

        let digits = 4;
        let replaced = "************";

        host.replace_range(digits.., replaced);
        ik.replace_range(digits.., replaced);
        sk.replace_range(digits.., replaced);

        Self { host, ik, sk }
    }
}

enum DuoStatus {
    Global(DuoData), // Using the global duo config
    User(DuoData),   // Using the user's config
    Disabled(bool),  // True if there is a global setting
}

impl DuoStatus {
    fn data(self) -> Option<DuoData> {
        match self {
            DuoStatus::Global(data) => Some(data),
            DuoStatus::User(data) => Some(data),
            DuoStatus::Disabled(_) => None,
        }
    }
}

const DISABLED_MESSAGE_DEFAULT: &str = "<To use the global Duo keys, please leave these fields untouched>";

#[post("/two-factor/get-duo", data = "<data>")]
fn get_duo(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: PasswordData = data.into_inner().data;

    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let data = get_user_duo_data(&headers.user.uuid, &conn);

    let (enabled, data) = match data {
        DuoStatus::Global(_) => (true, Some(DuoData::secret())),
        DuoStatus::User(data) => (true, Some(data.obscure())),
        DuoStatus::Disabled(true) => (false, Some(DuoData::msg(DISABLED_MESSAGE_DEFAULT))),
        DuoStatus::Disabled(false) => (false, None),
    };

    let json = if let Some(data) = data {
        json!({
            "Enabled": enabled,
            "Host": data.host,
            "SecretKey": data.sk,
            "IntegrationKey": data.ik,
            "Object": "twoFactorDuo"
        })
    } else {
        json!({
            "Enabled": enabled,
            "Object": "twoFactorDuo"
        })
    };

    Ok(Json(json))
}

#[derive(Deserialize)]
#[allow(non_snake_case, dead_code)]
struct EnableDuoData {
    MasterPasswordHash: String,
    Host: String,
    SecretKey: String,
    IntegrationKey: String,
}

impl From<EnableDuoData> for DuoData {
    fn from(d: EnableDuoData) -> Self {
        Self {
            host: d.Host,
            ik: d.IntegrationKey,
            sk: d.SecretKey,
        }
    }
}

fn check_duo_fields_custom(data: &EnableDuoData) -> bool {
    fn empty_or_default(s: &str) -> bool {
        let st = s.trim();
        st.is_empty() || s == DISABLED_MESSAGE_DEFAULT
    }

    !empty_or_default(&data.Host) && !empty_or_default(&data.SecretKey) && !empty_or_default(&data.IntegrationKey)
}

#[post("/two-factor/duo", data = "<data>")]
fn activate_duo(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableDuoData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let (data, data_str) = if check_duo_fields_custom(&data) {
        let data_req: DuoData = data.into();
        let data_str = serde_json::to_string(&data_req)?;
        duo_api_request("GET", "/auth/v2/check", "", &data_req).map_res("Failed to validate Duo credentials")?;
        (data_req.obscure(), data_str)
    } else {
        (DuoData::secret(), String::new())
    };

    let type_ = TwoFactorType::Duo;
    let twofactor = TwoFactor::new(user.uuid.clone(), type_, data_str);
    twofactor.save(&conn)?;

    _generate_recover_code(&mut user, &conn);

    Ok(Json(json!({
        "Enabled": true,
        "Host": data.host,
        "SecretKey": data.sk,
        "IntegrationKey": data.ik,
        "Object": "twoFactorDuo"
    })))
}

#[put("/two-factor/duo", data = "<data>")]
fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_duo(data, headers, conn)
}

fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult {
    const AGENT: &str = "bitwarden_rs:Duo/1.0 (Rust)";

    use reqwest::{header::*, blocking::Client, Method};
    use std::str::FromStr;

    let url = format!("https://{}{}", &data.host, path);
    let date = Utc::now().to_rfc2822();
    let username = &data.ik;
    let fields = [&date, method, &data.host, path, params];
    let password = crypto::hmac_sign(&data.sk, &fields.join("\n"));

    let m = Method::from_str(method).unwrap_or_default();

    Client::new()
        .request(m, &url)
        .basic_auth(username, Some(password))
        .header(USER_AGENT, AGENT)
        .header(DATE, date)
        .send()?
        .error_for_status()?;

    Ok(())
}

const DUO_EXPIRE: i64 = 300;
const APP_EXPIRE: i64 = 3600;

const AUTH_PREFIX: &str = "AUTH";
const DUO_PREFIX: &str = "TX";
const APP_PREFIX: &str = "APP";

fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus {
    let type_ = TwoFactorType::Duo as i32;

    // If the user doesn't have an entry, disabled
    let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, &conn) {
        Some(t) => t,
        None => return DuoStatus::Disabled(DuoData::global().is_some()),
    };

    // If the user has the required values, we use those
    if let Ok(data) = serde_json::from_str(&twofactor.data) {
        return DuoStatus::User(data);
    }

    // Otherwise, we try to use the globals
    if let Some(global) = DuoData::global() {
        return DuoStatus::Global(global);
    }

    // If there are no globals configured, just disable it
    DuoStatus::Disabled(false)
}

// let (ik, sk, ak, host) = get_duo_keys();
fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, String, String, String)> {
    let data = User::find_by_mail(email, &conn)
        .and_then(|u| get_user_duo_data(&u.uuid, &conn).data())
        .or_else(DuoData::global)
        .map_res("Can't fetch Duo keys")?;

    Ok((data.ik, data.sk, CONFIG.get_duo_akey(), data.host))
}

pub fn generate_duo_signature(email: &str, conn: &DbConn) -> ApiResult<(String, String)> {
    let now = Utc::now().timestamp();

    let (ik, sk, ak, host) = get_duo_keys_email(email, conn)?;

    let duo_sign = sign_duo_values(&sk, email, &ik, DUO_PREFIX, now + DUO_EXPIRE);
    let app_sign = sign_duo_values(&ak, email, &ik, APP_PREFIX, now + APP_EXPIRE);

    Ok((format!("{}:{}", duo_sign, app_sign), host))
}

fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64) -> String {
    let val = format!("{}|{}|{}", email, ikey, expire);
    let cookie = format!("{}|{}", prefix, BASE64.encode(val.as_bytes()));

    format!("{}|{}", cookie, crypto::hmac_sign(key, &cookie))
}

pub fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyResult {
    let split: Vec<&str> = response.split(':').collect();
    if split.len() != 2 {
        err!("Invalid response length");
    }

    let auth_sig = split[0];
    let app_sig = split[1];

    let now = Utc::now().timestamp();

    let (ik, sk, ak, _host) = get_duo_keys_email(email, conn)?;

    let auth_user = parse_duo_values(&sk, auth_sig, &ik, AUTH_PREFIX, now)?;
    let app_user = parse_duo_values(&ak, app_sig, &ik, APP_PREFIX, now)?;

    if !crypto::ct_eq(&auth_user, app_user) || !crypto::ct_eq(&auth_user, email) {
        err!("Error validating duo authentication")
    }

    Ok(())
}

fn parse_duo_values(key: &str, val: &str, ikey: &str, prefix: &str, time: i64) -> ApiResult<String> {
    let split: Vec<&str> = val.split('|').collect();
    if split.len() != 3 {
        err!("Invalid value length")
    }

    let u_prefix = split[0];
    let u_b64 = split[1];
    let u_sig = split[2];

    let sig = crypto::hmac_sign(key, &format!("{}|{}", u_prefix, u_b64));

    if !crypto::ct_eq(crypto::hmac_sign(key, &sig), crypto::hmac_sign(key, u_sig)) {
        err!("Duo signatures don't match")
    }

    if u_prefix != prefix {
        err!("Prefixes don't match")
    }

    let cookie_vec = match BASE64.decode(u_b64.as_bytes()) {
        Ok(c) => c,
        Err(_) => err!("Invalid Duo cookie encoding"),
    };

    let cookie = match String::from_utf8(cookie_vec) {
        Ok(c) => c,
        Err(_) => err!("Invalid Duo cookie encoding"),
    };

    let cookie_split: Vec<&str> = cookie.split('|').collect();
    if cookie_split.len() != 3 {
        err!("Invalid cookie length")
    }

    let username = cookie_split[0];
    let u_ikey = cookie_split[1];
    let expire = cookie_split[2];

    if !crypto::ct_eq(ikey, u_ikey) {
        err!("Invalid ikey")
    }

    let expire = match expire.parse() {
        Ok(e) => e,
        Err(_) => err!("Invalid expire time"),
    };

    if time >= expire {
        err!("Expired authorization")
    }

    Ok(username.into())
}
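Taken together, the helpers above implement the Duo Web v2 signed-request scheme: each signed part has the shape PREFIX|base64(email|ikey|expire)|hmac, and the value handed to the client joins the TX and APP parts with a colon. A hypothetical round trip using the public functions (a live DbConn and configured Duo keys are assumed):

fn duo_flow_sketch() {
    // 1. Server: build the signed request for the login page.
    //    let (signature, host) = generate_duo_signature("user@example.com", &conn)?;
    // 2. Client: completes the Duo prompt against `host` and posts back a
    //    colon-separated "AUTH:APP" response string.
    // 3. Server: verifies both parts, the embedded ikey, and the expiry.
    //    validate_duo_login("user@example.com", &response, &conn)?;
}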
src/api/core/two_factor/email.rs (new file, 330 lines)
@@ -0,0 +1,330 @@
use rocket::Route;
|
||||
use rocket_contrib::json::Json;
|
||||
use serde_json;
|
||||
|
||||
use crate::api::core::two_factor::_generate_recover_code;
|
||||
use crate::api::{EmptyResult, JsonResult, JsonUpcase, PasswordData};
|
||||
use crate::auth::Headers;
|
||||
use crate::crypto;
|
||||
use crate::db::{
|
||||
models::{TwoFactor, TwoFactorType},
|
||||
DbConn,
|
||||
};
|
||||
use crate::error::Error;
|
||||
use crate::mail;
|
||||
use crate::CONFIG;
|
||||
|
||||
use chrono::{Duration, NaiveDateTime, Utc};
|
||||
use std::ops::Add;
|
||||
|
||||
pub fn routes() -> Vec<Route> {
|
||||
routes![get_email, send_email_login, send_email, email,]
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
struct SendEmailLoginData {
|
||||
Email: String,
|
||||
MasterPasswordHash: String,
|
||||
}
|
||||
|
||||
/// User is trying to login and wants to use email 2FA.
|
||||
/// Does not require Bearer token
|
||||
#[post("/two-factor/send-email-login", data = "<data>")] // JsonResult
|
||||
fn send_email_login(data: JsonUpcase<SendEmailLoginData>, conn: DbConn) -> EmptyResult {
|
||||
let data: SendEmailLoginData = data.into_inner().data;
|
||||
|
||||
use crate::db::models::User;
|
||||
|
||||
// Get the user
|
||||
let user = match User::find_by_mail(&data.Email, &conn) {
|
||||
Some(user) => user,
|
||||
None => err!("Username or password is incorrect. Try again."),
|
||||
};
|
||||
|
||||
// Check password
|
||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
||||
err!("Username or password is incorrect. Try again.")
|
||||
}
|
||||
|
||||
if !CONFIG._enable_email_2fa() {
|
||||
err!("Email 2FA is disabled")
|
||||
}
|
||||
|
||||
send_token(&user.uuid, &conn)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Generate the token, save the data for later verification and send email to user
|
||||
pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
|
||||
let type_ = TwoFactorType::Email as i32;
|
||||
let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, &conn)?;
|
||||
|
||||
let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
|
||||
|
||||
let mut twofactor_data = EmailTokenData::from_json(&twofactor.data)?;
|
||||
twofactor_data.set_token(generated_token);
|
||||
twofactor.data = twofactor_data.to_json();
|
||||
twofactor.save(&conn)?;
|
||||
|
||||
mail::send_token(&twofactor_data.email, &twofactor_data.last_token?)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// When user clicks on Manage email 2FA show the user the related information
|
||||
#[post("/two-factor/get-email", data = "<data>")]
|
||||
fn get_email(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let data: PasswordData = data.into_inner().data;
|
||||
let user = headers.user;
|
||||
|
||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
||||
err!("Invalid password");
|
||||
}
|
||||
|
||||
let type_ = TwoFactorType::Email as i32;
|
||||
let enabled = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) {
|
||||
Some(x) => x.enabled,
|
||||
_ => false,
|
||||
};
|
||||
|
||||
Ok(Json(json!({
|
||||
"Email": user.email,
|
||||
"Enabled": enabled,
|
||||
"Object": "twoFactorEmail"
|
||||
})))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
struct SendEmailData {
|
||||
/// Email where 2FA codes will be sent to, can be different than user email account.
|
||||
Email: String,
|
||||
MasterPasswordHash: String,
|
||||
}
|
||||
|
||||
/// Send a verification email to the specified email address to check whether it exists/belongs to user.
|
||||
#[post("/two-factor/send-email", data = "<data>")]
|
||||
fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
||||
let data: SendEmailData = data.into_inner().data;
|
||||
let user = headers.user;
|
||||
|
||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
||||
err!("Invalid password");
|
||||
}
|
||||
|
||||
if !CONFIG._enable_email_2fa() {
|
||||
err!("Email 2FA is disabled")
|
||||
}
|
||||
|
||||
let type_ = TwoFactorType::Email as i32;
|
||||
|
||||
if let Some(tf) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) {
|
||||
tf.delete(&conn)?;
|
||||
}
|
||||
|
||||
let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
|
||||
let twofactor_data = EmailTokenData::new(data.Email, generated_token);
|
||||
|
||||
// Uses EmailVerificationChallenge as type to show that it's not verified yet.
|
||||
let twofactor = TwoFactor::new(
|
||||
user.uuid,
|
||||
TwoFactorType::EmailVerificationChallenge,
|
||||
twofactor_data.to_json(),
|
||||
);
|
||||
twofactor.save(&conn)?;
|
||||
|
||||
mail::send_token(&twofactor_data.email, &twofactor_data.last_token?)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
#[allow(non_snake_case)]
|
||||
struct EmailData {
|
||||
Email: String,
|
||||
MasterPasswordHash: String,
|
||||
Token: String,
|
||||
}
|
||||
|
||||
/// Verify email belongs to user and can be used for 2FA email codes.
|
||||
#[put("/two-factor/email", data = "<data>")]
|
||||
fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonResult {
|
||||
let data: EmailData = data.into_inner().data;
|
||||
let mut user = headers.user;
|
||||
|
||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
||||
err!("Invalid password");
|
||||
}
|
||||
|
||||
let type_ = TwoFactorType::EmailVerificationChallenge as i32;
|
||||
let mut twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn)?;
|
||||
|
||||
let mut email_data = EmailTokenData::from_json(&twofactor.data)?;
|
||||
|
||||
let issued_token = match &email_data.last_token {
|
||||
Some(t) => t,
|
||||
_ => err!("No token available"),
|
    };

    if !crypto::ct_eq(issued_token, data.Token) {
        err!("Token is invalid")
    }

    email_data.reset_token();
    twofactor.atype = TwoFactorType::Email as i32;
    twofactor.data = email_data.to_json();
    twofactor.save(&conn)?;

    _generate_recover_code(&mut user, &conn);

    Ok(Json(json!({
        "Email": email_data.email,
        "Enabled": "true",
        "Object": "twoFactorEmail"
    })))
}

/// Validate the email code when used as TwoFactor token mechanism
pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult {
    let mut email_data = EmailTokenData::from_json(&data)?;
    let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn)?;
    let issued_token = match &email_data.last_token {
        Some(t) => t,
        _ => err!("No token available"),
    };

    if !crypto::ct_eq(issued_token, token) {
        email_data.add_attempt();
        if email_data.attempts >= CONFIG.email_attempts_limit() {
            email_data.reset_token();
        }
        twofactor.data = email_data.to_json();
        twofactor.save(&conn)?;

        err!("Token is invalid")
    }

    email_data.reset_token();
    twofactor.data = email_data.to_json();
    twofactor.save(&conn)?;

    let date = NaiveDateTime::from_timestamp(email_data.token_sent, 0);
    let max_time = CONFIG.email_expiration_time() as i64;
    if date.add(Duration::seconds(max_time)) < Utc::now().naive_utc() {
        err!("Token has expired")
    }

    Ok(())
}

/// Data stored in the TwoFactor table in the db
#[derive(Serialize, Deserialize)]
pub struct EmailTokenData {
    /// Email address where the token will be sent to. Can be different from account email.
    pub email: String,
    /// Some(token): last valid token issued that has not been entered.
    /// None: valid token was used and removed.
    pub last_token: Option<String>,
    /// UNIX timestamp of token issue.
    pub token_sent: i64,
    /// Amount of token entry attempts for last_token.
    pub attempts: u64,
}

impl EmailTokenData {
    pub fn new(email: String, token: String) -> EmailTokenData {
        EmailTokenData {
            email,
            last_token: Some(token),
            token_sent: Utc::now().naive_utc().timestamp(),
            attempts: 0,
        }
    }

    pub fn set_token(&mut self, token: String) {
        self.last_token = Some(token);
        self.token_sent = Utc::now().naive_utc().timestamp();
    }

    pub fn reset_token(&mut self) {
        self.last_token = None;
        self.attempts = 0;
    }

    pub fn add_attempt(&mut self) {
        self.attempts += 1;
    }

    pub fn to_json(&self) -> String {
        serde_json::to_string(&self).unwrap()
    }

    pub fn from_json(string: &str) -> Result<EmailTokenData, Error> {
        let res: Result<EmailTokenData, crate::serde_json::Error> = serde_json::from_str(&string);
        match res {
            Ok(x) => Ok(x),
            Err(_) => err!("Could not decode EmailTokenData from string"),
        }
    }
}

/// Takes an email address and obscures it by replacing all but the first two characters with asterisks.
pub fn obscure_email(email: &str) -> String {
    let split: Vec<&str> = email.rsplitn(2, '@').collect();

    let mut name = split[1].to_string();
    let domain = &split[0];

    let name_size = name.chars().count();

    let new_name = match name_size {
        1..=3 => "*".repeat(name_size),
        _ => {
            let stars = "*".repeat(name_size - 2);
            name.truncate(2);
            format!("{}{}", name, stars)
        }
    };

    format!("{}@{}", new_name, &domain)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_obscure_email_long() {
        let email = "bytes@example.ext";

        let result = obscure_email(&email);

        // Only first two characters should be visible.
        assert_eq!(result, "by***@example.ext");
    }

    #[test]
    fn test_obscure_email_short() {
        let email = "byt@example.ext";

        let result = obscure_email(&email);

        // If it's smaller than 3 characters it should only show asterisks.
        assert_eq!(result, "***@example.ext");
    }

    #[test]
    fn test_token() {
        let result = crypto::generate_token(19).unwrap();

        assert_eq!(result.chars().count(), 19);
    }

    #[test]
    fn test_token_too_large() {
        let result = crypto::generate_token(20);

        assert!(result.is_err(), "too large token should give an error");
    }
}
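Note on the email 2FA file above: the whole second-factor state lives in one JSON blob in the TwoFactor row's data column. A minimal, self-contained sketch of that round-trip (the struct is re-declared locally so the snippet compiles on its own; only serde, serde_json and chrono are assumed):

use chrono::Utc;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct EmailTokenData {
    email: String,
    last_token: Option<String>,
    token_sent: i64,
    attempts: u64,
}

fn main() {
    // Issue a token: the plaintext token is stored next to its issue time.
    let data = EmailTokenData {
        email: "user@example.com".into(),
        last_token: Some("123456".into()),
        token_sent: Utc::now().naive_utc().timestamp(),
        attempts: 0,
    };

    // This JSON string is what ends up in the TwoFactor row, and what
    // from_json() parses back on validation.
    let json = serde_json::to_string(&data).unwrap();
    let back: EmailTokenData = serde_json::from_str(&json).unwrap();
    assert_eq!(back.email, "user@example.com");
    println!("{}", json);
}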
146 src/api/core/two_factor/mod.rs (Normal file)
@@ -0,0 +1,146 @@
use data_encoding::BASE32;
use rocket::Route;
use rocket_contrib::json::Json;
use serde_json;
use serde_json::Value;

use crate::api::{JsonResult, JsonUpcase, NumberOrString, PasswordData};
use crate::auth::Headers;
use crate::crypto;
use crate::db::{
    models::{TwoFactor, User},
    DbConn,
};

pub(crate) mod authenticator;
pub(crate) mod duo;
pub(crate) mod email;
pub(crate) mod u2f;
pub(crate) mod yubikey;

pub fn routes() -> Vec<Route> {
    let mut routes = routes![
        get_twofactor,
        get_recover,
        recover,
        disable_twofactor,
        disable_twofactor_put,
    ];

    routes.append(&mut authenticator::routes());
    routes.append(&mut duo::routes());
    routes.append(&mut email::routes());
    routes.append(&mut u2f::routes());
    routes.append(&mut yubikey::routes());

    routes
}

#[get("/two-factor")]
fn get_twofactor(headers: Headers, conn: DbConn) -> JsonResult {
    let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn);
    let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_list).collect();

    Ok(Json(json!({
        "Data": twofactors_json,
        "Object": "list",
        "ContinuationToken": null,
    })))
}

#[post("/two-factor/get-recover", data = "<data>")]
fn get_recover(data: JsonUpcase<PasswordData>, headers: Headers) -> JsonResult {
    let data: PasswordData = data.into_inner().data;
    let user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    Ok(Json(json!({
        "Code": user.totp_recover,
        "Object": "twoFactorRecover"
    })))
}

#[derive(Deserialize)]
#[allow(non_snake_case)]
struct RecoverTwoFactor {
    MasterPasswordHash: String,
    Email: String,
    RecoveryCode: String,
}

#[post("/two-factor/recover", data = "<data>")]
fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult {
    let data: RecoverTwoFactor = data.into_inner().data;

    use crate::db::models::User;

    // Get the user
    let mut user = match User::find_by_mail(&data.Email, &conn) {
        Some(user) => user,
        None => err!("Username or password is incorrect. Try again."),
    };

    // Check password
    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Username or password is incorrect. Try again.")
    }

    // Check if recovery code is correct
    if !user.check_valid_recovery_code(&data.RecoveryCode) {
        err!("Recovery code is incorrect. Try again.")
    }

    // Remove all twofactors from the user
    TwoFactor::delete_all_by_user(&user.uuid, &conn)?;

    // Remove the recovery code, not needed without twofactors
    user.totp_recover = None;
    user.save(&conn)?;
    Ok(Json(json!({})))
}

fn _generate_recover_code(user: &mut User, conn: &DbConn) {
    if user.totp_recover.is_none() {
        let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20]));
        user.totp_recover = Some(totp_recover);
        user.save(conn).ok();
    }
}

#[derive(Deserialize)]
#[allow(non_snake_case)]
struct DisableTwoFactorData {
    MasterPasswordHash: String,
    Type: NumberOrString,
}

#[post("/two-factor/disable", data = "<data>")]
fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: DisableTwoFactorData = data.into_inner().data;
    let password_hash = data.MasterPasswordHash;
    let user = headers.user;

    if !user.check_valid_password(&password_hash) {
        err!("Invalid password");
    }

    let type_ = data.Type.into_i32()?;

    if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) {
        twofactor.delete(&conn)?;
    }

    Ok(Json(json!({
        "Enabled": false,
        "Type": type_,
        "Object": "twoFactorProvider"
    })))
}

#[put("/two-factor/disable", data = "<data>")]
fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
    disable_twofactor(data, headers, conn)
}
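Note on _generate_recover_code above: the recovery code is just 20 random bytes, BASE32-encoded. A standalone sketch of the resulting shape (rand is used here only as a stand-in for the project's crypto::get_random):

use data_encoding::BASE32;
use rand::RngCore;

fn main() {
    let mut bytes = [0u8; 20];
    rand::thread_rng().fill_bytes(&mut bytes);
    let code = BASE32.encode(&bytes);
    // 20 bytes = 160 bits; BASE32 packs 5 bits per character, so 32 chars, no padding.
    assert_eq!(code.len(), 32);
    println!("{}", code);
}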
360 src/api/core/two_factor/u2f.rs (Normal file)
@@ -0,0 +1,360 @@
use once_cell::sync::Lazy;
use rocket::Route;
use rocket_contrib::json::Json;
use serde_json;
use serde_json::Value;
use u2f::messages::{RegisterResponse, SignResponse, U2fSignRequest};
use u2f::protocol::{Challenge, U2f};
use u2f::register::Registration;

use crate::api::core::two_factor::_generate_recover_code;
use crate::api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
use crate::auth::Headers;
use crate::db::{
    models::{TwoFactor, TwoFactorType},
    DbConn,
};
use crate::error::Error;
use crate::CONFIG;

const U2F_VERSION: &str = "U2F_V2";

static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.domain()));
static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone()));

pub fn routes() -> Vec<Route> {
    routes![
        generate_u2f,
        generate_u2f_challenge,
        activate_u2f,
        activate_u2f_put,
        delete_u2f,
    ]
}

#[post("/two-factor/get-u2f", data = "<data>")]
fn generate_u2f(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    if !CONFIG.domain_set() {
        err!("`DOMAIN` environment variable is not set. U2F disabled")
    }
    let data: PasswordData = data.into_inner().data;

    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let (enabled, keys) = get_u2f_registrations(&headers.user.uuid, &conn)?;
    let keys_json: Vec<Value> = keys.iter().map(U2FRegistration::to_json).collect();

    Ok(Json(json!({
        "Enabled": enabled,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}

#[post("/two-factor/get-u2f-challenge", data = "<data>")]
fn generate_u2f_challenge(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: PasswordData = data.into_inner().data;

    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let _type = TwoFactorType::U2fRegisterChallenge;
    let challenge = _create_u2f_challenge(&headers.user.uuid, _type, &conn).challenge;

    Ok(Json(json!({
        "UserId": headers.user.uuid,
        "AppId": APP_ID.to_string(),
        "Challenge": challenge,
        "Version": U2F_VERSION,
    })))
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct EnableU2FData {
    Id: NumberOrString,
    // 1..5
    Name: String,
    MasterPasswordHash: String,
    DeviceResponse: String,
}

// This struct is referenced from the U2F lib
// because it doesn't implement Deserialize
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(remote = "Registration")]
struct RegistrationDef {
    key_handle: Vec<u8>,
    pub_key: Vec<u8>,
    attestation_cert: Option<Vec<u8>>,
    device_name: Option<String>,
}

#[derive(Serialize, Deserialize)]
struct U2FRegistration {
    id: i32,
    name: String,
    #[serde(with = "RegistrationDef")]
    reg: Registration,
    counter: u32,
    compromised: bool,
}

impl U2FRegistration {
    fn to_json(&self) -> Value {
        json!({
            "Id": self.id,
            "Name": self.name,
            "Compromised": self.compromised,
        })
    }
}

// This struct is copied from the U2F lib
// to add an optional error code
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RegisterResponseCopy {
    pub registration_data: String,
    pub version: String,
    pub client_data: String,

    pub error_code: Option<NumberOrString>,
}

impl Into<RegisterResponse> for RegisterResponseCopy {
    fn into(self) -> RegisterResponse {
        RegisterResponse {
            registration_data: self.registration_data,
            version: self.version,
            client_data: self.client_data,
        }
    }
}

#[post("/two-factor/u2f", data = "<data>")]
fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableU2FData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let tf_type = TwoFactorType::U2fRegisterChallenge as i32;
    let tf_challenge = match TwoFactor::find_by_user_and_type(&user.uuid, tf_type, &conn) {
        Some(c) => c,
        None => err!("Can't recover challenge"),
    };

    let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
    tf_challenge.delete(&conn)?;

    let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?;

    let error_code = response
        .error_code
        .clone()
        .map_or("0".into(), NumberOrString::into_string);

    if error_code != "0" {
        err!("Error registering U2F token")
    }

    let registration = U2F.register_response(challenge, response.into())?;
    let full_registration = U2FRegistration {
        id: data.Id.into_i32()?,
        name: data.Name,
        reg: registration,
        compromised: false,
        counter: 0,
    };

    let mut regs = get_u2f_registrations(&user.uuid, &conn)?.1;

    // TODO: Check that there is no repeat Id
    regs.push(full_registration);
    save_u2f_registrations(&user.uuid, &regs, &conn)?;

    _generate_recover_code(&mut user, &conn);

    let keys_json: Vec<Value> = regs.iter().map(U2FRegistration::to_json).collect();
    Ok(Json(json!({
        "Enabled": true,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}

#[put("/two-factor/u2f", data = "<data>")]
fn activate_u2f_put(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_u2f(data, headers, conn)
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct DeleteU2FData {
    Id: NumberOrString,
    MasterPasswordHash: String,
}

#[delete("/two-factor/u2f", data = "<data>")]
fn delete_u2f(data: JsonUpcase<DeleteU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: DeleteU2FData = data.into_inner().data;

    let id = data.Id.into_i32()?;

    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let type_ = TwoFactorType::U2f as i32;
    let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, type_, &conn) {
        Some(tf) => tf,
        None => err!("U2F data not found!"),
    };

    let mut data: Vec<U2FRegistration> = match serde_json::from_str(&tf.data) {
        Ok(d) => d,
        Err(_) => err!("Error parsing U2F data"),
    };

    data.retain(|r| r.id != id);

    let new_data_str = serde_json::to_string(&data)?;

    tf.data = new_data_str;
    tf.save(&conn)?;

    let keys_json: Vec<Value> = data.iter().map(U2FRegistration::to_json).collect();

    Ok(Json(json!({
        "Enabled": true,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}

fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge {
    let challenge = U2F.generate_challenge().unwrap();

    TwoFactor::new(user_uuid.into(), type_, serde_json::to_string(&challenge).unwrap())
        .save(conn)
        .expect("Error saving challenge");

    challenge
}

fn save_u2f_registrations(user_uuid: &str, regs: &[U2FRegistration], conn: &DbConn) -> EmptyResult {
    TwoFactor::new(user_uuid.into(), TwoFactorType::U2f, serde_json::to_string(regs)?).save(&conn)
}

fn get_u2f_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec<U2FRegistration>), Error> {
    let type_ = TwoFactorType::U2f as i32;
    let (enabled, regs) = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) {
        Some(tf) => (tf.enabled, tf.data),
        None => return Ok((false, Vec::new())), // If no data, return empty list
    };

    let data = match serde_json::from_str(&regs) {
        Ok(d) => d,
        Err(_) => {
            // If error, try old format
            let mut old_regs = _old_parse_registrations(&regs);

            if old_regs.len() != 1 {
                err!("The old U2F format only allows one device")
            }

            // Convert to new format
            let new_regs = vec![U2FRegistration {
                id: 1,
                name: "Unnamed U2F key".into(),
                reg: old_regs.remove(0),
                compromised: false,
                counter: 0,
            }];

            // Save new format
            save_u2f_registrations(user_uuid, &new_regs, &conn)?;

            new_regs
        }
    };

    Ok((enabled, data))
}

fn _old_parse_registrations(registations: &str) -> Vec<Registration> {
    #[derive(Deserialize)]
    struct Helper(#[serde(with = "RegistrationDef")] Registration);

    let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data");

    regs.into_iter()
        .map(|r| serde_json::from_value(r).unwrap())
        .map(|Helper(r)| r)
        .collect()
}

pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> {
    let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn);

    let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?
        .1
        .into_iter()
        .map(|r| r.reg)
        .collect();

    if registrations.is_empty() {
        err!("No U2F devices registered")
    }

    Ok(U2F.sign_request(challenge, registrations))
}

pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult {
    let challenge_type = TwoFactorType::U2fLoginChallenge as i32;
    let tf_challenge = TwoFactor::find_by_user_and_type(user_uuid, challenge_type, &conn);

    let challenge = match tf_challenge {
        Some(tf_challenge) => {
            let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
            tf_challenge.delete(&conn)?;
            challenge
        }
        None => err!("Can't recover login challenge"),
    };
    let response: SignResponse = serde_json::from_str(response)?;
    let mut registrations = get_u2f_registrations(user_uuid, conn)?.1;
    if registrations.is_empty() {
        err!("No U2F devices registered")
    }

    for reg in &mut registrations {
        let response = U2F.sign_response(challenge.clone(), reg.reg.clone(), response.clone(), reg.counter);
        match response {
            Ok(new_counter) => {
                reg.counter = new_counter;
                save_u2f_registrations(user_uuid, &registrations, &conn)?;

                return Ok(());
            }
            Err(u2f::u2ferror::U2fError::CounterTooLow) => {
                reg.compromised = true;
                save_u2f_registrations(user_uuid, &registrations, &conn)?;

                err!("This device might be compromised!");
            }
            Err(e) => {
                warn!("E {:#}", e);
                // break;
            }
        }
    }
    err!("error verifying response")
}
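Note on validate_u2f_login above: the loop leans on the u2f crate's signature-counter check, which returns U2fError::CounterTooLow when a response's counter does not advance past the stored value (a cloned authenticator would replay old counters). A standalone sketch of just that rule, separate from the crate:

// Replay protection: a U2F device ships a monotonically increasing counter.
fn check_counter(stored: u32, reported: u32) -> Result<u32, &'static str> {
    if reported > stored {
        Ok(reported) // persist this as the new stored counter
    } else {
        Err("counter too low: device might be compromised")
    }
}

fn main() {
    assert_eq!(check_counter(10, 11), Ok(11));
    assert!(check_counter(10, 10).is_err());
}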
194 src/api/core/two_factor/yubikey.rs (Normal file)
@@ -0,0 +1,194 @@
use rocket::Route;
use rocket_contrib::json::Json;
use serde_json;
use serde_json::Value;
use yubico::config::Config;
use yubico::verify;

use crate::api::core::two_factor::_generate_recover_code;
use crate::api::{EmptyResult, JsonResult, JsonUpcase, PasswordData};
use crate::auth::Headers;
use crate::db::{
    models::{TwoFactor, TwoFactorType},
    DbConn,
};
use crate::error::{Error, MapResult};
use crate::CONFIG;

pub fn routes() -> Vec<Route> {
    routes![generate_yubikey, activate_yubikey, activate_yubikey_put,]
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct EnableYubikeyData {
    MasterPasswordHash: String,
    Key1: Option<String>,
    Key2: Option<String>,
    Key3: Option<String>,
    Key4: Option<String>,
    Key5: Option<String>,
    Nfc: bool,
}

#[derive(Deserialize, Serialize, Debug)]
#[allow(non_snake_case)]
pub struct YubikeyMetadata {
    Keys: Vec<String>,
    pub Nfc: bool,
}

fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
    let data_keys = [&data.Key1, &data.Key2, &data.Key3, &data.Key4, &data.Key5];

    data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect()
}

fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value {
    let mut result = json!({});

    for (i, key) in yubikeys.into_iter().enumerate() {
        result[format!("Key{}", i + 1)] = Value::String(key);
    }

    result
}

fn get_yubico_credentials() -> Result<(String, String), Error> {
    if !CONFIG._enable_yubico() {
        err!("Yubico support is disabled");
    }

    match (CONFIG.yubico_client_id(), CONFIG.yubico_secret_key()) {
        (Some(id), Some(secret)) => Ok((id, secret)),
        _ => err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. Yubikey OTP Disabled"),
    }
}

fn verify_yubikey_otp(otp: String) -> EmptyResult {
    let (yubico_id, yubico_secret) = get_yubico_credentials()?;

    let config = Config::default().set_client_id(yubico_id).set_key(yubico_secret);

    match CONFIG.yubico_server() {
        Some(server) => verify(otp, config.set_api_hosts(vec![server])),
        None => verify(otp, config),
    }
    .map_res("Failed to verify OTP")
    .and(Ok(()))
}

#[post("/two-factor/get-yubikey", data = "<data>")]
fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    // Make sure the credentials are set
    get_yubico_credentials()?;

    let data: PasswordData = data.into_inner().data;
    let user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let user_uuid = &user.uuid;
    let yubikey_type = TwoFactorType::YubiKey as i32;

    let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &conn);

    if let Some(r) = r {
        let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?;

        let mut result = jsonify_yubikeys(yubikey_metadata.Keys);

        result["Enabled"] = Value::Bool(true);
        result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
        result["Object"] = Value::String("twoFactorU2f".to_owned());

        Ok(Json(result))
    } else {
        Ok(Json(json!({
            "Enabled": false,
            "Object": "twoFactorU2f",
        })))
    }
}

#[post("/two-factor/yubikey", data = "<data>")]
fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableYubikeyData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    // Check if we already have some data
    let mut yubikey_data = match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &conn) {
        Some(data) => data,
        None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()),
    };

    let yubikeys = parse_yubikeys(&data);

    if yubikeys.is_empty() {
        return Ok(Json(json!({
            "Enabled": false,
            "Object": "twoFactorU2f",
        })));
    }

    // Ensure they are valid OTPs
    for yubikey in &yubikeys {
        if yubikey.len() == 12 {
            // YubiKey ID
            continue;
        }

        verify_yubikey_otp(yubikey.to_owned()).map_res("Invalid Yubikey OTP provided")?;
    }

    let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (&x[..12]).to_owned()).collect();

    let yubikey_metadata = YubikeyMetadata {
        Keys: yubikey_ids,
        Nfc: data.Nfc,
    };

    yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap();
    yubikey_data.save(&conn)?;

    _generate_recover_code(&mut user, &conn);

    let mut result = jsonify_yubikeys(yubikey_metadata.Keys);

    result["Enabled"] = Value::Bool(true);
    result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
    result["Object"] = Value::String("twoFactorU2f".to_owned());

    Ok(Json(result))
}

#[put("/two-factor/yubikey", data = "<data>")]
fn activate_yubikey_put(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_yubikey(data, headers, conn)
}

pub fn validate_yubikey_login(response: &str, twofactor_data: &str) -> EmptyResult {
    if response.len() != 44 {
        err!("Invalid Yubikey OTP length");
    }

    let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata");
    let response_id = &response[..12];

    if !yubikey_metadata.Keys.contains(&response_id.to_owned()) {
        err!("Given Yubikey is not registered");
    }

    let result = verify_yubikey_otp(response.to_owned());

    match result {
        Ok(_answer) => Ok(()),
        Err(_e) => err!("Failed to verify Yubikey against OTP server"),
    }
}
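Note on validate_yubikey_login above: a Yubico OTP is 44 modhex characters, and the first 12 are the key's stable public ID, which is what gets stored in YubikeyMetadata.Keys and matched on login. A standalone sketch of the slicing (the OTP string is made up, only its length matters):

fn public_id(otp: &str) -> Option<&str> {
    if otp.len() == 44 {
        Some(&otp[..12]) // stable per-key prefix; the remaining 32 chars are one-time
    } else {
        None
    }
}

fn main() {
    let otp = "ccccccclulvjtugnjuuufvbchdrdjcuvvftebcjjnhgr";
    assert_eq!(public_id(otp), Some("ccccccclulvj"));
}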
115 src/api/icons.rs
@@ -1,12 +1,14 @@
+use once_cell::sync::Lazy;
 use std::fs::{create_dir_all, remove_file, symlink_metadata, File};
 use std::io::prelude::*;
 use std::net::ToSocketAddrs;
 use std::time::{Duration, SystemTime};
 
 use rocket::http::ContentType;
 use rocket::response::Content;
 use rocket::Route;
 
-use reqwest::{header::HeaderMap, Client, Response};
+use reqwest::{Url, header::HeaderMap, blocking::Client, blocking::Response};
 
 use rocket::http::Cookie;
 
@@ -15,6 +17,7 @@ use soup::prelude::*;
 
 use crate::error::Error;
 use crate::CONFIG;
+use crate::util::Cached;
 
 pub fn routes() -> Vec<Route> {
     routes![icon]
@@ -24,16 +27,14 @@ const FALLBACK_ICON: &[u8; 344] = include_bytes!("../static/fallback-icon.png");
 
 const ALLOWED_CHARS: &str = "_-.";
 
-lazy_static! {
-    // Reuse the client between requests
-    static ref CLIENT: Client = Client::builder()
-        .use_sys_proxy()
-        .gzip(true)
+static CLIENT: Lazy<Client> = Lazy::new(|| {
+    // Reuse the client between requests
+    Client::builder()
         .timeout(Duration::from_secs(CONFIG.icon_download_timeout()))
         .default_headers(_header_map())
        .build()
-        .unwrap();
-}
+        .unwrap()
+});
 
 fn is_valid_domain(domain: &str) -> bool {
     // Don't allow empty or too big domains or path traversal
@@ -52,28 +53,44 @@ fn is_valid_domain(domain: &str) -> bool {
 }
 
 #[get("/<domain>/icon.png")]
-fn icon(domain: String) -> Content<Vec<u8>> {
+fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
     let icon_type = ContentType::new("image", "x-icon");
 
     if !is_valid_domain(&domain) {
         warn!("Invalid domain: {:#?}", domain);
-        return Content(icon_type, FALLBACK_ICON.to_vec());
+        return Cached::long(Content(icon_type, FALLBACK_ICON.to_vec()));
     }
 
+    Cached::long(Content(icon_type, get_icon(&domain)))
+}
+
+fn check_icon_domain_is_blacklisted(domain: &str) -> bool {
+    let mut is_blacklisted = CONFIG.icon_blacklist_non_global_ips()
+        && (domain, 0)
+            .to_socket_addrs()
+            .map(|x| {
+                for ip_port in x {
+                    if !ip_port.ip().is_global() {
+                        warn!("IP {} for domain '{}' is not a global IP!", ip_port.ip(), domain);
+                        return true;
+                    }
+                }
+                false
+            })
+            .unwrap_or(false);
+
+    // Skip the regex check if the previous one is true already
+    if !is_blacklisted {
         if let Some(blacklist) = CONFIG.icon_blacklist_regex() {
             info!("Icon blacklist enabled: {:#?}", blacklist);
 
             let regex = Regex::new(&blacklist).expect("Valid Regex");
 
             if regex.is_match(&domain) {
-                warn!("Blacklisted domain: {:#?}", domain);
-                return Content(icon_type, FALLBACK_ICON.to_vec());
+                warn!("Blacklisted domain: {:#?} matched {:#?}", domain, blacklist);
+                is_blacklisted = true;
             }
         }
     }
 
-    let icon = get_icon(&domain);
-
-    Content(icon_type, icon)
+    is_blacklisted
 }
 
 fn get_icon(domain: &str) -> Vec<u8> {
@@ -95,7 +112,9 @@ fn get_icon(domain: &str) -> Vec<u8> {
         }
         Err(e) => {
             error!("Error downloading icon: {:?}", e);
-            mark_negcache(&path);
+            let miss_indicator = path + ".miss";
+            let empty_icon = Vec::new();
+            save_icon(&miss_indicator, &empty_icon);
             FALLBACK_ICON.to_vec()
         }
     }
@@ -151,11 +170,6 @@ fn icon_is_negcached(path: &str) -> bool {
     }
 }
 
-fn mark_negcache(path: &str) {
-    let miss_indicator = path.to_owned() + ".miss";
-    File::create(&miss_indicator).expect("Error creating negative cache marker");
-}
-
 fn icon_is_expired(path: &str) -> bool {
     let expired = file_is_expired(path, CONFIG.icon_cache_ttl());
     expired.unwrap_or(true)
@@ -199,9 +213,10 @@ fn get_icon_url(domain: &str) -> Result<(Vec<Icon>, String), Error> {
     let mut cookie_str = String::new();
 
     let resp = get_page(&ssldomain).or_else(|_| get_page(&httpdomain));
-    if let Ok(content) = resp {
+    if let Ok(mut content) = resp {
+        // Extract the URL from the response in case redirects occurred (like @ gitlab.com)
         let url = content.url().clone();
 
         let raw_cookies = content.headers().get_all("set-cookie");
         cookie_str = raw_cookies
             .iter()
@@ -218,12 +233,16 @@ fn get_icon_url(domain: &str) -> Result<(Vec<Icon>, String), Error> {
         // Add the default favicon.ico to the list with the domain the content responded from.
         iconlist.push(Icon::new(35, url.join("/favicon.ico").unwrap().into_string()));
 
-        let soup = Soup::from_reader(content)?;
+        // 512KB should be more than enough for the HTML, though as we only really need
+        // the HTML header, it could potentially be reduced even further
+        let limited_reader = crate::util::LimitedReader::new(&mut content, 512 * 1024);
+
+        let soup = Soup::from_reader(limited_reader)?;
         // Search for and filter
         let favicons = soup
             .tag("link")
             .attr("rel", Regex::new(r"icon$|apple.*icon")?) // Only use icon rels
-            .attr("href", Regex::new(r"(?i)\w+\.(jpg|jpeg|png|ico)(\?.*)?$")?) // Only allow specific extensions
+            .attr("href", Regex::new(r"(?i)\w+\.(jpg|jpeg|png|ico)(\?.*)?$|^data:image.*base64")?) // Only allow specific extensions or base64 data URIs
            .find_all();
 
         // Loop through all the found icons and determine its priority
@@ -239,6 +258,7 @@ fn get_icon_url(domain: &str) -> Result<(Vec<Icon>, String), Error> {
     } else {
         // Add the default favicon.ico to the list with just the given domain
         iconlist.push(Icon::new(35, format!("{}/favicon.ico", ssldomain)));
+        iconlist.push(Icon::new(35, format!("{}/favicon.ico", httpdomain)));
     }
 
     // Sort the iconlist by priority
@@ -253,12 +273,20 @@ fn get_page(url: &str) -> Result<Response, Error> {
 }
 
 fn get_page_with_cookies(url: &str, cookie_str: &str) -> Result<Response, Error> {
+    if check_icon_domain_is_blacklisted(Url::parse(url).unwrap().host_str().unwrap_or_default()) {
+        err!("Favicon rel linked to a non blacklisted domain!");
+    }
+
     if cookie_str.is_empty() {
         CLIENT.get(url).send()?.error_for_status().map_err(Into::into)
     } else {
         CLIENT
             .get(url)
             .header("cookie", cookie_str)
             .send()?
            .error_for_status()
            .map_err(Into::into)
     }
 }
 
 /// Returns an Integer with the priority of the type of the icon which to prefer.
@@ -341,11 +369,31 @@ fn parse_sizes(sizes: Option<String>) -> (u16, u16) {
 }
 
 fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
+    if check_icon_domain_is_blacklisted(domain) {
+        err!("Domain is blacklisted", domain)
+    }
+
     let (iconlist, cookie_str) = get_icon_url(&domain)?;
 
     let mut buffer = Vec::new();
 
+    use data_url::DataUrl;
+
     for icon in iconlist.iter().take(5) {
+        if icon.href.starts_with("data:image") {
+            let datauri = DataUrl::process(&icon.href).unwrap();
+            // Check if we are able to decode the data uri
+            match datauri.decode_to_vec() {
+                Ok((body, _fragment)) => {
+                    // Also check if the size is at least 67 bytes, which seems to be the smallest PNG I could create
+                    if body.len() >= 67 {
+                        buffer = body;
+                        break;
+                    }
+                }
+                _ => warn!("data uri is invalid"),
+            };
+        } else {
             match get_page_with_cookies(&icon.href, &cookie_str) {
                 Ok(mut res) => {
                     info!("Downloaded icon from {}", icon.href);
@@ -355,6 +403,7 @@ fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
                 Err(_) => info!("Download failed for {}", icon.href),
             };
+        }
     }
 
     if buffer.is_empty() {
         err!("Empty response")
@@ -364,11 +413,17 @@ fn download_icon(domain: &str) -> Result<Vec<u8>, Error> {
 }
 
 fn save_icon(path: &str, icon: &[u8]) {
-    create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache");
-
-    if let Ok(mut f) = File::create(path) {
-        f.write_all(icon).expect("Error writing icon file");
-    };
+    match File::create(path) {
+        Ok(mut f) => {
+            f.write_all(icon).expect("Error writing icon file");
+        }
+        Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => {
+            create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache");
+        }
+        Err(e) => {
+            info!("Icon save error: {:?}", e);
+        }
+    }
 }
 
 fn _header_map() -> HeaderMap {
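Note on the LimitedReader used in the get_icon_url hunk above: its definition lives in util.rs, outside this diff. A plausible minimal version (an assumption, not the actual util.rs code) wraps any Read and reports EOF once the byte budget is spent; std's Read::take offers the same behavior:

use std::io::{self, Read};

pub struct LimitedReader<R> {
    inner: R,
    remaining: usize,
}

impl<R: Read> LimitedReader<R> {
    pub fn new(inner: R, limit: usize) -> Self {
        LimitedReader { inner, remaining: limit }
    }
}

impl<R: Read> Read for LimitedReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if self.remaining == 0 {
            return Ok(0); // pretend EOF once the budget is spent
        }
        let max = buf.len().min(self.remaining);
        let n = self.inner.read(&mut buf[..max])?;
        self.remaining -= n;
        Ok(n)
    }
}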
@@ -1,22 +1,18 @@
 use chrono::Utc;
+use num_traits::FromPrimitive;
 use rocket::request::{Form, FormItems, FromForm};
 use rocket::Route;
-
 use rocket_contrib::json::Json;
 use serde_json::Value;
 
-use num_traits::FromPrimitive;
-
+use crate::api::core::two_factor::email::EmailTokenData;
+use crate::api::core::two_factor::{duo, email, yubikey};
+use crate::api::{ApiResult, EmptyResult, JsonResult};
+use crate::auth::ClientIp;
 use crate::db::models::*;
 use crate::db::DbConn;
-
-use crate::util;
-
-use crate::api::{ApiResult, EmptyResult, JsonResult};
-
-use crate::auth::ClientIp;
-
 use crate::mail;
-
 use crate::util;
 use crate::CONFIG;
 
 pub fn routes() -> Vec<Route> {
@@ -101,6 +97,34 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
         )
     }
 
+    if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
+        let now = Utc::now().naive_utc();
+        if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 {
+            let resend_limit = CONFIG.signups_verify_resend_limit() as i32;
+            if resend_limit == 0 || user.login_verify_count < resend_limit {
+                // We want to send another email verification if we require signups to verify
+                // their email address, and we haven't sent them a reminder in a while...
+                let mut user = user;
+                user.last_verifying_at = Some(now);
+                user.login_verify_count += 1;
+
+                if let Err(e) = user.save(&conn) {
+                    error!("Error updating user: {:#?}", e);
+                }
+
+                if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) {
+                    error!("Error auto-sending email verification email: {:#?}", e);
+                }
+            }
+        }
+
+        // We still want the login to fail until they actually verified the email address
+        err!(
+            "Please verify your email before trying again.",
+            format!("IP: {}. Username: {}.", ip.ip, username)
+        )
+    }
+
     let (mut device, new_device) = get_device(&data, &conn, &user);
 
     let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, &conn)?;
@@ -129,6 +153,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
         "refresh_token": device.refresh_token,
         "Key": user.akey,
         "PrivateKey": user.private_key,
+        //"TwoFactorToken": "11122233333444555666777888999"
     });
 
     if let Some(token) = twofactor_token {
@@ -186,10 +211,13 @@ fn twofactor_auth(
 
     let twofactor_code = match data.two_factor_token {
         Some(ref code) => code,
-        None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?),
+        None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"),
     };
 
-    let selected_twofactor = twofactors.into_iter().filter(|tf| tf.atype == selected_id).nth(0);
+    let selected_twofactor = twofactors
+        .into_iter()
+        .filter(|tf| tf.atype == selected_id && tf.enabled)
+        .nth(0);
 
     use crate::api::core::two_factor as _tf;
     use crate::crypto::ct_eq;
@@ -198,17 +226,18 @@ fn twofactor_auth(
     let mut remember = data.two_factor_remember.unwrap_or(0);
 
     match TwoFactorType::from_i32(selected_id) {
-        Some(TwoFactorType::Authenticator) => _tf::validate_totp_code_str(twofactor_code, &selected_data?)?,
-        Some(TwoFactorType::U2f) => _tf::validate_u2f_login(user_uuid, twofactor_code, conn)?,
-        Some(TwoFactorType::YubiKey) => _tf::validate_yubikey_login(twofactor_code, &selected_data?)?,
-        Some(TwoFactorType::Duo) => _tf::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
+        Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
+        Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?,
+        Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?,
+        Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?,
+        Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?,
 
         Some(TwoFactorType::Remember) => {
            match device.twofactor_remember {
                Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => {
                    remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time
                }
-                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?),
+                _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"),
            }
        }
        _ => err!("Invalid two factor provider"),
@@ -246,7 +275,7 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
         Some(TwoFactorType::Authenticator) => { /* Nothing to do for TOTP */ }
 
         Some(TwoFactorType::U2f) if CONFIG.domain_set() => {
-            let request = two_factor::generate_u2f_login(user_uuid, conn)?;
+            let request = two_factor::u2f::generate_u2f_login(user_uuid, conn)?;
             let mut challenge_list = Vec::new();
 
             for key in request.registered_keys {
@@ -271,7 +300,7 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
             None => err!("User does not exist"),
         };
 
-        let (signature, host) = two_factor::generate_duo_signature(&email, conn)?;
+        let (signature, host) = duo::generate_duo_signature(&email, conn)?;
 
         result["TwoFactorProviders2"][provider.to_string()] = json!({
             "Host": host,
@@ -285,13 +314,32 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
             None => err!("No YubiKey devices registered"),
         };
 
-        let yubikey_metadata: two_factor::YubikeyMetadata = serde_json::from_str(&twofactor.data)?;
+        let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?;
 
         result["TwoFactorProviders2"][provider.to_string()] = json!({
            "Nfc": yubikey_metadata.Nfc,
        })
    }
 
+        Some(tf_type @ TwoFactorType::Email) => {
+            use crate::api::core::two_factor as _tf;
+
+            let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, &conn) {
+                Some(tf) => tf,
+                None => err!("No twofactor email registered"),
+            };
+
+            // Send email immediately if email is the only 2FA option
+            if providers.len() == 1 {
+                _tf::email::send_token(&user_uuid, &conn)?
+            }
+
+            let email_data = EmailTokenData::from_json(&twofactor.data)?;
+            result["TwoFactorProviders2"][provider.to_string()] = json!({
+                "Email": email::obscure_email(&email_data.email),
+            })
+        }
+
        _ => {}
    }
 }
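Note on the verification-resend block added to _password_login above: the gate is two conditions, a cool-down since the last mail and a resend cap (where 0 means unlimited). Distilled into a plain function for clarity (the names here are illustrative, not from the codebase):

fn should_resend(seconds_since_last: Option<i64>, resend_time: i64, count: i32, resend_limit: i32) -> bool {
    let waited_long_enough = match seconds_since_last {
        None => true, // never sent before
        Some(s) => s > resend_time,
    };
    waited_long_enough && (resend_limit == 0 || count < resend_limit)
}

fn main() {
    assert!(should_resend(None, 3600, 0, 6));
    assert!(!should_resend(Some(60), 3600, 0, 6));   // too soon
    assert!(!should_resend(Some(7200), 3600, 6, 6)); // limit reached
}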
@@ -1,20 +1,31 @@
+use std::sync::atomic::{AtomicBool, Ordering};
+
 use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value as JsonValue;
 
-use crate::api::JsonResult;
+use crate::api::{EmptyResult, JsonResult};
 use crate::auth::Headers;
 use crate::db::DbConn;
 
-use crate::CONFIG;
+use crate::{Error, CONFIG};
 
 pub fn routes() -> Vec<Route> {
     routes![negotiate, websockets_err]
 }
 
+static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true);
+
 #[get("/hub")]
-fn websockets_err() -> JsonResult {
-    err!("'/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the README for more info.")
+fn websockets_err() -> EmptyResult {
+    if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_and_swap(true, false, Ordering::Relaxed) {
+        err!("###########################################################
+    '/notifications/hub' should be proxied to the websocket server or notifications won't work.
+    Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false.
+    ###########################################################################################")
+    } else {
+        Err(Error::empty())
+    }
 }
 
 #[post("/hub/negotiate")]
@@ -43,10 +54,11 @@ fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
 //
 // Websockets server
 //
+use std::io;
 use std::sync::Arc;
 use std::thread;
 
-use ws::{self, util::Token, Factory, Handler, Handshake, Message, Sender, WebSocket};
+use ws::{self, util::Token, Factory, Handler, Handshake, Message, Sender};
 
 use chashmap::CHashMap;
 use chrono::NaiveDateTime;
@@ -124,20 +136,51 @@ struct InitialMessage {
 
 const PING_MS: u64 = 15_000;
 const PING: Token = Token(1);
 
+const ID_KEY: &str = "id=";
+const ACCESS_TOKEN_KEY: &str = "access_token=";
+
+impl WSHandler {
+    fn err(&self, msg: &'static str) -> ws::Result<()> {
+        self.out.close(ws::CloseCode::Invalid)?;
+
+        // We need to specifically return an IO error so ws closes the connection
+        let io_error = io::Error::from(io::ErrorKind::InvalidData);
+        Err(ws::Error::new(ws::ErrorKind::Io(io_error), msg))
+    }
+}
+
 impl Handler for WSHandler {
     fn on_open(&mut self, hs: Handshake) -> ws::Result<()> {
-        // TODO: Improve this split
         // Path == "/notifications/hub?id=<id>==&access_token=<access_token>"
         let path = hs.request.resource();
-        let mut query_split: Vec<_> = path.split('?').nth(1).unwrap().split('&').collect();
-        query_split.sort();
-        let access_token = &query_split[0][13..];
-        let _id = &query_split[1][3..];
 
+        let (_id, access_token) = match path.split('?').nth(1) {
+            Some(params) => {
+                let mut params_iter = params.split('&').take(2);
+
+                let mut id = None;
+                let mut access_token = None;
+                while let Some(val) = params_iter.next() {
+                    if val.starts_with(ID_KEY) {
+                        id = Some(&val[ID_KEY.len()..]);
+                    } else if val.starts_with(ACCESS_TOKEN_KEY) {
+                        access_token = Some(&val[ACCESS_TOKEN_KEY.len()..]);
+                    }
+                }
+
+                match (id, access_token) {
+                    (Some(a), Some(b)) => (a, b),
+                    _ => return self.err("Missing id or access token"),
+                }
+            }
+            None => return self.err("Missing query path"),
+        };
 
         // Validate the user
         use crate::auth;
         let claims = match auth::decode_login(access_token) {
             Ok(claims) => claims,
-            Err(_) => return Err(ws::Error::new(ws::ErrorKind::Internal, "Invalid access token provided")),
+            Err(_) => return self.err("Invalid access token provided"),
         };
 
         // Assign the user to the handler
@@ -157,8 +200,6 @@ impl Handler for WSHandler {
     }
 
     fn on_message(&mut self, msg: Message) -> ws::Result<()> {
-        info!("Server got message '{}'. ", msg);
-
         if let Message::Text(text) = msg.clone() {
             let json = &text[..text.len() - 1]; // Remove last char
@@ -181,10 +222,7 @@ impl Handler for WSHandler {
             // reschedule the timeout
             self.out.timeout(PING_MS, PING)
         } else {
-            Err(ws::Error::new(
-                ws::ErrorKind::Internal,
-                "Invalid timeout token provided",
-            ))
+            Ok(())
         }
     }
 }
@@ -353,7 +391,14 @@ pub fn start_notification_server() -> WebSocketUsers {
 
     if CONFIG.websocket_enabled() {
         thread::spawn(move || {
-            WebSocket::new(factory)
+            let mut settings = ws::Settings::default();
+            settings.max_connections = 500;
+            settings.queue_size = 2;
+            settings.panic_on_internal = false;
+
+            ws::Builder::new()
+                .with_settings(settings)
+                .build(factory)
                 .unwrap()
                 .listen((CONFIG.websocket_address().as_str(), CONFIG.websocket_port()))
                 .unwrap();
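Note on SHOW_WEBSOCKETS_MSG above: compare_and_swap returns the previous value, so exactly one caller ever observes true and the proxy warning is logged once. A standalone sketch of the same one-shot latch (compare_and_swap was later deprecated in favor of compare_exchange, which this sketch uses):

use std::sync::atomic::{AtomicBool, Ordering};

static SHOWN: AtomicBool = AtomicBool::new(true);

fn first_time() -> bool {
    // Succeeds only for the caller that flips true -> false.
    SHOWN.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok()
}

fn main() {
    assert!(first_time());   // first caller wins
    assert!(!first_time());  // everyone after sees false
}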
@@ -1,4 +1,3 @@
-use std::io;
 use std::path::{Path, PathBuf};
 
 use rocket::http::ContentType;
@@ -8,23 +7,23 @@ use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;
 
-use crate::util::Cached;
 use crate::error::Error;
+use crate::util::Cached;
 use crate::CONFIG;
 
 pub fn routes() -> Vec<Route> {
+    // If adding more routes here, consider also adding them to
+    // crate::utils::LOGGED_ROUTES to make sure they appear in the log
     if CONFIG.web_vault_enabled() {
-        routes![web_index, app_id, web_files, attachments, alive, images]
+        routes![web_index, app_id, web_files, attachments, alive, static_files]
     } else {
-        routes![attachments, alive]
+        routes![attachments, alive, static_files]
     }
 }
 
 #[get("/")]
-fn web_index() -> Cached<io::Result<NamedFile>> {
-    Cached::short(NamedFile::open(
-        Path::new(&CONFIG.web_vault_folder()).join("index.html"),
-    ))
+fn web_index() -> Cached<Option<NamedFile>> {
+    Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok())
 }
 
 #[get("/app-id.json")]
@@ -38,7 +37,17 @@ fn app_id() -> Cached<Content<Json<Value>>> {
     {
         "version": { "major": 1, "minor": 0 },
         "ids": [
-            &CONFIG.domain(),
+            // Per <https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-appid-and-facets-v2.0-id-20180227.html#determining-the-facetid-of-a-calling-application>:
+            //
+            // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
+            // of the web page triggering the FIDO operation, written as
+            // a URI with an empty path. Default ports are omitted and any
+            // path component is ignored."
+            //
+            // This leaves it unclear as to whether the path must be empty,
+            // or whether it can be non-empty and will be ignored. To be on
+            // the safe side, use a proper web origin (with empty path).
+            &CONFIG.domain_origin(),
             "ios:bundle-id:com.8bit.bitwarden",
             "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
     }]
@@ -47,13 +56,13 @@ fn app_id() -> Cached<Content<Json<Value>>> {
 }
 
 #[get("/<p..>", rank = 10)] // Only match this if the other routes don't match
-fn web_files(p: PathBuf) -> Cached<io::Result<NamedFile>> {
-    Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)))
+fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
+    Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok())
 }
 
 #[get("/attachments/<uuid>/<file..>")]
-fn attachments(uuid: String, file: PathBuf) -> io::Result<NamedFile> {
-    NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file))
+fn attachments(uuid: String, file: PathBuf) -> Option<NamedFile> {
+    NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file)).ok()
 }
 
 #[get("/alive")]
@@ -64,12 +73,18 @@ fn alive() -> Json<String> {
     Json(format_date(&Utc::now().naive_utc()))
 }
 
-#[get("/bwrs_images/<filename>")]
-fn images(filename: String) -> Result<Content<&'static [u8]>, Error> {
+#[get("/bwrs_static/<filename>")]
+fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
     match filename.as_ref() {
         "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),
         "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))),
         "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))),
-        _ => err!("Image not found"),
+        "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
+
+        "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
+        "bootstrap-native-v4.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native-v4.js"))),
+        "md5.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/md5.js"))),
+        "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
+        _ => err!(format!("Static file not found: {}", filename)),
     }
 }
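Note on the domain() to domain_origin() switch in app-id.json above: a web origin is scheme plus host plus any non-default port, with the path dropped. A sketch of that rule using the url crate (an illustration, not CONFIG's actual implementation):

use url::Url;

fn origin_of(s: &str) -> String {
    let u = Url::parse(s).unwrap();
    // ascii_serialization omits default ports and never includes a path.
    u.origin().ascii_serialization()
}

fn main() {
    assert_eq!(origin_of("https://vault.example.com/path/x"), "https://vault.example.com");
    assert_eq!(origin_of("https://vault.example.com:8443/"), "https://vault.example.com:8443");
}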
151 src/auth.rs
@@ -3,8 +3,10 @@
|
||||
//
|
||||
use crate::util::read_file;
|
||||
use chrono::{Duration, Utc};
|
||||
use once_cell::sync::Lazy;
|
||||
use num_traits::FromPrimitive;
|
||||
|
||||
use jsonwebtoken::{self, Algorithm, Header};
|
||||
use jsonwebtoken::{self, Algorithm, Header, EncodingKey, DecodingKey};
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::ser::Serialize;
|
||||
|
||||
@@ -13,24 +15,24 @@ use crate::CONFIG;
|
||||
|
||||
const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
|
||||
|
||||
lazy_static! {
|
||||
pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2);
|
||||
static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM);
|
||||
pub static ref JWT_LOGIN_ISSUER: String = format!("{}|login", CONFIG.domain());
|
||||
pub static ref JWT_INVITE_ISSUER: String = format!("{}|invite", CONFIG.domain());
|
||||
pub static ref JWT_ADMIN_ISSUER: String = format!("{}|admin", CONFIG.domain());
|
||||
static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key()) {
|
||||
pub static DEFAULT_VALIDITY: Lazy<Duration> = Lazy::new(|| Duration::hours(2));
|
||||
static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
|
||||
pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
|
||||
static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
|
||||
static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
|
||||
static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
|
||||
static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
|
||||
static PRIVATE_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.private_rsa_key()) {
|
||||
Ok(key) => key,
|
||||
Err(e) => panic!("Error loading private RSA Key.\n Error: {}", e),
|
||||
};
|
||||
static ref PUBLIC_RSA_KEY: Vec<u8> = match read_file(&CONFIG.public_rsa_key()) {
|
||||
});
|
||||
static PUBLIC_RSA_KEY: Lazy<Vec<u8>> = Lazy::new(|| match read_file(&CONFIG.public_rsa_key()) {
|
||||
Ok(key) => key,
|
||||
Err(e) => panic!("Error loading public RSA Key.\n Error: {}", e),
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
|
||||
match jsonwebtoken::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) {
|
||||
match jsonwebtoken::encode(&JWT_HEADER, claims, &EncodingKey::from_rsa_der(&PRIVATE_RSA_KEY)) {
|
||||
Ok(token) => token,
|
||||
Err(e) => panic!("Error encoding jwt {}", e),
|
||||
}
@@ -49,7 +51,7 @@ fn decode_jwt<T: DeserializeOwned>(token: &str, issuer: String) -> Result<T, Err

    let token = token.replace(char::is_whitespace, "");

    jsonwebtoken::decode(&token, &PUBLIC_RSA_KEY, &validation)
    jsonwebtoken::decode(&token, &DecodingKey::from_rsa_der(&PUBLIC_RSA_KEY), &validation)
        .map(|d| d.claims)
        .map_res("Error decoding JWT")
}

@@ -62,6 +64,14 @@ pub fn decode_invite(token: &str) -> Result<InviteJWTClaims, Error> {
    decode_jwt(token, JWT_INVITE_ISSUER.to_string())
}

pub fn decode_delete(token: &str) -> Result<DeleteJWTClaims, Error> {
    decode_jwt(token, JWT_DELETE_ISSUER.to_string())
}

pub fn decode_verify_email(token: &str) -> Result<VerifyEmailJWTClaims, Error> {
    decode_jwt(token, JWT_VERIFYEMAIL_ISSUER.to_string())
}

pub fn decode_admin(token: &str) -> Result<AdminJWTClaims, Error> {
    decode_jwt(token, JWT_ADMIN_ISSUER.to_string())
}

@@ -118,7 +128,7 @@ pub fn generate_invite_claims(
    uuid: String,
    email: String,
    org_id: Option<String>,
    org_user_id: Option<String>,
    user_org_id: Option<String>,
    invited_by_email: Option<String>,
) -> InviteJWTClaims {
    let time_now = Utc::now().naive_utc();

@@ -126,11 +136,55 @@ generate_invite_claims(
        nbf: time_now.timestamp(),
        exp: (time_now + Duration::days(5)).timestamp(),
        iss: JWT_INVITE_ISSUER.to_string(),
        sub: uuid.clone(),
        email: email.clone(),
        org_id: org_id.clone(),
        user_org_id: org_user_id.clone(),
        invited_by_email: invited_by_email.clone(),
        sub: uuid,
        email,
        org_id,
        user_org_id,
        invited_by_email,
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct DeleteJWTClaims {
    // Not before
    pub nbf: i64,
    // Expiration time
    pub exp: i64,
    // Issuer
    pub iss: String,
    // Subject
    pub sub: String,
}

pub fn generate_delete_claims(uuid: String) -> DeleteJWTClaims {
    let time_now = Utc::now().naive_utc();
    DeleteJWTClaims {
        nbf: time_now.timestamp(),
        exp: (time_now + Duration::days(5)).timestamp(),
        iss: JWT_DELETE_ISSUER.to_string(),
        sub: uuid,
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VerifyEmailJWTClaims {
    // Not before
    pub nbf: i64,
    // Expiration time
    pub exp: i64,
    // Issuer
    pub iss: String,
    // Subject
    pub sub: String,
}

pub fn generate_verify_email_claims(uuid: String) -> DeleteJWTClaims {
    let time_now = Utc::now().naive_utc();
    DeleteJWTClaims {
        nbf: time_now.timestamp(),
        exp: (time_now + Duration::days(5)).timestamp(),
        iss: JWT_VERIFYEMAIL_ISSUER.to_string(),
        sub: uuid,
    }
}

@@ -253,6 +307,25 @@ pub struct OrgHeaders {
    pub org_user_type: UserOrgType,
}

// org_id is usually the second param ("/organizations/<org_id>")
// But there are cases where it is located in a query value.
// First check the param, if this is not a valid uuid, we will try the query value.
fn get_org_id(request: &Request) -> Option<String> {
    if let Some(Ok(org_id)) = request.get_param::<String>(1) {
        if uuid::Uuid::parse_str(&org_id).is_ok() {
            return Some(org_id);
        }
    }

    if let Some(Ok(org_id)) = request.get_query_value::<String>("organizationId") {
        if uuid::Uuid::parse_str(&org_id).is_ok() {
            return Some(org_id);
        }
    }

    None
}

impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
    type Error = &'static str;

@@ -261,9 +334,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
            Outcome::Forward(_) => Outcome::Forward(()),
            Outcome::Failure(f) => Outcome::Failure(f),
            Outcome::Success(headers) => {
                // org_id is expected to be the second param ("/organizations/<org_id>")
                match request.get_param::<String>(1) {
                    Some(Ok(org_id)) => {
                match get_org_id(request) {
                    Some(org_id) => {
                        let conn = match request.guard::<DbConn>() {
                            Outcome::Success(conn) => conn,
                            _ => err_handler!("Error getting DB"),

@@ -294,7 +366,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
                        }
                    },
                })
            }
            },
            _ => err_handler!("Error getting the organization id"),
        }
    }

@@ -332,6 +404,16 @@ impl<'a, 'r> FromRequest<'a, 'r> for AdminHeaders {
    }
}

impl Into<Headers> for AdminHeaders {
    fn into(self) -> Headers {
        Headers {
            host: self.host,
            device: self.device,
            user: self.user
        }
    }
}

pub struct OwnerHeaders {
    pub host: String,
    pub device: Device,

@@ -372,12 +454,25 @@ pub struct ClientIp {
impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        let ip = match request.client_ip() {
            Some(addr) => addr,
            None => "0.0.0.0".parse().unwrap(),
    fn from_request(req: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        let ip = if CONFIG._ip_header_enabled() {
            req.headers().get_one(&CONFIG.ip_header()).and_then(|ip| {
                match ip.find(',') {
                    Some(idx) => &ip[..idx],
                    None => ip,
                }
                .parse()
                .map_err(|_| warn!("'{}' header is malformed: {}", CONFIG.ip_header(), ip))
                .ok()
            })
        } else {
            None
        };

        let ip = ip
            .or_else(|| req.remote().map(|r| r.ip()))
            .unwrap_or_else(|| "0.0.0.0".parse().unwrap());

        Outcome::Success(ClientIp { ip })
    }
}
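A note on the new ClientIp guard above: when the configured header (default "X-Real-IP") is enabled it is consulted first, only the part before the first comma is kept to cope with proxy chains, and the peer address is the fallback. A standalone sketch of the same resolution order (the function and its names are illustrative, not part of the codebase):

    use std::net::IpAddr;

    // Resolution order mirrored from the guard: configured header first,
    // then the peer address, then 0.0.0.0 as a last resort.
    fn resolve_client_ip(header_value: Option<&str>, remote: Option<IpAddr>) -> IpAddr {
        let from_header = header_value.and_then(|raw| {
            // "client, proxy1, proxy2" -> keep the first (client) entry
            let first = raw.split(',').next().unwrap_or(raw).trim();
            first.parse().ok()
        });
        from_header
            .or(remote)
            .unwrap_or_else(|| "0.0.0.0".parse().unwrap())
    }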
src/config.rs (230 changed lines)
@@ -1,19 +1,23 @@
use once_cell::sync::Lazy;
use std::process::exit;
use std::sync::RwLock;

use crate::error::Error;
use crate::util::get_env;
use reqwest::Url;

lazy_static! {
    pub static ref CONFIG: Config = Config::load().unwrap_or_else(|e| {
        println!("Error loading config:\n\t{:?}\n", e);
        exit(12)
    });
    pub static ref CONFIG_FILE: String = {
use crate::error::Error;
use crate::util::{get_env, get_env_bool};

static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
    let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
    get_env("CONFIG_FILE").unwrap_or_else(|| format!("{}/config.json", data_folder))
    };
}
});

pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    Config::load().unwrap_or_else(|e| {
        println!("Error loading config:\n\t{:?}\n", e);
        exit(12)
    })
});

pub type Pass = String;
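The hunk above replaces the lazy_static! macro with once_cell's Lazy. A minimal sketch of the pattern, independent of this codebase:

    use once_cell::sync::Lazy;

    // Initialized on first dereference, then cached for the process lifetime.
    static GREETING: Lazy<String> = Lazy::new(|| {
        let name = std::env::var("NAME").unwrap_or_else(|_| String::from("world"));
        format!("Hello, {}!", name)
    });

    fn main() {
        println!("{}", *GREETING);
    }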
@@ -23,13 +27,13 @@ macro_rules! make_config {
    $group:ident $(: $group_enabled:ident)? {
        $(
            $(#[doc = $doc:literal])+
            $name:ident : $ty:ty, $editable:literal, $none_action:ident $(, $default:expr)?;
            $name:ident : $ty:ident, $editable:literal, $none_action:ident $(, $default:expr)?;
        )+},
    )+) => {
        pub struct Config { inner: RwLock<Inner> }

        struct Inner {
            templates: Handlebars,
            templates: Handlebars<'static>,
            config: ConfigItems,

            _env: ConfigBuilder,

@@ -50,7 +54,7 @@

                let mut builder = ConfigBuilder::default();
                $($(
                    builder.$name = get_env(&stringify!($name).to_uppercase());
                    builder.$name = make_config! { @getenv &stringify!($name).to_uppercase(), $ty };
                )+)+

                builder

@@ -185,19 +189,28 @@
        }
    }
    }};
    ( @build $value:expr, $config:expr, gen, $default_fn:expr ) => {{
        let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
        f($config)
    }};

    ( @getenv $name:expr, bool ) => { get_env_bool($name) };
    ( @getenv $name:expr, $ty:ident ) => { get_env($name) };

}
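The new @getenv arms dispatch on the declared setting type, so bool settings are parsed through get_env_bool while every other type keeps using get_env. The helper itself lives in src/util.rs; a sketch of what such a parser could look like (the accepted spellings here are an assumption):

    fn get_env_bool(key: &str) -> Option<bool> {
        const TRUE_VALUES: &[&str] = &["true", "t", "yes", "y", "1"];
        const FALSE_VALUES: &[&str] = &["false", "f", "no", "n", "0"];
        std::env::var(key).ok().and_then(|val| {
            let val = val.to_lowercase();
            if TRUE_VALUES.contains(&val.as_str()) {
                Some(true)
            } else if FALSE_VALUES.contains(&val.as_str()) {
                Some(false)
            } else {
                None // unrecognized value: treat as unset
            }
        })
    }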
//STRUCTURE:
// /// Short description (without this they won't appear on the list)
// group {
//   /// Friendly Name |> Description (Optional)
//   name: type, is_editable, none_action, <default_value (Optional)>
//   name: type, is_editable, action, <default_value (Optional)>
// }
//
// Where none_action applied when the value wasn't provided and can be:
// Where action applied when the value wasn't provided and can be:
//  def:    Use a default value
//  auto:   Value is auto generated based on other values
//  option: Value is optional
//  gen:    Value is always autogenerated and it's original value ignored
make_config! {
    folders {
        /// Data folder |> Main data folder

@@ -231,18 +244,35 @@ make_config! {
        domain: String, true, def, "http://localhost".to_string();
        /// Domain Set |> Indicates if the domain is set by the admin. Otherwise the default will be used.
        domain_set: bool, false, def, false;
        /// Domain origin |> Domain URL origin (in https://example.com:8443/path, https://example.com:8443 is the origin)
        domain_origin: String, false, auto, |c| extract_url_origin(&c.domain);
        /// Domain path |> Domain URL path (in https://example.com:8443/path, /path is the path)
        domain_path: String, false, auto, |c| extract_url_path(&c.domain);
        /// Enable web vault
        web_vault_enabled: bool, false, def, true;

        /// HIBP Api Key |> HaveIBeenPwned API Key, request it here: https://haveibeenpwned.com/API/Key
        hibp_api_key: Pass, true, option;

        /// Per-user attachment limit (KB) |> Limit in kilobytes for a users attachments, once the limit is exceeded it won't be possible to upload more
        user_attachment_limit: i64, true, option;
        /// Per-organization attachment limit (KB) |> Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more
        org_attachment_limit: i64, true, option;

        /// Disable icon downloads |> Set to true to disable icon downloading, this would still serve icons from
        /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0,
        /// otherwise it will delete them and they won't be downloaded again.
        disable_icon_download: bool, true, def, false;
        /// Allow new signups |> Controls if new users can register. Note that while this is disabled, users could still be invited
        signups_allowed: bool, true, def, true;
        /// Require email verification on signups. This will prevent logins from succeeding until the address has been verified
        signups_verify: bool, true, def, false;
        /// If signups require email verification, automatically re-send verification email if it hasn't been sent for a while (in seconds)
        signups_verify_resend_time: u64, true, def, 3_600;
        /// If signups require email verification, limit how many emails are automatically sent when login is attempted (0 means no limit)
        signups_verify_resend_limit: u32, true, def, 6;
        /// Allow signups only from this list of comma-separated domains
        signups_domains_whitelist: String, true, def, "".to_string();
        /// Allow invitations |> Controls whether users can be invited by organization admins, even when signups are disabled
        invitations_allowed: bool, true, def, true;
        /// Password iterations |> Number of server-side passwords hashing iterations.

@@ -254,10 +284,18 @@ make_config! {

        /// Admin page token |> The token used to authenticate in this very same page. Changing it here won't deauthorize the current session
        admin_token: Pass, true, option;

        /// Invitation organization name |> Name shown in the invitation emails that don't come from a specific organization
        invitation_org_name: String, true, def, "Bitwarden_RS".to_string();
    },

    /// Advanced settings
    advanced {
        /// Client IP header |> If not present, the remote IP is used.
        /// Set to the string "none" (without quotes), to disable any headers and just use the remote IP
        ip_header: String, true, def, "X-Real-IP".to_string();
        /// Internal IP header property, used to avoid recomputing each time
        _ip_header_enabled: bool, false, gen, |c| &c.ip_header.trim().to_lowercase() != "none";
        /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded
        icon_cache_ttl: u64, true, def, 2_592_000;
        /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again.

@@ -267,11 +305,18 @@ make_config! {
        /// Icon blacklist Regex |> Any domains or IPs that match this regex won't be fetched by the icon service.
        /// Useful to hide other servers in the local network. Check the WIKI for more details
        icon_blacklist_regex: String, true, option;
        /// Icon blacklist non global IPs |> Any IP which is not defined as a global IP will be blacklisted.
        /// Usefull to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
        icon_blacklist_non_global_ips: bool, true, def, true;

        /// Disable Two-Factor remember |> Enabling this would force the users to use a second factor to login every time.
        /// Note that the checkbox would still be present, but ignored.
        disable_2fa_remember: bool, true, def, false;

        /// Disable authenticator time drifted codes to be valid |> Enabling this only allows the current TOTP code to be valid
        /// TOTP codes of the previous and next 30 seconds will be invalid.
        authenticator_disable_time_drift: bool, true, def, false;

        /// Require new device emails |> When a user logs in an email is required to be sent.
        /// If sending the email fails the login attempt will fail.
        require_device_email: bool, true, def, false;

@@ -279,9 +324,6 @@ make_config! {
        /// Reload templates (Dev) |> When this is set to true, the templates get reloaded with every request.
        /// ONLY use this during development, as it can slow down the server
        reload_templates: bool, true, def, false;

        /// Log routes at launch (Dev)
        log_mounts: bool, true, def, false;
        /// Enable extended logging
        extended_logging: bool, false, def, true;
        /// Enable the log to output to Syslog

@@ -295,8 +337,11 @@ make_config! {
        /// that do not support WAL. Please make sure you read project wiki on the topic before changing this setting.
        enable_db_wal: bool, false, def, true;

        /// Disable Admin Token (Know the risks!) |> Disables the Admin Token for the admin page so you may use your own auth in-front
        /// Bypass admin page security (Know the risks!) |> Disables the Admin Token for the admin page so you may use your own auth in-front
        disable_admin_token: bool, true, def, false;

        /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets
        allowed_iframe_ancestors: String, true, def, String::new();
    },

    /// Yubikey settings

@@ -347,26 +392,60 @@ make_config! {
        smtp_password: Pass, true, option;
        /// Json form auth mechanism |> Defaults for ssl is "Plain" and "Login" and nothing for non-ssl connections. Possible values: ["Plain", "Login", "Xoauth2"]
        smtp_auth_mechanism: String, true, option;
        /// SMTP connection timeout |> Number of seconds when to stop trying to connect to the SMTP server
        smtp_timeout: u64, true, def, 15;
    },

    /// Email 2FA Settings
    email_2fa: _enable_email_2fa {
        /// Enabled |> Disabling will prevent users from setting up new email 2FA and using existing email 2FA configured
        _enable_email_2fa: bool, true, auto, |c| c._enable_smtp && c.smtp_host.is_some();
        /// Token number length |> Length of the numbers in an email token. Minimum of 6. Maximum is 19.
        email_token_size: u32, true, def, 6;
        /// Token expiration time |> Maximum time in seconds a token is valid. The time the user has to open email client and copy token.
        email_expiration_time: u64, true, def, 600;
        /// Maximum attempts |> Maximum attempts before an email token is reset and a new email will need to be sent
        email_attempts_limit: u64, true, def, 3;
    },
}
fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
    let db_url = cfg.database_url.to_lowercase();
    if cfg!(feature = "sqlite") && (db_url.starts_with("mysql:") || db_url.starts_with("postgresql:")) {
        err!("`DATABASE_URL` is meant for MySQL or Postgres, while this server is meant for SQLite")
    }

    if cfg!(feature = "mysql") && !db_url.starts_with("mysql:") {
        err!("`DATABASE_URL` should start with mysql: when using the MySQL server")
    }

    if cfg!(feature = "postgresql") && !db_url.starts_with("postgresql:") {
        err!("`DATABASE_URL` should start with postgresql: when using the PostgreSQL server")
    }

    let dom = cfg.domain.to_lowercase();
    if !dom.starts_with("http://") && !dom.starts_with("https://") {
        err!("DOMAIN variable needs to contain the protocol (http, https). Use 'http[s]://bw.example.com' instead of 'bw.example.com'");
    }

    if let Some(ref token) = cfg.admin_token {
        if token.trim().is_empty() {
        if token.trim().is_empty() && !cfg.disable_admin_token {
            err!("`ADMIN_TOKEN` is enabled but has an empty value. To enable the admin page without token, use `DISABLE_ADMIN_TOKEN`")
        }
    }

    if (cfg.duo_host.is_some() || cfg.duo_ikey.is_some() || cfg.duo_skey.is_some())
    if cfg._enable_duo
        && (cfg.duo_host.is_some() || cfg.duo_ikey.is_some() || cfg.duo_skey.is_some())
        && !(cfg.duo_host.is_some() && cfg.duo_ikey.is_some() && cfg.duo_skey.is_some())
    {
        err!("All Duo options need to be set for global Duo support")
    }

    if cfg.yubico_client_id.is_some() != cfg.yubico_secret_key.is_some() {
    if cfg._enable_yubico && cfg.yubico_client_id.is_some() != cfg.yubico_secret_key.is_some() {
        err!("Both `YUBICO_CLIENT_ID` and `YUBICO_SECRET_KEY` need to be set for Yubikey OTP support")
    }

    if cfg._enable_smtp {
        if cfg.smtp_host.is_some() == cfg.smtp_from.is_empty() {
            err!("Both `SMTP_HOST` and `SMTP_FROM` need to be set for email support")
        }

@@ -375,9 +454,45 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
            err!("Both `SMTP_USERNAME` and `SMTP_PASSWORD` need to be set to enable email authentication")
        }

        if cfg._enable_email_2fa && (!cfg._enable_smtp || cfg.smtp_host.is_none()) {
            err!("To enable email 2FA, SMTP must be configured")
        }

        if cfg._enable_email_2fa && cfg.email_token_size < 6 {
            err!("`EMAIL_TOKEN_SIZE` has a minimum size of 6")
        }

        if cfg._enable_email_2fa && cfg.email_token_size > 19 {
            err!("`EMAIL_TOKEN_SIZE` has a maximum size of 19")
        }
    }

    Ok(())
}
/// Extracts an RFC 6454 web origin from a URL.
fn extract_url_origin(url: &str) -> String {
    match Url::parse(url) {
        Ok(u) => u.origin().ascii_serialization(),
        Err(e) => {
            println!("Error validating domain: {}", e);
            String::new()
        }
    }
}

/// Extracts the path from a URL.
/// All trailing '/' chars are trimmed, even if the path is a lone '/'.
fn extract_url_path(url: &str) -> String {
    match Url::parse(url) {
        Ok(u) => u.path().trim_end_matches('/').to_string(),
        Err(_) => {
            // We already print it in the method above, no need to do it again
            String::new()
        }
    }
}
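How the two extractors behave on the URL from their doc comments (the asserts are illustrative and assume the functions are in scope):

    fn main() {
        assert_eq!(extract_url_origin("https://example.com:8443/path"), "https://example.com:8443");
        assert_eq!(extract_url_path("https://example.com:8443/path"), "/path");
        // Trailing '/' chars are trimmed, even a lone one:
        assert_eq!(extract_url_path("https://example.com/"), "");
    }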
impl Config {
    pub fn load() -> Result<Self, Error> {
        // Loading from env and file

@@ -392,12 +507,7 @@ impl Config {
        validate_config(&config)?;

        Ok(Config {
            inner: RwLock::new(Inner {
                templates: load_templates(&config.templates_folder),
                config,
                _env,
                _usr,
            }),
            inner: RwLock::new(Inner { templates: load_templates(&config.templates_folder), config, _env, _usr }),
        })
    }

@@ -441,6 +551,20 @@ impl Config {
        self.update_config(builder)
    }

    pub fn can_signup_user(&self, email: &str) -> bool {
        let e: Vec<&str> = email.rsplitn(2, '@').collect();
        if e.len() != 2 || e[0].is_empty() || e[1].is_empty() {
            warn!("Failed to parse email address '{}'", email);
            return false;
        }

        // Allow signups if the whitelist is empty/not configured
        // (it doesn't contain any domains), or if it matches at least
        // one domain.
        let whitelist_str = self.signups_domains_whitelist();
        (whitelist_str.is_empty() && CONFIG.signups_allowed()) || whitelist_str.split(',').filter(|s| !s.is_empty()).any(|d| d == e[0])
    }
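can_signup_user splits the address from the right, so e[0] is always the domain even if the local part itself contains '@'. A quick illustration of the rsplitn behavior:

    fn main() {
        let e: Vec<&str> = "user@example.com".rsplitn(2, '@').collect();
        assert_eq!(e, ["example.com", "user"]); // domain first, local part second

        let e: Vec<&str> = "odd@local@example.com".rsplitn(2, '@').collect();
        assert_eq!(e, ["example.com", "odd@local"]);
    }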
    pub fn delete_user_config(&self) -> Result<(), Error> {
        crate::util::delete_file(&CONFIG_FILE)?;
@@ -500,7 +624,7 @@ impl Config {
    ) -> Result<String, crate::error::Error> {
        if CONFIG.reload_templates() {
            warn!("RELOADING TEMPLATES");
            let hb = load_templates(CONFIG.templates_folder().as_ref());
            let hb = load_templates(CONFIG.templates_folder());
            hb.render(name, data).map_err(Into::into)
        } else {
            let hb = &CONFIG.inner.read().unwrap().templates;

@@ -509,17 +633,18 @@ impl Config {
    }
}

use handlebars::{
    Context, Handlebars, Helper, HelperDef, HelperResult, Output, RenderContext, RenderError, Renderable,
};
use handlebars::{Context, Handlebars, Helper, HelperResult, Output, RenderContext, RenderError, Renderable};

fn load_templates(path: &str) -> Handlebars {
fn load_templates<P>(path: P) -> Handlebars<'static>
where
    P: AsRef<std::path::Path>,
{
    let mut hb = Handlebars::new();
    // Error on missing params
    hb.set_strict_mode(true);
    // Register helpers
    hb.register_helper("case", Box::new(CaseHelper));
    hb.register_helper("jsesc", Box::new(JsEscapeHelper));
    hb.register_helper("case", Box::new(case_helper));
    hb.register_helper("jsesc", Box::new(js_escape_helper));

    macro_rules! reg {
        ($name:expr) => {{

@@ -533,12 +658,19 @@ fn load_templates(path: &str) -> Handlebars {
    }

    // First register default templates here
    reg!("email/change_email", ".html");
    reg!("email/delete_account", ".html");
    reg!("email/invite_accepted", ".html");
    reg!("email/invite_confirmed", ".html");
    reg!("email/new_device_logged_in", ".html");
    reg!("email/pw_hint_none", ".html");
    reg!("email/pw_hint_some", ".html");
    reg!("email/send_org_invite", ".html");
    reg!("email/twofactor_email", ".html");
    reg!("email/verify_email", ".html");
    reg!("email/welcome", ".html");
    reg!("email/welcome_must_verify", ".html");
    reg!("email/smtp_test", ".html");

    reg!("admin/base");
    reg!("admin/login");

@@ -552,17 +684,13 @@ fn load_templates(path: &str) -> Handlebars {
    hb
}

pub struct CaseHelper;

impl HelperDef for CaseHelper {
    fn call<'reg: 'rc, 'rc>(
        &self,
fn case_helper<'reg, 'rc>(
    h: &Helper<'reg, 'rc>,
    r: &'reg Handlebars,
    ctx: &Context,
    rc: &mut RenderContext<'reg>,
    ctx: &'rc Context,
    rc: &mut RenderContext<'reg, 'rc>,
    out: &mut dyn Output,
) -> HelperResult {
) -> HelperResult {
    let param = h
        .param(0)
        .ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?;

@@ -573,20 +701,15 @@ impl HelperDef for CaseHelper {
    } else {
        Ok(())
    }
}
}

pub struct JsEscapeHelper;

impl HelperDef for JsEscapeHelper {
    fn call<'reg: 'rc, 'rc>(
        &self,
fn js_escape_helper<'reg, 'rc>(
    h: &Helper<'reg, 'rc>,
    _: &'reg Handlebars,
    _: &Context,
    _: &mut RenderContext<'reg>,
    _r: &'reg Handlebars,
    _ctx: &'rc Context,
    _rc: &mut RenderContext<'reg, 'rc>,
    out: &mut dyn Output,
) -> HelperResult {
) -> HelperResult {
    let param = h
        .param(0)
        .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?;

@@ -601,5 +724,4 @@ impl HelperDef for JsEscapeHelper {

    out.write(&quoted_value)?;
    Ok(())
}
}
src/crypto.rs

@@ -2,10 +2,11 @@
// PBKDF2 derivation
//

use crate::error::Error;
use ring::{digest, hmac, pbkdf2};
use std::num::NonZeroU32;

static DIGEST_ALG: &digest::Algorithm = &digest::SHA256;
static DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
const OUTPUT_LEN: usize = digest::SHA256_OUTPUT_LEN;

pub fn hash_password(secret: &[u8], salt: &[u8], iterations: u32) -> Vec<u8> {

@@ -28,7 +29,7 @@ pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8], iterati
pub fn hmac_sign(key: &str, data: &str) -> String {
    use data_encoding::HEXLOWER;

    let key = hmac::SigningKey::new(&digest::SHA1, key.as_bytes());
    let key = hmac::Key::new(hmac::HMAC_SHA1_FOR_LEGACY_USE_ONLY, key.as_bytes());
    let signature = hmac::sign(&key, data.as_bytes());

    HEXLOWER.encode(signature.as_ref())

@@ -52,6 +53,21 @@ pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
    array
}

pub fn generate_token(token_size: u32) -> Result<String, Error> {
    if token_size > 19 {
        err!("Generating token failed")
    }

    // 8 bytes to create an u64 for up to 19 token digits
    let bytes = get_random(vec![0; 8]);
    let mut bytes_array = [0u8; 8];
    bytes_array.copy_from_slice(&bytes);

    let number = u64::from_be_bytes(bytes_array) % 10u64.pow(token_size);
    let token = format!("{:0size$}", number, size = token_size as usize);
    Ok(token)
}

//
// Constant time compare
//
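Usage sketch for the new generate_token (the wrapper function is illustrative and assumes the crate's Error type is in scope):

    fn demo() -> Result<(), Error> {
        let token = generate_token(6)?;
        assert_eq!(token.len(), 6); // zero-padded to the requested width
        assert!(token.chars().all(|c| c.is_ascii_digit()));
        Ok(())
    }

Taking a random u64 modulo 10^size carries a slight modulo bias, which is negligible at these token sizes.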
src/db/mod.rs

@@ -19,6 +19,8 @@ use crate::CONFIG;
type Connection = diesel::sqlite::SqliteConnection;
#[cfg(feature = "mysql")]
type Connection = diesel::mysql::MysqlConnection;
#[cfg(feature = "postgresql")]
type Connection = diesel::pg::PgConnection;

/// An alias to the type for a pool of Diesel connections.
type Pool = r2d2::Pool<ConnectionManager<Connection>>;

@@ -33,6 +35,9 @@ pub mod schema;
#[cfg(feature = "mysql")]
#[path = "schemas/mysql/schema.rs"]
pub mod schema;
#[cfg(feature = "postgresql")]
#[path = "schemas/postgresql/schema.rs"]
pub mod schema;

/// Initializes a database pool.
pub fn init_pool() -> Pool {

@@ -47,12 +52,16 @@ pub fn get_connection() -> Result<Connection, ConnectionError> {

/// Creates a back-up of the database using sqlite3
pub fn backup_database() -> Result<(), Error> {
    use std::path::Path;
    let db_url = CONFIG.database_url();
    let db_path = Path::new(&db_url).parent().unwrap();

    let now: DateTime<Utc> = Utc::now();
    let file_date = now.format("%Y%m%d").to_string();
    let backup_command: String = format!("{}{}{}", ".backup 'db_", file_date, ".sqlite3'");

    Command::new("sqlite3")
        .current_dir("./data")
        .current_dir(db_path)
        .args(&["db.sqlite3", &backup_command])
        .output()
        .expect("Can't open database, sqlite3 is not available, make sure it's installed and available on the PATH");

@@ -67,7 +76,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<DbConn, ()> {
        let pool = request.guard::<State<Pool>>()?;
        // https://github.com/SergioBenitez/Rocket/commit/e3c1a4ad3ab9b840482ec6de4200d30df43e357c
        let pool = try_outcome!(request.guard::<State<Pool>>());
        match pool.get() {
            Ok(conn) => Outcome::Success(DbConn(conn)),
            Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())),
src/db/models/attachment.rs

@@ -3,7 +3,7 @@ use serde_json::Value;
use super::Cipher;
use crate::CONFIG;

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "attachments"]
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
#[primary_key(id)]

@@ -49,7 +49,7 @@ impl Attachment {
    }
}

use crate::db::schema::attachments;
use crate::db::schema::{attachments, ciphers};
use crate::db::DbConn;
use diesel;
use diesel::prelude::*;

@@ -59,6 +59,18 @@ use crate::error::MapResult;

/// Database methods
impl Attachment {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::insert_into(attachments::table)
            .values(self)
            .on_conflict(attachments::id)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error saving attachment")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::replace_into(attachments::table)
            .values(self)

@@ -106,4 +118,26 @@ impl Attachment {
            .load::<Self>(&**conn)
            .expect("Error loading attachments")
    }

    pub fn size_by_user(user_uuid: &str, conn: &DbConn) -> i64 {
        let result: Option<i64> = attachments::table
            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
            .filter(ciphers::user_uuid.eq(user_uuid))
            .select(diesel::dsl::sum(attachments::file_size))
            .first(&**conn)
            .expect("Error loading user attachment total size");

        result.unwrap_or(0)
    }

    pub fn size_by_org(org_uuid: &str, conn: &DbConn) -> i64 {
        let result: Option<i64> = attachments::table
            .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid)))
            .filter(ciphers::organization_uuid.eq(org_uuid))
            .select(diesel::dsl::sum(attachments::file_size))
            .first(&**conn)
            .expect("Error loading user attachment total size");

        result.unwrap_or(0)
    }
}
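This split save() is the pattern the changeset applies to every model below: SQLite and MySQL keep diesel's replace_into, while PostgreSQL, which has no REPLACE INTO, emulates it with an upsert on the primary key. A self-contained sketch with a placeholder table:

    #[macro_use]
    extern crate diesel;
    use diesel::prelude::*;

    table! {
        items (id) {
            id -> Text,
            value -> Text,
        }
    }

    #[derive(Insertable, AsChangeset)]
    #[table_name = "items"]
    struct NewItem {
        id: String,
        value: String,
    }

    // SQLite/MySQL: REPLACE INTO swaps the whole conflicting row.
    fn save_replace(item: &NewItem, conn: &SqliteConnection) -> QueryResult<usize> {
        diesel::replace_into(items::table).values(item).execute(conn)
    }

    // PostgreSQL: INSERT ... ON CONFLICT (id) DO UPDATE has the same effect.
    fn save_upsert(item: &NewItem, conn: &PgConnection) -> QueryResult<usize> {
        diesel::insert_into(items::table)
            .values(item)
            .on_conflict(items::id)
            .do_update()
            .set(item)
            .execute(conn)
    }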
src/db/models/cipher.rs

@@ -5,7 +5,7 @@ use super::{
    Attachment, CollectionCipher, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization,
};

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "ciphers"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[belongs_to(Organization, foreign_key = "organization_uuid")]

@@ -148,6 +148,21 @@ impl Cipher {
        user_uuids
    }

    #[cfg(feature = "postgresql")]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        self.update_users_revision(conn);
        self.updated_at = Utc::now().naive_utc();

        diesel::insert_into(ciphers::table)
            .values(&*self)
            .on_conflict(ciphers::uuid)
            .do_update()
            .set(&*self)
            .execute(&**conn)
            .map_res("Error saving cipher")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        self.update_users_revision(conn);
        self.updated_at = Utc::now().naive_utc();
src/db/models/collection.rs

@@ -2,7 +2,7 @@ use serde_json::Value;

use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "collections"]
#[belongs_to(Organization, foreign_key = "org_uuid")]
#[primary_key(uuid)]

@@ -43,6 +43,20 @@ use crate::error::MapResult;

/// Database methods
impl Collection {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        self.update_users_revision(conn);

        diesel::insert_into(collections::table)
            .values(self)
            .on_conflict(collections::uuid)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error saving collection")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        self.update_users_revision(conn);

@@ -200,6 +214,24 @@ impl CollectionUser {
            .expect("Error loading users_collections")
    }

    #[cfg(feature = "postgresql")]
    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&user_uuid, conn);

        diesel::insert_into(users_collections::table)
            .values((
                users_collections::user_uuid.eq(user_uuid),
                users_collections::collection_uuid.eq(collection_uuid),
                users_collections::read_only.eq(read_only),
            ))
            .on_conflict((users_collections::user_uuid, users_collections::collection_uuid))
            .do_update()
            .set(users_collections::read_only.eq(read_only))
            .execute(&**conn)
            .map_res("Error adding user to collection")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&user_uuid, conn);

@@ -277,6 +309,21 @@ pub struct CollectionCipher {

/// Database methods
impl CollectionCipher {
    #[cfg(feature = "postgresql")]
    pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
        Self::update_users_revision(&collection_uuid, conn);
        diesel::insert_into(ciphers_collections::table)
            .values((
                ciphers_collections::cipher_uuid.eq(cipher_uuid),
                ciphers_collections::collection_uuid.eq(collection_uuid),
            ))
            .on_conflict((ciphers_collections::cipher_uuid, ciphers_collections::collection_uuid))
            .do_nothing()
            .execute(&**conn)
            .map_res("Error adding cipher to collection")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
        Self::update_users_revision(&collection_uuid, conn);
        diesel::replace_into(ciphers_collections::table)
src/db/models/device.rs

@@ -1,8 +1,9 @@
use chrono::{NaiveDateTime, Utc};

use super::User;
use crate::CONFIG;

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "devices"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid)]

@@ -87,7 +88,7 @@ impl Device {
            premium: true,
            name: user.name.to_string(),
            email: user.email.to_string(),
            email_verified: true,
            email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(),

            orgowner,
            orgadmin,

@@ -114,6 +115,18 @@ use crate::error::MapResult;

/// Database methods
impl Device {
    #[cfg(feature = "postgresql")]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        self.updated_at = Utc::now().naive_utc();

        crate::util::retry(
            || diesel::insert_into(devices::table).values(&*self).on_conflict(devices::uuid).do_update().set(&*self).execute(&**conn),
            10,
        )
        .map_res("Error saving device")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        self.updated_at = Utc::now().naive_utc();
src/db/models/folder.rs

@@ -3,7 +3,7 @@ use serde_json::Value;

use super::{Cipher, User};

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "folders"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid)]

@@ -71,6 +71,21 @@ use crate::error::MapResult;

/// Database methods
impl Folder {
    #[cfg(feature = "postgresql")]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&self.user_uuid, conn);
        self.updated_at = Utc::now().naive_utc();

        diesel::insert_into(folders::table)
            .values(&*self)
            .on_conflict(folders::uuid)
            .do_update()
            .set(&*self)
            .execute(&**conn)
            .map_res("Error saving folder")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&self.user_uuid, conn);
        self.updated_at = Utc::now().naive_utc();

@@ -113,6 +128,17 @@ impl Folder {
}

impl FolderCipher {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::insert_into(folders_ciphers::table)
            .values(&*self)
            .on_conflict((folders_ciphers::cipher_uuid, folders_ciphers::folder_uuid))
            .do_nothing()
            .execute(&**conn)
            .map_res("Error adding cipher to folder")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::replace_into(folders_ciphers::table)
            .values(&*self)
src/db/models/mod.rs

@@ -7,6 +7,7 @@ mod user;
mod collection;
mod organization;
mod two_factor;
mod org_policy;

pub use self::attachment::Attachment;
pub use self::cipher::Cipher;

@@ -17,3 +18,4 @@ pub use self::organization::Organization;
pub use self::organization::{UserOrgStatus, UserOrgType, UserOrganization};
pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::user::{Invitation, User};
pub use self::org_policy::{OrgPolicy, OrgPolicyType};
src/db/models/org_policy.rs (new file, 142 lines)
@@ -0,0 +1,142 @@
use diesel;
use diesel::prelude::*;
use serde_json::Value;

use crate::api::EmptyResult;
use crate::db::schema::org_policies;
use crate::db::DbConn;
use crate::error::MapResult;

use super::Organization;

#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "org_policies"]
#[belongs_to(Organization, foreign_key = "org_uuid")]
#[primary_key(uuid)]
pub struct OrgPolicy {
    pub uuid: String,
    pub org_uuid: String,
    pub atype: i32,
    pub enabled: bool,
    pub data: String,
}

#[allow(dead_code)]
#[derive(FromPrimitive)]
pub enum OrgPolicyType {
    TwoFactorAuthentication = 0,
    MasterPassword = 1,
    PasswordGenerator = 2,
}

/// Local methods
impl OrgPolicy {
    pub fn new(org_uuid: String, atype: OrgPolicyType, data: String) -> Self {
        Self {
            uuid: crate::util::get_uuid(),
            org_uuid,
            atype: atype as i32,
            enabled: false,
            data,
        }
    }

    pub fn to_json(&self) -> Value {
        let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null);
        json!({
            "Id": self.uuid,
            "OrganizationId": self.org_uuid,
            "Type": self.atype,
            "Data": data_json,
            "Enabled": self.enabled,
            "Object": "policy",
        })
    }
}

/// Database methods
impl OrgPolicy {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        // We need to make sure we're not going to violate the unique constraint on org_uuid and atype.
        // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does
        // not support multiple constraints on ON CONFLICT clauses.
        diesel::delete(
            org_policies::table
                .filter(org_policies::org_uuid.eq(&self.org_uuid))
                .filter(org_policies::atype.eq(&self.atype)),
        )
        .execute(&**conn)
        .map_res("Error deleting org_policy for insert")?;

        diesel::insert_into(org_policies::table)
            .values(self)
            .on_conflict(org_policies::uuid)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error saving org_policy")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::replace_into(org_policies::table)
            .values(&*self)
            .execute(&**conn)
            .map_res("Error saving org_policy")
    }

    pub fn delete(self, conn: &DbConn) -> EmptyResult {
        diesel::delete(org_policies::table.filter(org_policies::uuid.eq(self.uuid)))
            .execute(&**conn)
            .map_res("Error deleting org_policy")
    }

    pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
        org_policies::table
            .filter(org_policies::uuid.eq(uuid))
            .first::<Self>(&**conn)
            .ok()
    }

    pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
        org_policies::table
            .filter(org_policies::org_uuid.eq(org_uuid))
            .load::<Self>(&**conn)
            .expect("Error loading org_policy")
    }

    pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
        use crate::db::schema::users_organizations;

        org_policies::table
            .left_join(
                users_organizations::table.on(
                    users_organizations::org_uuid.eq(org_policies::org_uuid)
                        .and(users_organizations::user_uuid.eq(user_uuid)))
            )
            .select(org_policies::all_columns)
            .load::<Self>(&**conn)
            .expect("Error loading org_policy")
    }

    pub fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Option<Self> {
        org_policies::table
            .filter(org_policies::org_uuid.eq(org_uuid))
            .filter(org_policies::atype.eq(atype))
            .first::<Self>(&**conn)
            .ok()
    }

    pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult {
        diesel::delete(org_policies::table.filter(org_policies::org_uuid.eq(org_uuid)))
            .execute(&**conn)
            .map_res("Error deleting org_policy")
    }

    /*pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
        diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
            .execute(&**conn)
            .map_res("Error deleting twofactors")
    }*/
}
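Illustrative use of the new model (the UUID literal is a placeholder):

    let policy = OrgPolicy::new(
        "3fa85f64-5717-4562-b3fc-2c963f66afa6".to_string(),
        OrgPolicyType::TwoFactorAuthentication,
        "null".to_string(), // `data` holds raw JSON; "null" deserializes to Value::Null
    );
    assert_eq!(policy.atype, 0);
    let json = policy.to_json();
    assert_eq!(json["Object"], "policy");
    assert_eq!(json["Enabled"], false);
    // policy.save(&conn)?; // replace_into or delete+upsert, depending on the backend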
src/db/models/organization.rs

@@ -1,9 +1,10 @@
use serde_json::Value;
use std::cmp::Ordering;
use num_traits::FromPrimitive;

use super::{CollectionUser, User};
use super::{CollectionUser, User, OrgPolicy};

#[derive(Debug, Identifiable, Queryable, Insertable)]
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "organizations"]
#[primary_key(uuid)]
pub struct Organization {

@@ -12,7 +13,7 @@ pub struct Organization {
    pub billing_email: String,
}

#[derive(Debug, Identifiable, Queryable, Insertable)]
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "users_organizations"]
#[primary_key(uuid)]
pub struct UserOrganization {

@@ -33,6 +34,7 @@ pub enum UserOrgStatus {
}

#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(FromPrimitive)]
pub enum UserOrgType {
    Owner = 0,
    Admin = 1,

@@ -135,16 +137,6 @@ impl UserOrgType {
            _ => None,
        }
    }

    pub fn from_i32(i: i32) -> Option<Self> {
        match i {
            0 => Some(UserOrgType::Owner),
            1 => Some(UserOrgType::Admin),
            2 => Some(UserOrgType::User),
            3 => Some(UserOrgType::Manager),
            _ => None,
        }
    }
}
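The hand-written from_i32 above can be removed because #[derive(FromPrimitive)] (from the num_derive crate, used together with num_traits::FromPrimitive) generates the same mapping. The general pattern:

    use num_derive::FromPrimitive;
    use num_traits::FromPrimitive;

    #[derive(Debug, PartialEq, FromPrimitive)]
    enum Role {
        Owner = 0,
        Admin = 1,
        User = 2,
        Manager = 3,
    }

    fn main() {
        assert_eq!(Role::from_i32(3), Some(Role::Manager));
        assert_eq!(Role::from_i32(42), None); // out-of-range values map to None
    }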
/// Local methods

@@ -170,6 +162,7 @@ impl Organization {
            "UseEvents": false,
            "UseGroups": false,
            "UseTotp": true,
            "UsePolicies": true,

            "BusinessName": null,
            "BusinessAddress1": null,

@@ -213,6 +206,24 @@ use crate::error::MapResult;

/// Database methods
impl Organization {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        UserOrganization::find_by_org(&self.uuid, conn)
            .iter()
            .for_each(|user_org| {
                User::update_uuid_revision(&user_org.user_uuid, conn);
            });

        diesel::insert_into(organizations::table)
            .values(self)
            .on_conflict(organizations::uuid)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error saving organization")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        UserOrganization::find_by_org(&self.uuid, conn)
            .iter()

@@ -232,6 +243,7 @@ impl Organization {
        Cipher::delete_all_by_organization(&self.uuid, &conn)?;
        Collection::delete_all_by_organization(&self.uuid, &conn)?;
        UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
        OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?;

        diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
            .execute(&**conn)

@@ -262,6 +274,7 @@ impl UserOrganization {
            "UseEvents": false,
            "UseGroups": false,
            "UseTotp": true,
            "UsePolicies": true,

            "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side

@@ -323,6 +336,20 @@ impl UserOrganization {
        })
    }

    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&self.user_uuid, conn);

        diesel::insert_into(users_organizations::table)
            .values(self)
            .on_conflict(users_organizations::uuid)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error adding user to organization")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        User::update_uuid_revision(&self.user_uuid, conn);
src/db/models/two_factor.rs

@@ -1,8 +1,15 @@
use diesel;
use diesel::prelude::*;
use serde_json::Value;

use crate::api::EmptyResult;
use crate::db::schema::twofactor;
use crate::db::DbConn;
use crate::error::MapResult;

use super::User;

#[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "twofactor"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid)]

@@ -12,6 +19,7 @@ pub struct TwoFactor {
    pub atype: i32,
    pub enabled: bool,
    pub data: String,
    pub last_used: i32,
}

#[allow(dead_code)]

@@ -28,6 +36,7 @@ pub enum TwoFactorType {
    // These are implementation details
    U2fRegisterChallenge = 1000,
    U2fLoginChallenge = 1001,
    EmailVerificationChallenge = 1002,
}

/// Local methods

@@ -39,6 +48,7 @@ impl TwoFactor {
            atype: atype as i32,
            enabled: true,
            data,
            last_used: 0,
        }
    }

@@ -59,16 +69,27 @@ impl TwoFactor {
    }
}

use crate::db::schema::twofactor;
use crate::db::DbConn;
use diesel;
use diesel::prelude::*;

use crate::api::EmptyResult;
use crate::error::MapResult;

/// Database methods
impl TwoFactor {
    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        // We need to make sure we're not going to violate the unique constraint on user_uuid and atype.
        // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does
        // not support multiple constraints on ON CONFLICT clauses.
        diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(&self.user_uuid)).filter(twofactor::atype.eq(&self.atype)))
            .execute(&**conn)
            .map_res("Error deleting twofactor for insert")?;

        diesel::insert_into(twofactor::table)
            .values(self)
            .on_conflict(twofactor::uuid)
            .do_update()
            .set(self)
            .execute(&**conn)
            .map_res("Error saving twofactor")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        diesel::replace_into(twofactor::table)
            .values(self)
src/db/models/user.rs

@@ -4,15 +4,20 @@ use serde_json::Value;
use crate::crypto;
use crate::CONFIG;

#[derive(Debug, Identifiable, Queryable, Insertable)]
#[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "users"]
#[primary_key(uuid)]
pub struct User {
    pub uuid: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub verified_at: Option<NaiveDateTime>,
    pub last_verifying_at: Option<NaiveDateTime>,
    pub login_verify_count: i32,

    pub email: String,
    pub email_new: Option<String>,
    pub email_new_token: Option<String>,
    pub name: String,

    pub password_hash: Vec<u8>,

@@ -46,7 +51,7 @@ enum UserStatus {
/// Local methods
impl User {
    pub const CLIENT_KDF_TYPE_DEFAULT: i32 = 0; // PBKDF2: 0
    pub const CLIENT_KDF_ITER_DEFAULT: i32 = 5_000;
    pub const CLIENT_KDF_ITER_DEFAULT: i32 = 100_000;

    pub fn new(mail: String) -> Self {
        let now = Utc::now().naive_utc();

@@ -56,9 +61,14 @@ impl User {
            uuid: crate::util::get_uuid(),
            created_at: now,
            updated_at: now,
            verified_at: None,
            last_verifying_at: None,
            login_verify_count: 0,
            name: email.clone(),
            email,
            akey: String::new(),
            email_new: None,
            email_new_token: None,

            password_hash: Vec::new(),
            salt: crypto::get_random_64(),

@@ -135,7 +145,7 @@ impl User {
            "Id": self.uuid,
            "Name": self.name,
            "Email": self.email,
            "EmailVerified": true,
            "EmailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(),
            "Premium": true,
            "MasterPasswordHint": self.password_hint,
            "Culture": "en-US",

@@ -148,6 +158,24 @@ impl User {
        })
    }

    #[cfg(feature = "postgresql")]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        if self.email.trim().is_empty() {
            err!("User email can't be empty")
        }

        self.updated_at = Utc::now().naive_utc();

        diesel::insert_into(users::table) // Insert or update
            .values(&*self)
            .on_conflict(users::uuid)
            .do_update()
            .set(&*self)
            .execute(&**conn)
            .map_res("Error saving user")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
        if self.email.trim().is_empty() {
            err!("User email can't be empty")

@@ -250,6 +278,21 @@ impl Invitation {
        Self { email }
    }

    #[cfg(feature = "postgresql")]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        if self.email.trim().is_empty() {
            err!("Invitation email can't be empty")
        }

        diesel::insert_into(invitations::table)
            .values(self)
            .on_conflict(invitations::email)
            .do_nothing()
            .execute(&**conn)
            .map_res("Error saving invitation")
    }

    #[cfg(not(feature = "postgresql"))]
    pub fn save(&self, conn: &DbConn) -> EmptyResult {
        if self.email.trim().is_empty() {
            err!("Invitation email can't be empty")

@@ -276,8 +319,7 @@ impl Invitation {
    }

    pub fn take(mail: &str, conn: &DbConn) -> bool {
        CONFIG.invitations_allowed()
            && match Self::find_by_mail(mail, &conn) {
        match Self::find_by_mail(mail, &conn) {
            Some(invitation) => invitation.delete(&conn).is_ok(),
            None => false,
        }
src/db/schemas/mysql/schema.rs

@@ -77,6 +77,16 @@ table! {
    }
}

table! {
    org_policies (uuid) {
        uuid -> Varchar,
        org_uuid -> Varchar,
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
    }
}

table! {
    organizations (uuid) {
        uuid -> Varchar,

@@ -92,6 +102,7 @@ table! {
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
        last_used -> Integer,
    }
}

@@ -100,7 +111,12 @@ table! {
        uuid -> Varchar,
        created_at -> Datetime,
        updated_at -> Datetime,
        verified_at -> Nullable<Datetime>,
        last_verifying_at -> Nullable<Datetime>,
        login_verify_count -> Integer,
        email -> Varchar,
        email_new -> Nullable<Varchar>,
        email_new_token -> Nullable<Varchar>,
        name -> Text,
        password_hash -> Blob,
        salt -> Blob,

@@ -149,6 +165,7 @@ joinable!(devices -> users (user_uuid));
joinable!(folders -> users (user_uuid));
joinable!(folders_ciphers -> ciphers (cipher_uuid));
joinable!(folders_ciphers -> folders (folder_uuid));
joinable!(org_policies -> organizations (org_uuid));
joinable!(twofactor -> users (user_uuid));
joinable!(users_collections -> collections (collection_uuid));
joinable!(users_collections -> users (user_uuid));

@@ -164,6 +181,7 @@ allow_tables_to_appear_in_same_query!(
    folders,
    folders_ciphers,
    invitations,
    org_policies,
    organizations,
    twofactor,
    users,
src/db/schemas/postgresql/schema.rs (new file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
table! {
|
||||
attachments (id) {
|
||||
id -> Text,
|
||||
cipher_uuid -> Text,
|
||||
file_name -> Text,
|
||||
file_size -> Integer,
|
||||
akey -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
|
||||
ciphers (uuid) {
|
||||
uuid -> Text,
|
||||
created_at -> Timestamp,
|
||||
updated_at -> Timestamp,
|
||||
user_uuid -> Nullable<Text>,
|
||||
organization_uuid -> Nullable<Text>,
|
||||
atype -> Integer,
|
||||
name -> Text,
|
||||
notes -> Nullable<Text>,
|
||||
fields -> Nullable<Text>,
|
||||
data -> Text,
|
||||
favorite -> Bool,
|
||||
password_history -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
|
||||
ciphers_collections (cipher_uuid, collection_uuid) {
|
||||
cipher_uuid -> Text,
|
||||
collection_uuid -> Text,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
|
||||
collections (uuid) {
|
||||
uuid -> Text,
|
||||
org_uuid -> Text,
|
||||
name -> Text,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
    devices (uuid) {
        uuid -> Text,
        created_at -> Timestamp,
        updated_at -> Timestamp,
        user_uuid -> Text,
        name -> Text,
        atype -> Integer,
        push_token -> Nullable<Text>,
        refresh_token -> Text,
        twofactor_remember -> Nullable<Text>,
    }
}

table! {
    folders (uuid) {
        uuid -> Text,
        created_at -> Timestamp,
        updated_at -> Timestamp,
        user_uuid -> Text,
        name -> Text,
    }
}

table! {
    folders_ciphers (cipher_uuid, folder_uuid) {
        cipher_uuid -> Text,
        folder_uuid -> Text,
    }
}

table! {
    invitations (email) {
        email -> Text,
    }
}

table! {
    org_policies (uuid) {
        uuid -> Text,
        org_uuid -> Text,
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
    }
}

table! {
    organizations (uuid) {
        uuid -> Text,
        name -> Text,
        billing_email -> Text,
    }
}

table! {
    twofactor (uuid) {
        uuid -> Text,
        user_uuid -> Text,
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
        last_used -> Integer,
    }
}

table! {
    users (uuid) {
        uuid -> Text,
        created_at -> Timestamp,
        updated_at -> Timestamp,
        verified_at -> Nullable<Timestamp>,
        last_verifying_at -> Nullable<Timestamp>,
        login_verify_count -> Integer,
        email -> Text,
        email_new -> Nullable<Text>,
        email_new_token -> Nullable<Text>,
        name -> Text,
        password_hash -> Binary,
        salt -> Binary,
        password_iterations -> Integer,
        password_hint -> Nullable<Text>,
        akey -> Text,
        private_key -> Nullable<Text>,
        public_key -> Nullable<Text>,
        totp_secret -> Nullable<Text>,
        totp_recover -> Nullable<Text>,
        security_stamp -> Text,
        equivalent_domains -> Text,
        excluded_globals -> Text,
        client_kdf_type -> Integer,
        client_kdf_iter -> Integer,
    }
}

table! {
    users_collections (user_uuid, collection_uuid) {
        user_uuid -> Text,
        collection_uuid -> Text,
        read_only -> Bool,
    }
}

table! {
    users_organizations (uuid) {
        uuid -> Text,
        user_uuid -> Text,
        org_uuid -> Text,
        access_all -> Bool,
        akey -> Text,
        status -> Integer,
        atype -> Integer,
    }
}

joinable!(attachments -> ciphers (cipher_uuid));
joinable!(ciphers -> organizations (organization_uuid));
joinable!(ciphers -> users (user_uuid));
joinable!(ciphers_collections -> ciphers (cipher_uuid));
joinable!(ciphers_collections -> collections (collection_uuid));
joinable!(collections -> organizations (org_uuid));
joinable!(devices -> users (user_uuid));
joinable!(folders -> users (user_uuid));
joinable!(folders_ciphers -> ciphers (cipher_uuid));
joinable!(folders_ciphers -> folders (folder_uuid));
joinable!(org_policies -> organizations (org_uuid));
joinable!(twofactor -> users (user_uuid));
joinable!(users_collections -> collections (collection_uuid));
joinable!(users_collections -> users (user_uuid));
joinable!(users_organizations -> organizations (org_uuid));
joinable!(users_organizations -> users (user_uuid));

allow_tables_to_appear_in_same_query!(
    attachments,
    ciphers,
    ciphers_collections,
    collections,
    devices,
    folders,
    folders_ciphers,
    invitations,
    org_policies,
    organizations,
    twofactor,
    users,
    users_collections,
    users_organizations,
);
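Taken together, the table! blocks define the typed column mappings, joinable! registers the foreign keys Diesel can use for default ON clauses, and allow_tables_to_appear_in_same_query! whitelists cross-table queries. A minimal usage sketch, not part of this diff (assumes diesel 1.x, the SQLite backend, and this schema module in scope; the helper name is hypothetical):

    use diesel::prelude::*;
    use diesel::sqlite::SqliteConnection;
    // Assumption: the schema above is reachable as crate::db::schema.
    use crate::db::schema::devices;

    // All device names for one user, newest first, typed end to end.
    fn device_names(conn: &SqliteConnection, user_id: &str) -> QueryResult<Vec<String>> {
        devices::table
            .filter(devices::user_uuid.eq(user_id))
            .order(devices::created_at.desc())
            .select(devices::name)
            .load(conn)
    }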
@@ -77,6 +77,16 @@ table! {
    }
}

table! {
    org_policies (uuid) {
        uuid -> Text,
        org_uuid -> Text,
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
    }
}

table! {
    organizations (uuid) {
        uuid -> Text,
@@ -92,6 +102,7 @@ table! {
        atype -> Integer,
        enabled -> Bool,
        data -> Text,
        last_used -> Integer,
    }
}

@@ -100,7 +111,12 @@ table! {
        uuid -> Text,
        created_at -> Timestamp,
        updated_at -> Timestamp,
        verified_at -> Nullable<Timestamp>,
        last_verifying_at -> Nullable<Timestamp>,
        login_verify_count -> Integer,
        email -> Text,
        email_new -> Nullable<Text>,
        email_new_token -> Nullable<Text>,
        name -> Text,
        password_hash -> Binary,
        salt -> Binary,
@@ -149,6 +165,7 @@ joinable!(devices -> users (user_uuid));
joinable!(folders -> users (user_uuid));
joinable!(folders_ciphers -> ciphers (cipher_uuid));
joinable!(folders_ciphers -> folders (folder_uuid));
joinable!(org_policies -> organizations (org_uuid));
joinable!(twofactor -> users (user_uuid));
joinable!(users_collections -> collections (collection_uuid));
joinable!(users_collections -> users (user_uuid));
@@ -164,6 +181,7 @@ allow_tables_to_appear_in_same_query!(
    folders,
    folders_ciphers,
    invitations,
    org_policies,
    organizations,
    twofactor,
    users,
43 src/error.rs
@@ -46,6 +46,7 @@ use std::option::NoneError as NoneErr;
use std::time::SystemTimeError as TimeErr;
use u2f::u2ferror::U2fError as U2fErr;
use yubico::yubicoerror::YubicoError as YubiErr;
use lettre::smtp::error::Error as LettreErr;

#[derive(Display, Serialize)]
pub struct Empty {}

@@ -73,6 +74,7 @@ make_error! {
    ReqError(ReqErr): _has_source, _api_error,
    RegexError(RegexErr): _has_source, _api_error,
    YubiError(YubiErr): _has_source, _api_error,
    LetreErr(LettreErr): _has_source, _api_error,
}

// This is implemented by hand because NoneError doesn't implement neither Display nor Error
@@ -86,7 +88,18 @@ impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self.source() {
            Some(e) => write!(f, "{}.\n[CAUSE] {:#?}", self.message, e),
            None => write!(f, "{}. {}", self.message, self.error),
            None => match self.error {
                ErrorKind::EmptyError(_) => Ok(()),
                ErrorKind::SimpleError(ref s) => {
                    if &self.message == s {
                        write!(f, "{}", self.message)
                    } else {
                        write!(f, "{}. {}", self.message, s)
                    }
                }
                ErrorKind::JsonError(_) => write!(f, "{}", self.message),
                _ => unreachable!(),
            },
        }
    }
}

@@ -170,15 +183,17 @@ use rocket::response::{self, Responder, Response};

impl<'r> Responder<'r> for Error {
    fn respond_to(self, _: &Request) -> response::Result<'r> {
        let usr_msg = format!("{}", self);
        error!("{:#?}", self);
        match self.error {
            ErrorKind::EmptyError(_) => {} // Don't print the error in this situation
            _ => error!(target: "error", "{:#?}", self),
        };

        let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest);

        Response::build()
            .status(code)
            .header(ContentType::JSON)
            .sized_body(Cursor::new(usr_msg))
            .sized_body(Cursor::new(format!("{}", self)))
            .ok()
    }
}

@@ -196,21 +211,33 @@ macro_rules! err {
    }};
}

#[macro_export]
macro_rules! err_discard {
    ($msg:expr, $data:expr) => {{
        std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
        return Err(crate::error::Error::new($msg, $msg));
    }};
    ($usr_msg:expr, $log_value:expr, $data:expr) => {{
        std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
        return Err(crate::error::Error::new($usr_msg, $log_value));
    }};
}

#[macro_export]
macro_rules! err_json {
    ($expr:expr) => {{
        return Err(crate::error::Error::from($expr));
    ($expr:expr, $log_value:expr) => {{
        return Err(($log_value, $expr).into());
    }};
}

#[macro_export]
macro_rules! err_handler {
    ($expr:expr) => {{
        error!("Unauthorized Error: {}", $expr);
        error!(target: "auth", "Unauthorized Error: {}", $expr);
        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $expr));
    }};
    ($usr_msg:expr, $log_value:expr) => {{
        error!("Unauthorized Error: {}. {}", $usr_msg, $log_value);
        error!(target: "auth", "Unauthorized Error: {}. {}", $usr_msg, $log_value);
        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $usr_msg));
    }};
}
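For context, a hypothetical call site for the new err_discard! macro (not in the diff): it drains the request body into a sink before bailing out, so the connection stays reusable, while err_handler! is meant for request guards, logging under the "auth" target and aborting with 401. A sketch under those assumptions (the size check and function are invented for illustration):

    // Body of a hypothetical Rocket 0.4 route receiving rocket::Data.
    fn save_attachment(data: rocket::Data, size_limit: Option<u64>) -> Result<(), crate::error::Error> {
        if size_limit == Some(0) {
            // Expands to std::io::copy(&mut data.open(), &mut sink()) and then
            // returns Err(Error::new(msg, msg)).
            err_discard!("Attachment size limit exceeded", data);
        }
        Ok(())
    }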
143 src/mail.rs
@@ -1,14 +1,16 @@
use lettre::smtp::authentication::Credentials;
use lettre::smtp::authentication::Mechanism as SmtpAuthMechanism;
use lettre::smtp::ConnectionReuseParameters;
use lettre::{ClientSecurity, ClientTlsParameters, SmtpClient, SmtpTransport, Transport};
use lettre_email::{EmailBuilder, MimeMultipartType, PartBuilder};
use lettre::{
    builder::{EmailBuilder, MimeMultipartType, PartBuilder},
    ClientSecurity, ClientTlsParameters, SmtpClient, SmtpTransport, Transport,
};
use native_tls::{Protocol, TlsConnector};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
use quoted_printable::encode_to_str;

use crate::api::EmptyResult;
use crate::auth::{encode_jwt, generate_invite_claims};
use crate::auth::{encode_jwt, generate_delete_claims, generate_invite_claims, generate_verify_email_claims};
use crate::error::Error;
use crate::CONFIG;
use chrono::NaiveDateTime;
@@ -33,6 +35,8 @@ fn mailer() -> SmtpTransport {
        ClientSecurity::None
    };

    use std::time::Duration;

    let smtp_client = SmtpClient::new((host.as_str(), CONFIG.smtp_port()), client_security).unwrap();

    let smtp_client = match (&CONFIG.smtp_username(), &CONFIG.smtp_password()) {
@@ -40,19 +44,21 @@ fn mailer() -> SmtpTransport {
        _ => smtp_client,
    };

    let smtp_client = match &CONFIG.smtp_auth_mechanism() {
        Some(auth_mechanism_json) => {
            let auth_mechanism = serde_json::from_str::<SmtpAuthMechanism>(&auth_mechanism_json);
            match auth_mechanism {
    let smtp_client = match CONFIG.smtp_auth_mechanism() {
        Some(mechanism) => {
            let correct_mechanism = format!("\"{}\"", crate::util::upcase_first(&mechanism.trim_matches('"')));

            match serde_json::from_str::<SmtpAuthMechanism>(&correct_mechanism) {
                Ok(auth_mechanism) => smtp_client.authentication_mechanism(auth_mechanism),
                Err(_) => panic!("Failure to parse mechanism. Is it proper Json? Eg. `\"Plain\"` not `Plain`"),
                _ => panic!("Failure to parse mechanism. Is it proper Json? Eg. `\"Plain\"` not `Plain`"),
            }
        }
        },
        _ => smtp_client,
    };

    smtp_client
        .smtp_utf8(true)
        .timeout(Some(Duration::from_secs(CONFIG.smtp_timeout())))
        .connection_reuse(ConnectionReuseParameters::NoReuse)
        .transport()
}
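The reworked block above accepts `plain`, `Plain`, or `"Plain"` from the config by trimming quotes, upcasing the first letter, and re-quoting before handing the value to serde. A standalone sketch of that normalization (illustrative only; the helper name is hypothetical and the first-letter logic stands in for crate::util::upcase_first):

    // `login`, `Login` and `"Login"` all end up as the JSON string "Login".
    fn normalize_mechanism(raw: &str) -> String {
        let trimmed = raw.trim_matches('"');
        let mut chars = trimmed.chars();
        let upcased = match chars.next() {
            Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
            None => String::new(),
        };
        format!("\"{}\"", upcased)
        // normalize_mechanism("login") == "\"Login\"", which
        // serde_json::from_str::<SmtpAuthMechanism> can then parse.
    }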
@@ -92,6 +98,67 @@ pub fn send_password_hint(address: &str, hint: Option<String>) -> EmptyResult {
    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_delete_account(address: &str, uuid: &str) -> EmptyResult {
    let claims = generate_delete_claims(uuid.to_string());
    let delete_token = encode_jwt(&claims);

    let (subject, body_html, body_text) = get_text(
        "email/delete_account",
        json!({
            "url": CONFIG.domain(),
            "user_id": uuid,
            "email": percent_encode(address.as_bytes(), NON_ALPHANUMERIC).to_string(),
            "token": delete_token,
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_verify_email(address: &str, uuid: &str) -> EmptyResult {
    let claims = generate_verify_email_claims(uuid.to_string());
    let verify_email_token = encode_jwt(&claims);

    let (subject, body_html, body_text) = get_text(
        "email/verify_email",
        json!({
            "url": CONFIG.domain(),
            "user_id": uuid,
            "email": percent_encode(address.as_bytes(), NON_ALPHANUMERIC).to_string(),
            "token": verify_email_token,
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_welcome(address: &str) -> EmptyResult {
    let (subject, body_html, body_text) = get_text(
        "email/welcome",
        json!({
            "url": CONFIG.domain(),
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult {
    let claims = generate_verify_email_claims(uuid.to_string());
    let verify_email_token = encode_jwt(&claims);

    let (subject, body_html, body_text) = get_text(
        "email/welcome_must_verify",
        json!({
            "url": CONFIG.domain(),
            "user_id": uuid,
            "token": verify_email_token,
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_invite(
    address: &str,
    uuid: &str,
@@ -105,7 +172,7 @@ pub fn send_invite(
        String::from(address),
        org_id.clone(),
        org_user_id.clone(),
        invited_by_email.clone(),
        invited_by_email,
    );
    let invite_token = encode_jwt(&claims);

@@ -168,7 +235,54 @@ pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, de
    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_token(address: &str, token: &str) -> EmptyResult {
    let (subject, body_html, body_text) = get_text(
        "email/twofactor_email",
        json!({
            "url": CONFIG.domain(),
            "token": token,
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_change_email(address: &str, token: &str) -> EmptyResult {
    let (subject, body_html, body_text) = get_text(
        "email/change_email",
        json!({
            "url": CONFIG.domain(),
            "token": token,
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

pub fn send_test(address: &str) -> EmptyResult {
    let (subject, body_html, body_text) = get_text(
        "email/smtp_test",
        json!({
            "url": CONFIG.domain(),
        }),
    )?;

    send_email(&address, &subject, &body_html, &body_text)
}

fn send_email(address: &str, subject: &str, body_html: &str, body_text: &str) -> EmptyResult {
    let address_split: Vec<&str> = address.rsplitn(2, '@').collect();
    if address_split.len() != 2 {
        err!("Invalid email address (no @)");
    }

    let domain_puny = match idna::domain_to_ascii_strict(address_split[0]) {
        Ok(d) => d,
        Err(_) => err!("Can't convert email domain to ASCII representation"),
    };

    let address = format!("{}@{}", address_split[1], domain_puny);

    let html = PartBuilder::new()
        .body(encode_to_str(body_html))
        .header(("Content-Type", "text/html; charset=utf-8"))
@@ -196,12 +310,11 @@ fn send_email(address: &str, subject: &str, body_html: &str, body_text: &str) ->

    let mut transport = mailer();

    let result = transport
        .send(email.into())
        .map_err(|e| Error::new("Error sending email", e.to_string()))
        .and(Ok(()));
    let result = transport.send(email);

    // Explicitly close the connection, in case of error
    transport.close();
    result

    result?;
    Ok(())
}
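Note the ordering in the reworked send_email: rsplitn(2, '@') walks from the right, so index 0 holds the domain and index 1 the local part, which is why the address is rebuilt as `address_split[1]@domain_puny`. A small illustration of that and of the punycode step (not from the diff; assumes the idna crate used above):

    fn puny_demo() {
        let parts: Vec<&str> = "user@bücher.example".rsplitn(2, '@').collect();
        // Domain first, local part second, because the split runs right to left.
        assert_eq!(parts, ["bücher.example", "user"]);

        let domain = idna::domain_to_ascii_strict(parts[0]).unwrap();
        assert_eq!(domain, "xn--bcher-kva.example");
        assert_eq!(format!("{}@{}", parts[1], domain), "user@xn--bcher-kva.example");
    }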
190 src/main.rs
@@ -1,6 +1,7 @@
#![feature(proc_macro_hygiene, decl_macro, vec_remove_item, try_trait)]
#![feature(proc_macro_hygiene, vec_remove_item, try_trait, ip)]
#![recursion_limit = "256"]

extern crate openssl;
#[macro_use]
extern crate rocket;
#[macro_use]
@@ -14,15 +15,18 @@ extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate derive_more;
#[macro_use]
extern crate num_derive;

extern crate backtrace;

use std::{
    fs::create_dir_all,
    path::Path,
    process::{exit, Command},
    str::FromStr,
    panic, thread, fmt // For panic logging
};

#[macro_use]
@@ -38,19 +42,59 @@ mod util;
pub use config::CONFIG;
pub use error::{Error, MapResult};

use structopt::StructOpt;

// Used for catching panics and log them to file instead of stderr
use backtrace::Backtrace;
struct Shim(Backtrace);

impl fmt::Debug for Shim {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "\n{:?}", self.0)
    }
}

#[derive(Debug, StructOpt)]
#[structopt(name = "bitwarden_rs", about = "A Bitwarden API server written in Rust")]
struct Opt {
    /// Prints the app version
    #[structopt(short, long)]
    version: bool,
}

fn main() {
    parse_args();
    launch_info();

    if CONFIG.extended_logging() {
        init_logging().ok();
    }
    use log::LevelFilter as LF;
    let level = LF::from_str(&CONFIG.log_level()).expect("Valid log level");
    init_logging(level).ok();

    let extra_debug = match level {
        LF::Trace | LF::Debug => true,
        _ => false,
    };

    check_db();
    check_rsa_keys();
    check_web_vault();
    migrations::run_migrations();

    launch_rocket();
    create_icon_cache_folder();

    launch_rocket(extra_debug);
}

fn parse_args() {
    let opt = Opt::from_args();
    if opt.version {
        if let Some(version) = option_env!("GIT_VERSION") {
            println!("bitwarden_rs {}", version);
        } else {
            println!("bitwarden_rs (Version info from Git not present)");
        }
        exit(0);
    }
}

fn launch_info() {
@@ -68,10 +112,23 @@ fn launch_info() {
    println!("\\--------------------------------------------------------------------/\n");
}

fn init_logging() -> Result<(), fern::InitError> {
    use std::str::FromStr;
fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
    let mut logger = fern::Dispatch::new()
        .format(|out, message, record| {
        .level(level)
        // Hide unknown certificate errors if using self-signed
        .level_for("rustls::session", log::LevelFilter::Off)
        // Hide failed to close stream messages
        .level_for("hyper::server", log::LevelFilter::Warn)
        // Silence rocket logs
        .level_for("_", log::LevelFilter::Off)
        .level_for("launch", log::LevelFilter::Off)
        .level_for("launch_", log::LevelFilter::Off)
        .level_for("rocket::rocket", log::LevelFilter::Off)
        .level_for("rocket::fairing", log::LevelFilter::Off)
        .chain(std::io::stdout());

    if CONFIG.extended_logging() {
        logger = logger.format(|out, message, record| {
            out.finish(format_args!(
                "{}[{}][{}] {}",
                chrono::Local::now().format("[%Y-%m-%d %H:%M:%S]"),
@@ -79,13 +136,10 @@ fn init_logging() -> Result<(), fern::InitError> {
                record.level(),
                message
            ))
        })
        .level(log::LevelFilter::from_str(&CONFIG.log_level()).expect("Valid log level"))
        // Hide unknown certificate errors if using self-signed
        .level_for("rustls::session", log::LevelFilter::Off)
        // Hide failed to close stream messages
        .level_for("hyper::server", log::LevelFilter::Warn)
        .chain(std::io::stdout());
        });
    } else {
        logger = logger.format(|out, message, _| out.finish(format_args!("{}", message)));
    }

    if let Some(log_file) = CONFIG.log_file() {
        logger = logger.chain(fern::log_file(log_file)?);
@@ -100,6 +154,44 @@ fn init_logging() -> Result<(), fern::InitError> {

    logger.apply()?;

    // Catch panics and log them instead of default output to StdErr
    panic::set_hook(Box::new(|info| {
        let backtrace = Backtrace::new();

        let thread = thread::current();
        let thread = thread.name().unwrap_or("unnamed");

        let msg = match info.payload().downcast_ref::<&'static str>() {
            Some(s) => *s,
            None => match info.payload().downcast_ref::<String>() {
                Some(s) => &**s,
                None => "Box<Any>",
            },
        };

        match info.location() {
            Some(location) => {
                error!(
                    target: "panic", "thread '{}' panicked at '{}': {}:{}{:?}",
                    thread,
                    msg,
                    location.file(),
                    location.line(),
                    Shim(backtrace)
                );
            }
            None => {
                error!(
                    target: "panic",
                    "thread '{}' panicked at '{}'{:?}",
                    thread,
                    msg,
                    Shim(backtrace)
                )
            }
        }
    }));

    Ok(())
}
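The panic hook writes through the normal log facade with target "panic" (and err_handler! in error.rs now logs under target "auth"), so fern can route or silence these per target just like the level_for calls above. A hypothetical fragment (names assumed, not in the diff) that could sit before logger.apply(), copying panic records into their own file while everything else keeps flowing to the chains configured above:

    logger = logger.chain(
        fern::Dispatch::new()
            .filter(|meta| meta.target() == "panic") // only the panic hook's records
            .chain(fern::log_file("panics.log")?),
    );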
@@ -127,8 +219,7 @@ fn check_db() {
    let path = Path::new(&url);

    if let Some(parent) = path.parent() {
        use std::fs;
        if fs::create_dir_all(parent).is_err() {
        if create_dir_all(parent).is_err() {
            error!("Error creating database directory");
            exit(1);
        }
@@ -137,7 +228,7 @@ fn check_db() {
    // Turn on WAL in SQLite
    if CONFIG.enable_db_wal() {
        use diesel::RunQueryDsl;
        let connection = db::get_connection().expect("Can't conect to DB");
        let connection = db::get_connection().expect("Can't connect to DB");
        diesel::sql_query("PRAGMA journal_mode=wal")
            .execute(&connection)
            .expect("Failed to turn on WAL");
@@ -146,13 +237,20 @@ fn check_db() {
    db::get_connection().expect("Can't connect to DB");
}

fn create_icon_cache_folder() {
    // Try to create the icon cache folder, and generate an error if it could not.
    create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache directory");
}

fn check_rsa_keys() {
    // If the RSA keys don't exist, try to create them
    if !util::file_exists(&CONFIG.private_rsa_key()) || !util::file_exists(&CONFIG.public_rsa_key()) {
        info!("JWT keys don't exist, checking if OpenSSL is available...");

        Command::new("openssl").arg("version").status().unwrap_or_else(|_| {
            info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
            info!(
                "Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH"
            );
            exit(1);
        });

@@ -200,7 +298,9 @@ fn check_web_vault() {
    let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");

    if !index_path.exists() {
        error!("Web vault is not found. To install it, please follow the steps in https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
        error!("Web vault is not found. To install it, please follow the steps in: ");
        error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
        error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
        exit(1);
    }
}
@@ -215,42 +315,48 @@ mod migrations {
    embed_migrations!("migrations/sqlite");
    #[cfg(feature = "mysql")]
    embed_migrations!("migrations/mysql");
    #[cfg(feature = "postgresql")]
    embed_migrations!("migrations/postgresql");

    pub fn run_migrations() {
        // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
        let connection = crate::db::get_connection().expect("Can't connect to DB");

        use std::io::stdout;

        // Disable Foreign Key Checks during migration
        use diesel::RunQueryDsl;
        #[cfg(feature = "postgres")]
        diesel::sql_query("SET CONSTRAINTS ALL DEFERRED").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
        #[cfg(feature = "mysql")]
        diesel::sql_query("SET FOREIGN_KEY_CHECKS = 0").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");
        #[cfg(feature = "sqlite")]
        diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&connection).expect("Failed to disable Foreign Key Checks during migrations");

        embedded_migrations::run_with_output(&connection, &mut stdout()).expect("Can't run migrations");
    }
}

fn launch_rocket() {
    // Create Rocket object, this stores current log level and sets it's own
fn launch_rocket(extra_debug: bool) {
    // Create Rocket object, this stores current log level and sets its own
    let rocket = rocket::ignite();

    // If we aren't logging the mounts, we force the logging level down
    if !CONFIG.log_mounts() {
        log::set_max_level(log::LevelFilter::Warn);
    }

    let rocket = rocket
        .mount("/", api::web_routes())
        .mount("/api", api::core_routes())
        .mount("/admin", api::admin_routes())
        .mount("/identity", api::identity_routes())
        .mount("/icons", api::icons_routes())
        .mount("/notifications", api::notifications_routes());

    // Force the level up for the fairings, managed state and launch
    if !CONFIG.log_mounts() {
        log::set_max_level(log::LevelFilter::max());
    }
    let basepath = &CONFIG.domain_path();

    // If adding more paths here, consider also adding them to
    // crate::utils::LOGGED_ROUTES to make sure they appear in the log
    let rocket = rocket
        .mount(&[basepath, "/"].concat(), api::web_routes())
        .mount(&[basepath, "/api"].concat(), api::core_routes())
        .mount(&[basepath, "/admin"].concat(), api::admin_routes())
        .mount(&[basepath, "/identity"].concat(), api::identity_routes())
        .mount(&[basepath, "/icons"].concat(), api::icons_routes())
        .mount(&[basepath, "/notifications"].concat(), api::notifications_routes())
        .manage(db::init_pool())
        .manage(api::start_notification_server())
        .attach(util::AppHeaders());
        .attach(util::AppHeaders())
        .attach(util::CORS())
        .attach(util::BetterLogging(extra_debug));

    // Launch and print error if there is one
    // The launch will restore the original logging level
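The mount paths are now assembled from domain_path(), so an instance reverse-proxied under a URL subpath keeps working. An illustration of the concat pattern used above (values hypothetical):

    fn mount_path_demo() {
        // With DOMAIN=https://example.com/vault, domain_path() yields "/vault".
        let basepath = "/vault";
        assert_eq!([basepath, "/api"].concat(), "/vault/api");
        // With no subpath, domain_path() is empty and the routes stay at the root.
        assert_eq!(["", "/api"].concat(), "/api");
    }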
@@ -39,7 +39,8 @@
    "Type": 1,
    "Domains": [
        "apple.com",
        "icloud.com"
        "icloud.com",
        "tv.apple.com"
    ],
    "Excluded": false
},
@@ -106,7 +107,8 @@
        "windows.com",
        "microsoftonline.com",
        "office365.com",
        "microsoftstore.com"
        "microsoftstore.com",
        "xbox.com"
    ],
    "Excluded": false
},
@@ -760,7 +762,83 @@
        "superuser.com",
        "stackoverflow.com",
        "serverfault.com",
        "mathoverflow.net"
        "mathoverflow.net",
        "askubuntu.com"
    ],
    "Excluded": false
},
{
    "Type": 75,
    "Domains": [
        "netcup.de",
        "netcup.eu",
        "customercontrolpanel.de"
    ],
    "Excluded": false
},
{
    "Type": 76,
    "Domains": [
        "docusign.com",
        "docusign.net"
    ],
    "Excluded": false
},
{
    "Type": 77,
    "Domains": [
        "envato.com",
        "themeforest.net",
        "codecanyon.net",
        "videohive.net",
        "audiojungle.net",
        "graphicriver.net",
        "photodune.net",
        "3docean.net"
    ],
    "Excluded": false
},
{
    "Type": 78,
    "Domains": [
        "x10hosting.com",
        "x10premium.com"
    ],
    "Excluded": false
},
{
    "Type": 79,
    "Domains": [
        "dnsomatic.com",
        "opendns.com",
        "umbrella.com"
    ],
    "Excluded": false
},
{
    "Type": 80,
    "Domains": [
        "cagreatamerica.com",
        "canadaswonderland.com",
        "carowinds.com",
        "cedarfair.com",
        "cedarpoint.com",
        "dorneypark.com",
        "kingsdominion.com",
        "knotts.com",
        "miadventure.com",
        "schlitterbahn.com",
        "valleyfair.com",
        "visitkingsisland.com",
        "worldsoffun.com"
    ],
    "Excluded": false
},
{
    "Type": 81,
    "Domains": [
        "ubnt.com",
        "ui.com"
    ],
    "Excluded": false
}
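All of the equivalent-domain entries being extended above share one shape. A hypothetical Rust-side mirror of it (field names taken from the JSON shown; the struct itself is not code from this diff):

    // Requires serde with the derive feature.
    #[derive(serde::Deserialize)]
    struct GlobalDomain {
        #[serde(rename = "Type")]
        atype: i32,
        #[serde(rename = "Domains")]
        domains: Vec<String>,
        #[serde(rename = "Excluded")]
        excluded: bool,
    }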
BIN src/static/images/hibp.png (new file; binary file not shown; image, 9.7 KiB)
2009 src/static/scripts/bootstrap-native-v4.js (vendored, new file; diff suppressed because it is too large)
10038 src/static/scripts/bootstrap.css (vendored, new file; diff suppressed because it is too large)
205 src/static/scripts/identicon.js (new file)
@@ -0,0 +1,205 @@
/**
 * Identicon.js 2.3.3
 * http://github.com/stewartlord/identicon.js
 *
 * PNGLib required for PNG output
 * http://www.xarg.org/download/pnglib.js
 *
 * Copyright 2018, Stewart Lord
 * Released under the BSD license
 * http://www.opensource.org/licenses/bsd-license.php
 */

(function() {
    var PNGlib;
    if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
        PNGlib = require('./pnglib');
    } else {
        PNGlib = window.PNGlib;
    }

    var Identicon = function(hash, options){
        if (typeof(hash) !== 'string' || hash.length < 15) {
            throw 'A hash of at least 15 characters is required.';
        }

        this.defaults = {
            background: [240, 240, 240, 255],
            margin: 0.08,
            size: 64,
            saturation: 0.7,
            brightness: 0.5,
            format: 'png'
        };

        this.options = typeof(options) === 'object' ? options : this.defaults;

        // backward compatibility with old constructor (hash, size, margin)
        if (typeof(arguments[1]) === 'number') { this.options.size = arguments[1]; }
        if (arguments[2]) { this.options.margin = arguments[2]; }

        this.hash = hash
        this.background = this.options.background || this.defaults.background;
        this.size = this.options.size || this.defaults.size;
        this.format = this.options.format || this.defaults.format;
        this.margin = this.options.margin !== undefined ? this.options.margin : this.defaults.margin;

        // foreground defaults to last 7 chars as hue at 70% saturation, 50% brightness
        var hue = parseInt(this.hash.substr(-7), 16) / 0xfffffff;
        var saturation = this.options.saturation || this.defaults.saturation;
        var brightness = this.options.brightness || this.defaults.brightness;
        this.foreground = this.options.foreground || this.hsl2rgb(hue, saturation, brightness);
    };

    Identicon.prototype = {
        background: null,
        foreground: null,
        hash: null,
        margin: null,
        size: null,
        format: null,

        image: function(){
            return this.isSvg()
                ? new Svg(this.size, this.foreground, this.background)
                : new PNGlib(this.size, this.size, 256);
        },

        render: function(){
            var image = this.image(),
                size = this.size,
                baseMargin = Math.floor(size * this.margin),
                cell = Math.floor((size - (baseMargin * 2)) / 5),
                margin = Math.floor((size - cell * 5) / 2),
                bg = image.color.apply(image, this.background),
                fg = image.color.apply(image, this.foreground);

            // the first 15 characters of the hash control the pixels (even/odd)
            // they are drawn down the middle first, then mirrored outwards
            var i, color;
            for (i = 0; i < 15; i++) {
                color = parseInt(this.hash.charAt(i), 16) % 2 ? bg : fg;
                if (i < 5) {
                    this.rectangle(2 * cell + margin, i * cell + margin, cell, cell, color, image);
                } else if (i < 10) {
                    this.rectangle(1 * cell + margin, (i - 5) * cell + margin, cell, cell, color, image);
                    this.rectangle(3 * cell + margin, (i - 5) * cell + margin, cell, cell, color, image);
                } else if (i < 15) {
                    this.rectangle(0 * cell + margin, (i - 10) * cell + margin, cell, cell, color, image);
                    this.rectangle(4 * cell + margin, (i - 10) * cell + margin, cell, cell, color, image);
                }
            }

            return image;
        },

        rectangle: function(x, y, w, h, color, image){
            if (this.isSvg()) {
                image.rectangles.push({x: x, y: y, w: w, h: h, color: color});
            } else {
                var i, j;
                for (i = x; i < x + w; i++) {
                    for (j = y; j < y + h; j++) {
                        image.buffer[image.index(i, j)] = color;
                    }
                }
            }
        },

        // adapted from: https://gist.github.com/aemkei/1325937
        hsl2rgb: function(h, s, b){
            h *= 6;
            s = [
                b += s *= b < .5 ? b : 1 - b,
                b - h % 1 * s * 2,
                b -= s *= 2,
                b,
                b + h % 1 * s,
                b + s
            ];

            return[
                s[ ~~h % 6 ] * 255, // red
                s[ (h|16) % 6 ] * 255, // green
                s[ (h|8) % 6 ] * 255 // blue
            ];
        },

        toString: function(raw){
            // backward compatibility with old toString, default to base64
            if (raw) {
                return this.render().getDump();
            } else {
                return this.render().getBase64();
            }
        },

        isSvg: function(){
            return this.format.match(/svg/i)
        }
    };

    var Svg = function(size, foreground, background){
        this.size = size;
        this.foreground = this.color.apply(this, foreground);
        this.background = this.color.apply(this, background);
        this.rectangles = [];
    };

    Svg.prototype = {
        size: null,
        foreground: null,
        background: null,
        rectangles: null,

        color: function(r, g, b, a){
            var values = [r, g, b].map(Math.round);
            values.push((a >= 0) && (a <= 255) ? a/255 : 1);
            return 'rgba(' + values.join(',') + ')';
        },

        getDump: function(){
            var i,
                xml,
                rect,
                fg = this.foreground,
                bg = this.background,
                stroke = this.size * 0.005;

            xml = "<svg xmlns='http://www.w3.org/2000/svg'"
                + " width='" + this.size + "' height='" + this.size + "'"
                + " style='background-color:" + bg + ";'>"
                + "<g style='fill:" + fg + "; stroke:" + fg + "; stroke-width:" + stroke + ";'>";

            for (i = 0; i < this.rectangles.length; i++) {
                rect = this.rectangles[i];
                if (rect.color == bg) continue;
                xml += "<rect "
                    + " x='" + rect.x + "'"
                    + " y='" + rect.y + "'"
                    + " width='" + rect.w + "'"
                    + " height='" + rect.h + "'"
                    + "/>";
            }
            xml += "</g></svg>"

            return xml;
        },

        getBase64: function(){
            if ('function' === typeof btoa) {
                return btoa(this.getDump());
            } else if (Buffer) {
                return new Buffer(this.getDump(), 'binary').toString('base64');
            } else {
                throw 'Cannot generate base64 output';
            }
        }
    };

    if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
        module.exports = Identicon;
    } else {
        window.Identicon = Identicon;
    }
})();

402 src/static/scripts/md5.js (new file)
@@ -0,0 +1,402 @@
/*
 * JavaScript MD5
 * https://github.com/blueimp/JavaScript-MD5
 *
 * Copyright 2011, Sebastian Tschan
 * https://blueimp.net
 *
 * Licensed under the MIT license:
 * https://opensource.org/licenses/MIT
 *
 * Based on
 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
 * Digest Algorithm, as defined in RFC 1321.
 * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
 * Distributed under the BSD License
 * See http://pajhome.org.uk/crypt/md5 for more info.
 */

/* global define */

/* eslint-disable strict */

;(function($) {
  'use strict'

  /**
   * Add integers, wrapping at 2^32.
   * This uses 16-bit operations internally to work around bugs in interpreters.
   *
   * @param {number} x First integer
   * @param {number} y Second integer
   * @returns {number} Sum
   */
  function safeAdd(x, y) {
    var lsw = (x & 0xffff) + (y & 0xffff)
    var msw = (x >> 16) + (y >> 16) + (lsw >> 16)
    return (msw << 16) | (lsw & 0xffff)
  }

  /**
   * Bitwise rotate a 32-bit number to the left.
   *
   * @param {number} num 32-bit number
   * @param {number} cnt Rotation count
   * @returns {number} Rotated number
   */
  function bitRotateLeft(num, cnt) {
    return (num << cnt) | (num >>> (32 - cnt))
  }

  /**
   * Basic operation the algorithm uses.
   *
   * @param {number} q q
   * @param {number} a a
   * @param {number} b b
   * @param {number} x x
   * @param {number} s s
   * @param {number} t t
   * @returns {number} Result
   */
  function md5cmn(q, a, b, x, s, t) {
    return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b)
  }
  /**
   * Basic operation the algorithm uses.
   *
   * @param {number} a a
   * @param {number} b b
   * @param {number} c c
   * @param {number} d d
   * @param {number} x x
   * @param {number} s s
   * @param {number} t t
   * @returns {number} Result
   */
  function md5ff(a, b, c, d, x, s, t) {
    return md5cmn((b & c) | (~b & d), a, b, x, s, t)
  }
  /**
   * Basic operation the algorithm uses.
   *
   * @param {number} a a
   * @param {number} b b
   * @param {number} c c
   * @param {number} d d
   * @param {number} x x
   * @param {number} s s
   * @param {number} t t
   * @returns {number} Result
   */
  function md5gg(a, b, c, d, x, s, t) {
    return md5cmn((b & d) | (c & ~d), a, b, x, s, t)
  }
  /**
   * Basic operation the algorithm uses.
   *
   * @param {number} a a
   * @param {number} b b
   * @param {number} c c
   * @param {number} d d
   * @param {number} x x
   * @param {number} s s
   * @param {number} t t
   * @returns {number} Result
   */
  function md5hh(a, b, c, d, x, s, t) {
    return md5cmn(b ^ c ^ d, a, b, x, s, t)
  }
  /**
   * Basic operation the algorithm uses.
   *
   * @param {number} a a
   * @param {number} b b
   * @param {number} c c
   * @param {number} d d
   * @param {number} x x
   * @param {number} s s
   * @param {number} t t
   * @returns {number} Result
   */
  function md5ii(a, b, c, d, x, s, t) {
    return md5cmn(c ^ (b | ~d), a, b, x, s, t)
  }

  /**
   * Calculate the MD5 of an array of little-endian words, and a bit length.
   *
   * @param {Array} x Array of little-endian words
   * @param {number} len Bit length
   * @returns {Array<number>} MD5 Array
   */
  function binlMD5(x, len) {
    /* append padding */
    x[len >> 5] |= 0x80 << len % 32
    x[(((len + 64) >>> 9) << 4) + 14] = len

    var i
    var olda
    var oldb
    var oldc
    var oldd
    var a = 1732584193
    var b = -271733879
    var c = -1732584194
    var d = 271733878

    for (i = 0; i < x.length; i += 16) {
      olda = a
      oldb = b
      oldc = c
      oldd = d

      a = md5ff(a, b, c, d, x[i], 7, -680876936)
      d = md5ff(d, a, b, c, x[i + 1], 12, -389564586)
      c = md5ff(c, d, a, b, x[i + 2], 17, 606105819)
      b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330)
      a = md5ff(a, b, c, d, x[i + 4], 7, -176418897)
      d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426)
      c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341)
      b = md5ff(b, c, d, a, x[i + 7], 22, -45705983)
      a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416)
      d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417)
      c = md5ff(c, d, a, b, x[i + 10], 17, -42063)
      b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162)
      a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682)
      d = md5ff(d, a, b, c, x[i + 13], 12, -40341101)
      c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290)
      b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329)

      a = md5gg(a, b, c, d, x[i + 1], 5, -165796510)
      d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632)
      c = md5gg(c, d, a, b, x[i + 11], 14, 643717713)
      b = md5gg(b, c, d, a, x[i], 20, -373897302)
      a = md5gg(a, b, c, d, x[i + 5], 5, -701558691)
      d = md5gg(d, a, b, c, x[i + 10], 9, 38016083)
      c = md5gg(c, d, a, b, x[i + 15], 14, -660478335)
      b = md5gg(b, c, d, a, x[i + 4], 20, -405537848)
      a = md5gg(a, b, c, d, x[i + 9], 5, 568446438)
      d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690)
      c = md5gg(c, d, a, b, x[i + 3], 14, -187363961)
      b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501)
      a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467)
      d = md5gg(d, a, b, c, x[i + 2], 9, -51403784)
      c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473)
      b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734)

      a = md5hh(a, b, c, d, x[i + 5], 4, -378558)
      d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463)
      c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562)
      b = md5hh(b, c, d, a, x[i + 14], 23, -35309556)
      a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060)
      d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353)
      c = md5hh(c, d, a, b, x[i + 7], 16, -155497632)
      b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640)
      a = md5hh(a, b, c, d, x[i + 13], 4, 681279174)
      d = md5hh(d, a, b, c, x[i], 11, -358537222)
      c = md5hh(c, d, a, b, x[i + 3], 16, -722521979)
      b = md5hh(b, c, d, a, x[i + 6], 23, 76029189)
      a = md5hh(a, b, c, d, x[i + 9], 4, -640364487)
      d = md5hh(d, a, b, c, x[i + 12], 11, -421815835)
      c = md5hh(c, d, a, b, x[i + 15], 16, 530742520)
      b = md5hh(b, c, d, a, x[i + 2], 23, -995338651)

      a = md5ii(a, b, c, d, x[i], 6, -198630844)
      d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415)
      c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905)
      b = md5ii(b, c, d, a, x[i + 5], 21, -57434055)
      a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571)
      d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606)
      c = md5ii(c, d, a, b, x[i + 10], 15, -1051523)
      b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799)
      a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359)
      d = md5ii(d, a, b, c, x[i + 15], 10, -30611744)
      c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380)
      b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649)
      a = md5ii(a, b, c, d, x[i + 4], 6, -145523070)
      d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379)
      c = md5ii(c, d, a, b, x[i + 2], 15, 718787259)
      b = md5ii(b, c, d, a, x[i + 9], 21, -343485551)

      a = safeAdd(a, olda)
      b = safeAdd(b, oldb)
      c = safeAdd(c, oldc)
      d = safeAdd(d, oldd)
    }
    return [a, b, c, d]
  }

  /**
   * Convert an array of little-endian words to a string
   *
   * @param {Array<number>} input MD5 Array
   * @returns {string} MD5 string
   */
  function binl2rstr(input) {
    var i
    var output = ''
    var length32 = input.length * 32
    for (i = 0; i < length32; i += 8) {
      output += String.fromCharCode((input[i >> 5] >>> i % 32) & 0xff)
    }
    return output
  }

  /**
   * Convert a raw string to an array of little-endian words
   * Characters >255 have their high-byte silently ignored.
   *
   * @param {string} input Raw input string
   * @returns {Array<number>} Array of little-endian words
   */
  function rstr2binl(input) {
    var i
    var output = []
    output[(input.length >> 2) - 1] = undefined
    for (i = 0; i < output.length; i += 1) {
      output[i] = 0
    }
    var length8 = input.length * 8
    for (i = 0; i < length8; i += 8) {
      output[i >> 5] |= (input.charCodeAt(i / 8) & 0xff) << i % 32
    }
    return output
  }

  /**
   * Calculate the MD5 of a raw string
   *
   * @param {string} s Input string
   * @returns {string} Raw MD5 string
   */
  function rstrMD5(s) {
    return binl2rstr(binlMD5(rstr2binl(s), s.length * 8))
  }

  /**
   * Calculates the HMAC-MD5 of a key and some data (raw strings)
   *
   * @param {string} key HMAC key
   * @param {string} data Raw input string
   * @returns {string} Raw MD5 string
   */
  function rstrHMACMD5(key, data) {
    var i
    var bkey = rstr2binl(key)
    var ipad = []
    var opad = []
    var hash
    ipad[15] = opad[15] = undefined
    if (bkey.length > 16) {
      bkey = binlMD5(bkey, key.length * 8)
    }
    for (i = 0; i < 16; i += 1) {
      ipad[i] = bkey[i] ^ 0x36363636
      opad[i] = bkey[i] ^ 0x5c5c5c5c
    }
    hash = binlMD5(ipad.concat(rstr2binl(data)), 512 + data.length * 8)
    return binl2rstr(binlMD5(opad.concat(hash), 512 + 128))
  }

  /**
   * Convert a raw string to a hex string
   *
   * @param {string} input Raw input string
   * @returns {string} Hex encoded string
   */
  function rstr2hex(input) {
    var hexTab = '0123456789abcdef'
    var output = ''
    var x
    var i
    for (i = 0; i < input.length; i += 1) {
      x = input.charCodeAt(i)
      output += hexTab.charAt((x >>> 4) & 0x0f) + hexTab.charAt(x & 0x0f)
    }
    return output
  }

  /**
   * Encode a string as UTF-8
   *
   * @param {string} input Input string
   * @returns {string} UTF8 string
   */
  function str2rstrUTF8(input) {
    return unescape(encodeURIComponent(input))
  }

  /**
   * Encodes input string as raw MD5 string
   *
   * @param {string} s Input string
   * @returns {string} Raw MD5 string
   */
  function rawMD5(s) {
    return rstrMD5(str2rstrUTF8(s))
  }
  /**
   * Encodes input string as Hex encoded string
   *
   * @param {string} s Input string
   * @returns {string} Hex encoded string
   */
  function hexMD5(s) {
    return rstr2hex(rawMD5(s))
  }
  /**
   * Calculates the raw HMAC-MD5 for the given key and data
   *
   * @param {string} k HMAC key
   * @param {string} d Input string
   * @returns {string} Raw MD5 string
   */
  function rawHMACMD5(k, d) {
    return rstrHMACMD5(str2rstrUTF8(k), str2rstrUTF8(d))
  }
  /**
   * Calculates the Hex encoded HMAC-MD5 for the given key and data
   *
   * @param {string} k HMAC key
   * @param {string} d Input string
   * @returns {string} Raw MD5 string
   */
  function hexHMACMD5(k, d) {
    return rstr2hex(rawHMACMD5(k, d))
  }

  /**
   * Calculates MD5 value for a given string.
   * If a key is provided, calculates the HMAC-MD5 value.
   * Returns a Hex encoded string unless the raw argument is given.
   *
   * @param {string} string Input string
   * @param {string} [key] HMAC key
   * @param {boolean} [raw] Raw output switch
   * @returns {string} MD5 output
   */
  function md5(string, key, raw) {
    if (!key) {
      if (!raw) {
        return hexMD5(string)
      }
      return rawMD5(string)
    }
    if (!raw) {
      return hexHMACMD5(key, string)
    }
    return rawHMACMD5(key, string)
  }

  if (typeof define === 'function' && define.amd) {
    define(function() {
      return md5
    })
  } else if (typeof module === 'object' && module.exports) {
    module.exports = md5
  } else {
    $.md5 = md5
  }
})(this)
@@ -6,16 +6,10 @@
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    <title>Bitwarden_rs Admin Panel</title>

    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.3.1/css/bootstrap.min.css"
        integrity="sha256-YLGeXaapI0/5IgZopewRJcFXomhRMlYYjugPLSyNjTY=" crossorigin="anonymous" />
    <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js"
        integrity="sha256-FgpCb/KJQlLNfOu91ta32o/NMZxltwRo8QtmkMRdAu8=" crossorigin="anonymous"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/blueimp-md5/2.10.0/js/md5.min.js"
        integrity="sha256-J9IhvkIJb0diRVJOyu+Ndtg41RibFkF8eaA60jdjtB8=" crossorigin="anonymous"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/identicon.js/2.3.3/identicon.min.js"
        integrity="sha256-nYoL3nK/HA1e1pJvLwNPnpKuKG9q89VFX862r5aohmA=" crossorigin="anonymous"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.3.1/js/bootstrap.bundle.min.js"
        integrity="sha256-fzFFyH01cBVPYzl16KT40wqjhgPtq6FFUB6ckN2+GGw=" crossorigin="anonymous"></script>
    <link rel="stylesheet" href="{{urlpath}}/bwrs_static/bootstrap.css" />
    <script src="{{urlpath}}/bwrs_static/bootstrap-native-v4.js"></script>
    <script src="{{urlpath}}/bwrs_static/md5.js"></script>
    <script src="{{urlpath}}/bwrs_static/identicon.js"></script>
    <style>
        body {
            padding-top: 70px;
@@ -39,21 +33,32 @@
</head>

<body class="bg-light">
    <nav class="navbar navbar-expand-md navbar-dark bg-dark fixed-top shadow">
    <nav class="navbar navbar-expand-sm navbar-dark bg-dark fixed-top shadow">
        <a class="navbar-brand" href="#">Bitwarden_rs</a>
        <div class="navbar-collapse">
            <ul class="navbar-nav">
                <li class="nav-item active">
                    <a class="nav-link" href="/admin">Admin Panel</a>
                    <a class="nav-link" href="{{urlpath}}/admin">Admin Panel</a>
                </li>
                <li class="nav-item">
                    <a class="nav-link" href="/">Vault</a>
                    <a class="nav-link" href="{{urlpath}}/">Vault</a>
                </li>
            </ul>
        </div>

        <ul class="navbar-nav">
            {{#if version}}
            <div class="navbar-text">Version: {{version}}</div>
            <li class="nav-item">
                <span class="navbar-text mr-2">Version: {{version}}</span>
            </li>
            {{/if}}

            {{#if logged_in}}
            <li class="nav-item">
                <a class="nav-link" href="{{urlpath}}/admin/logout">Log Out</a>
            </li>
            {{/if}}
        </ul>
    </nav>

    {{> (page_content) }}
@@ -14,7 +14,7 @@

    <form class="form-inline" method="post">
        <input type="password" class="form-control w-50 mr-2" name="token" placeholder="Enter admin token">
        <button type="submit" class="btn btn-primary">Save</button>
        <button type="submit" class="btn btn-primary">Enter</button>
    </form>
</div>
</div>
Some files were not shown because too many files have changed in this diff.