mirror of
https://github.com/dani-garcia/vaultwarden.git
synced 2025-09-10 10:45:57 +03:00
Compare commits
267 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
59e50b03bd | ||
|
0a88f020e1 | ||
|
c058a1d63c | ||
|
96a189deb9 | ||
|
8c229920ad | ||
|
d592323e39 | ||
|
95dd1cd7ad | ||
|
36ae946655 | ||
|
24edc94f9d | ||
|
4deae76347 | ||
|
8ee0c57224 | ||
|
cb6f392774 | ||
|
5c6081c4e2 | ||
|
88c56de97b | ||
|
e274af6e3d | ||
|
a0ece3754b | ||
|
0bcc2ae7ab | ||
|
bdb90460c4 | ||
|
824137a02c | ||
|
2edc699eac | ||
|
8e79366076 | ||
|
c1e39b182f | ||
|
13eb276085 | ||
|
4cec502f7b | ||
|
2545469713 | ||
|
f09996a21d | ||
|
5cabf4d040 | ||
|
a03db6d224 | ||
|
8d1b72b951 | ||
|
912e1f93b7 | ||
|
a5aa4d9b54 | ||
|
e777be3dde | ||
|
b5441f6b77 | ||
|
dbbd63e519 | ||
|
adc443ea80 | ||
|
0d32179d07 | ||
|
b45b02b37e | ||
|
12928b832c | ||
|
1e224220a8 | ||
|
3471e2660f | ||
|
924ba153aa | ||
|
bd1e8be328 | ||
|
cf5a985b31 | ||
|
607521c88f | ||
|
486c7d8c56 | ||
|
4b71197c97 | ||
|
8b8839d049 | ||
|
b209c1bc4d | ||
|
2b8d08a3f4 | ||
|
cbadf00941 | ||
|
c5b7447dac | ||
|
64d6f72e6c | ||
|
a19a6fb016 | ||
|
b889e5185e | ||
|
cd83a9e7b2 | ||
|
748c825202 | ||
|
204993568a | ||
|
70be2d93ce | ||
|
f5638716d2 | ||
|
fbc2fad9c9 | ||
|
3f39e35123 | ||
|
3f6809bcdf | ||
|
9ff577a7b4 | ||
|
c52adef919 | ||
|
cbb92bcbc0 | ||
|
948798a84f | ||
|
2ffc3eac4d | ||
|
0ff7fd939e | ||
|
ca7c5129b2 | ||
|
07e0fdbd2a | ||
|
b4dfc24040 | ||
|
85dbf4e16c | ||
|
efc65b93f8 | ||
|
9a0fe6f617 | ||
|
3442eb1b9d | ||
|
e449912f05 | ||
|
72a46fb386 | ||
|
d29b6bee28 | ||
|
e2e3712921 | ||
|
00a11b1b78 | ||
|
77b78f0991 | ||
|
ee550be80c | ||
|
97d41c2686 | ||
|
fccc0a4b05 | ||
|
57b1d3f850 | ||
|
77d40833d9 | ||
|
7814218208 | ||
|
95a7ffdf6b | ||
|
ebc47dc161 | ||
|
cd8acc2e8c | ||
|
3b7a5bd102 | ||
|
d3054d4f83 | ||
|
5ac66b05e3 | ||
|
83fd44eeef | ||
|
2edecf34ff | ||
|
18bc8331f9 | ||
|
7d956c5117 | ||
|
603a964579 | ||
|
dc515b83f3 | ||
|
9466f02696 | ||
|
d3bd2774dc | ||
|
f482585d7c | ||
|
2cde814aaa | ||
|
d989a19f76 | ||
|
d292269ea0 | ||
|
ebf40099f2 | ||
|
0586c00285 | ||
|
bb9ddd5680 | ||
|
cb1663fc12 | ||
|
45d9d8db94 | ||
|
edc482c8ea | ||
|
6e5c03cc78 | ||
|
881c1978eb | ||
|
662bc27523 | ||
|
b4b62c22a4 | ||
|
05569147af | ||
|
99a635d327 | ||
|
e6b763026e | ||
|
c182583e09 | ||
|
d821389c2e | ||
|
be2916333b | ||
|
9124d8a3fb | ||
|
7b1da527a6 | ||
|
e7b8602e1f | ||
|
d6e9af909b | ||
|
acdd42935b | ||
|
8367d1d715 | ||
|
56f12dc982 | ||
|
4c07f05b3a | ||
|
b73ff886c3 | ||
|
2e7bd62353 | ||
|
1264eb640a | ||
|
3a90364b32 | ||
|
f5f9861a78 | ||
|
f9408a00c6 | ||
|
ae8bf954c1 | ||
|
c656f2f694 | ||
|
eea3f13bb3 | ||
|
df8114f8be | ||
|
dda244edd8 | ||
|
cce3ce816c | ||
|
65c0d1064b | ||
|
5a2f968d7a | ||
|
16d88402cb | ||
|
7dcf18151d | ||
|
e3404dd322 | ||
|
bfc517ee80 | ||
|
4a7d2a1e28 | ||
|
66a68f6d22 | ||
|
469318bcbd | ||
|
c07c9995ea | ||
|
2c2276c5bb | ||
|
672a245548 | ||
|
5d50b1ee3c | ||
|
c99df1c310 | ||
|
591ae10144 | ||
|
2d2745195e | ||
|
026f9da035 | ||
|
d23d4f2c1d | ||
|
515b87755a | ||
|
d8ea3d2bfe | ||
|
ee7837d022 | ||
|
07743e490b | ||
|
9101d6e48f | ||
|
27c23b60b8 | ||
|
e7b6238f43 | ||
|
ad2225b6e5 | ||
|
5609103a97 | ||
|
6d460b44b0 | ||
|
efd8d9f528 | ||
|
29aedd388e | ||
|
27e0e41835 | ||
|
0b60f20eb3 | ||
|
8be2ed6255 | ||
|
c9c3f07171 | ||
|
8a21c6df10 | ||
|
df71f57d86 | ||
|
60e39a9dd1 | ||
|
bc6a53b847 | ||
|
05a1137828 | ||
|
cef38bf40b | ||
|
0b13a8c4aa | ||
|
3fbd7919d8 | ||
|
5f688ff209 | ||
|
f6cfb5bf21 | ||
|
df8c9f39ac | ||
|
d7ee7caed4 | ||
|
2e300da057 | ||
|
3fb63bbe8c | ||
|
9671ed4cca | ||
|
d10ef3fd4b | ||
|
dd0b847912 | ||
|
8c34ff5d23 | ||
|
15750256e2 | ||
|
6989fc7bdb | ||
|
4923614730 | ||
|
76f38621de | ||
|
fff72889f6 | ||
|
12af32b9ea | ||
|
9add8e19eb | ||
|
5710703c50 | ||
|
1322b876e9 | ||
|
9ed2ba61c6 | ||
|
62a461ae15 | ||
|
6f7220b68e | ||
|
4859932d35 | ||
|
ee277de707 | ||
|
c11f47903a | ||
|
6a5f1613e7 | ||
|
dc36f0cb6c | ||
|
6c38026ef5 | ||
|
4c9cc9890c | ||
|
f57b407c60 | ||
|
ce0651b79c | ||
|
edc26cb1e1 | ||
|
ff759397f6 | ||
|
badd22ac3d | ||
|
6f78395ef7 | ||
|
5fb6531db8 | ||
|
eb9d5e1196 | ||
|
233b48bdad | ||
|
e22e290f67 | ||
|
ab95a69dc8 | ||
|
85c8a01f4a | ||
|
42af7c6dab | ||
|
08a445e2ac | ||
|
c0b2877da3 | ||
|
cf8ca85289 | ||
|
a8a92f6c51 | ||
|
95f833aacd | ||
|
4f45cc081f | ||
|
2a4cd24c60 | ||
|
ef551f4cc6 | ||
|
4545f271c3 | ||
|
2768396a72 | ||
|
5521a86693 | ||
|
3160780549 | ||
|
f0701657a9 | ||
|
21325b7523 | ||
|
874f5c34bd | ||
|
eadab2e9ca | ||
|
253faaf023 | ||
|
3d843a6a51 | ||
|
03fdf36bf9 | ||
|
fdcc32beda | ||
|
bf20355c5e | ||
|
0136c793b4 | ||
|
2e12114350 | ||
|
f25ab42ebb | ||
|
d3a8a278e6 | ||
|
8d9827c55f | ||
|
cad63f9761 | ||
|
bf446f44f9 | ||
|
621f607297 | ||
|
d89bd707a8 | ||
|
754087b990 | ||
|
cfbeb56371 | ||
|
3bb46ce496 | ||
|
c5832f2b30 | ||
|
d9406b0095 | ||
|
2475c36a75 | ||
|
c384f9c0ca | ||
|
afbfebf659 | ||
|
6b686c18f7 | ||
|
84fb6aaddb | ||
|
8526055bb7 | ||
|
a79334ea4c |
@@ -4,8 +4,13 @@
|
|||||||
## Main data folder
|
## Main data folder
|
||||||
# DATA_FOLDER=data
|
# DATA_FOLDER=data
|
||||||
|
|
||||||
## Individual folders, these override %DATA_FOLDER%
|
## Database URL
|
||||||
|
## When using SQLite, this is the path to the DB file, default to %DATA_FOLDER%/db.sqlite3
|
||||||
|
## When using MySQL, this it is the URL to the DB, including username and password:
|
||||||
|
## Format: mysql://[user[:password]@]host/database_name
|
||||||
# DATABASE_URL=data/db.sqlite3
|
# DATABASE_URL=data/db.sqlite3
|
||||||
|
|
||||||
|
## Individual folders, these override %DATA_FOLDER%
|
||||||
# RSA_KEY_FILENAME=data/rsa_key
|
# RSA_KEY_FILENAME=data/rsa_key
|
||||||
# ICON_CACHE_FOLDER=data/icon_cache
|
# ICON_CACHE_FOLDER=data/icon_cache
|
||||||
# ATTACHMENTS_FOLDER=data/attachments
|
# ATTACHMENTS_FOLDER=data/attachments
|
||||||
@@ -16,6 +21,10 @@
|
|||||||
## Automatically reload the templates for every request, slow, use only for development
|
## Automatically reload the templates for every request, slow, use only for development
|
||||||
# RELOAD_TEMPLATES=false
|
# RELOAD_TEMPLATES=false
|
||||||
|
|
||||||
|
## Client IP Header, used to identify the IP of the client, defaults to "X-Client-IP"
|
||||||
|
## Set to the string "none" (without quotes), to disable any headers and just use the remote IP
|
||||||
|
# IP_HEADER=X-Client-IP
|
||||||
|
|
||||||
## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
|
## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
|
||||||
# ICON_CACHE_TTL=2592000
|
# ICON_CACHE_TTL=2592000
|
||||||
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
|
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
|
||||||
@@ -32,17 +41,25 @@
|
|||||||
# WEBSOCKET_ADDRESS=0.0.0.0
|
# WEBSOCKET_ADDRESS=0.0.0.0
|
||||||
# WEBSOCKET_PORT=3012
|
# WEBSOCKET_PORT=3012
|
||||||
|
|
||||||
## Enable extended logging
|
## Enable extended logging, which shows timestamps and targets in the logs
|
||||||
## This shows timestamps and allows logging to file and to syslog
|
|
||||||
### To enable logging to file, use the LOG_FILE env variable
|
|
||||||
### To enable syslog, you need to compile with `cargo build --features=enable_syslog'
|
|
||||||
# EXTENDED_LOGGING=true
|
# EXTENDED_LOGGING=true
|
||||||
|
|
||||||
## Logging to file
|
## Logging to file
|
||||||
## This requires extended logging
|
|
||||||
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
|
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
|
||||||
# LOG_FILE=/path/to/log
|
# LOG_FILE=/path/to/log
|
||||||
|
|
||||||
|
## Logging to Syslog
|
||||||
|
## This requires extended logging
|
||||||
|
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
|
||||||
|
# USE_SYSLOG=false
|
||||||
|
|
||||||
|
## Log level
|
||||||
|
## Change the verbosity of the log output
|
||||||
|
## Valid values are "trace", "debug", "info", "warn", "error" and "off"
|
||||||
|
## Setting it to "trace" or "debug" would also show logs for mounted
|
||||||
|
## routes and static file, websocket and alive requests
|
||||||
|
# LOG_LEVEL=Info
|
||||||
|
|
||||||
## Enable WAL for the DB
|
## Enable WAL for the DB
|
||||||
## Set to false to avoid enabling WAL during startup.
|
## Set to false to avoid enabling WAL during startup.
|
||||||
## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
|
## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
|
||||||
@@ -67,6 +84,10 @@
|
|||||||
## Useful to hide other servers in the local network. Check the WIKI for more details
|
## Useful to hide other servers in the local network. Check the WIKI for more details
|
||||||
# ICON_BLACKLIST_REGEX=192\.168\.1\.[0-9].*^
|
# ICON_BLACKLIST_REGEX=192\.168\.1\.[0-9].*^
|
||||||
|
|
||||||
|
## Any IP which is not defined as a global IP will be blacklisted.
|
||||||
|
## Usefull to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
|
||||||
|
# ICON_BLACKLIST_NON_GLOBAL_IPS=true
|
||||||
|
|
||||||
## Disable 2FA remember
|
## Disable 2FA remember
|
||||||
## Enabling this would force the users to use a second factor to login every time.
|
## Enabling this would force the users to use a second factor to login every time.
|
||||||
## Note that the checkbox would still be present, but ignored.
|
## Note that the checkbox would still be present, but ignored.
|
||||||
@@ -75,10 +96,31 @@
|
|||||||
## Controls if new users can register
|
## Controls if new users can register
|
||||||
# SIGNUPS_ALLOWED=true
|
# SIGNUPS_ALLOWED=true
|
||||||
|
|
||||||
|
## Controls if new users need to verify their email address upon registration
|
||||||
|
## Note that setting this option to true prevents logins until the email address has been verified!
|
||||||
|
## The welcome email will include a verification link, and login attempts will periodically
|
||||||
|
## trigger another verification email to be sent.
|
||||||
|
# SIGNUPS_VERIFY=false
|
||||||
|
|
||||||
|
## If SIGNUPS_VERIFY is set to true, this limits how many seconds after the last time
|
||||||
|
## an email verification link has been sent another verification email will be sent
|
||||||
|
# SIGNUPS_VERIFY_RESEND_TIME=3600
|
||||||
|
|
||||||
|
## If SIGNUPS_VERIFY is set to true, this limits how many times an email verification
|
||||||
|
## email will be re-sent upon an attempted login.
|
||||||
|
# SIGNUPS_VERIFY_RESEND_LIMIT=6
|
||||||
|
|
||||||
|
## Controls if new users from a list of comma-separated domains can register
|
||||||
|
## even if SIGNUPS_ALLOWED is set to false
|
||||||
|
# SIGNUPS_DOMAINS_WHITELIST=example.com,example.net,example.org
|
||||||
|
|
||||||
## Token for the admin interface, preferably use a long random string
|
## Token for the admin interface, preferably use a long random string
|
||||||
## One option is to use 'openssl rand -base64 48'
|
## One option is to use 'openssl rand -base64 48'
|
||||||
## If not set, the admin panel is disabled
|
## If not set, the admin panel is disabled
|
||||||
# ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp
|
# ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp
|
||||||
|
|
||||||
|
## Enable this to bypass the admin panel security. This option is only
|
||||||
|
## meant to be used with the use of a separate auth layer in front
|
||||||
# DISABLE_ADMIN_TOKEN=false
|
# DISABLE_ADMIN_TOKEN=false
|
||||||
|
|
||||||
## Invitations org admins to invite users, even when signups are disabled
|
## Invitations org admins to invite users, even when signups are disabled
|
||||||
@@ -106,6 +148,29 @@
|
|||||||
# YUBICO_SECRET_KEY=AAAAAAAAAAAAAAAAAAAAAAAA
|
# YUBICO_SECRET_KEY=AAAAAAAAAAAAAAAAAAAAAAAA
|
||||||
# YUBICO_SERVER=http://yourdomain.com/wsapi/2.0/verify
|
# YUBICO_SERVER=http://yourdomain.com/wsapi/2.0/verify
|
||||||
|
|
||||||
|
## Duo Settings
|
||||||
|
## You need to configure all options to enable global Duo support, otherwise users would need to configure it themselves
|
||||||
|
## Create an account and protect an application as mentioned in this link (only the first step, not the rest):
|
||||||
|
## https://help.bitwarden.com/article/setup-two-step-login-duo/#create-a-duo-security-account
|
||||||
|
## Then set the following options, based on the values obtained from the last step:
|
||||||
|
# DUO_IKEY=<Integration Key>
|
||||||
|
# DUO_SKEY=<Secret Key>
|
||||||
|
# DUO_HOST=<API Hostname>
|
||||||
|
## After that, you should be able to follow the rest of the guide linked above,
|
||||||
|
## ignoring the fields that ask for the values that you already configured beforehand.
|
||||||
|
|
||||||
|
## Authenticator Settings
|
||||||
|
## Disable authenticator time drifted codes to be valid.
|
||||||
|
## TOTP codes of the previous and next 30 seconds will be invalid
|
||||||
|
##
|
||||||
|
## According to the RFC6238 (https://tools.ietf.org/html/rfc6238),
|
||||||
|
## we allow by default the TOTP code which was valid one step back and one in the future.
|
||||||
|
## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes.
|
||||||
|
## You can disable this, so that only the current TOTP Code is allowed.
|
||||||
|
## Keep in mind that when a sever drifts out of time, valid codes could be marked as invalid.
|
||||||
|
## In any case, if a code has been used it can not be used again, also codes which predates it will be invalid.
|
||||||
|
# AUTHENTICATOR_DISABLE_TIME_DRIFT = false
|
||||||
|
|
||||||
## Rocket specific settings, check Rocket documentation to learn more
|
## Rocket specific settings, check Rocket documentation to learn more
|
||||||
# ROCKET_ENV=staging
|
# ROCKET_ENV=staging
|
||||||
# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
|
# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
|
||||||
@@ -122,3 +187,7 @@
|
|||||||
# SMTP_SSL=true
|
# SMTP_SSL=true
|
||||||
# SMTP_USERNAME=username
|
# SMTP_USERNAME=username
|
||||||
# SMTP_PASSWORD=password
|
# SMTP_PASSWORD=password
|
||||||
|
# SMTP_AUTH_MECHANISM="Plain"
|
||||||
|
# SMTP_TIMEOUT=15
|
||||||
|
|
||||||
|
# vim: syntax=ini
|
||||||
|
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
github: dani-garcia
|
33
.github/ISSUE_TEMPLATE.md
vendored
Normal file
33
.github/ISSUE_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
<!--
|
||||||
|
Please fill out the following template to make solving your problem easier and faster for us.
|
||||||
|
This is only a guideline. If you think that parts are unneccessary for your issue, feel free to remove them.
|
||||||
|
|
||||||
|
Remember to hide/obfuscate personal and confidential information,
|
||||||
|
such as names, global IP/DNS adresses and especially passwords, if neccessary.
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Subject of the issue
|
||||||
|
<!-- Describe your issue here.-->
|
||||||
|
|
||||||
|
### Your environment
|
||||||
|
<!-- The version number, obtained from the logs or the admin page -->
|
||||||
|
* Bitwarden_rs version:
|
||||||
|
<!-- How the server was installed: Docker image / package / built from source -->
|
||||||
|
* Install method:
|
||||||
|
* Clients used: <!-- if applicable -->
|
||||||
|
* Reverse proxy and version: <!-- if applicable -->
|
||||||
|
* Version of mysql/postgresql: <!-- if applicable -->
|
||||||
|
* Other relevant information:
|
||||||
|
|
||||||
|
### Steps to reproduce
|
||||||
|
<!-- Tell us how to reproduce this issue. What parameters did you set (differently from the defaults)
|
||||||
|
and how did you start bitwarden_rs? -->
|
||||||
|
|
||||||
|
### Expected behaviour
|
||||||
|
<!-- Tell us what should happen -->
|
||||||
|
|
||||||
|
### Actual behaviour
|
||||||
|
<!-- Tell us what happens instead -->
|
||||||
|
|
||||||
|
### Relevant logs
|
||||||
|
<!-- Share some logfiles, screenshots or output of relevant programs with us. -->
|
70
.github/workflows/rust-win.yml.disabled
vendored
Normal file
70
.github/workflows/rust-win.yml.disabled
vendored
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
name: build-windows
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
|
||||||
|
runs-on: windows-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
db-backend: [sqlite, mysql, postgresql]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
|
||||||
|
- name: Cache choco cache
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: ~\AppData\Local\Temp\chocolatey
|
||||||
|
key: ${{ runner.os }}-choco-cache
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: choco install openssl sqlite postgresql12 mysql
|
||||||
|
|
||||||
|
- name: Cache cargo registry
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/registry
|
||||||
|
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
- name: Cache cargo index
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/git
|
||||||
|
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
- name: Cache cargo build
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: target
|
||||||
|
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Install latest nightly
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
toolchain: nightly
|
||||||
|
override: true
|
||||||
|
profile: minimal
|
||||||
|
target: x86_64-pc-windows-msvc
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
run: cargo.exe build --verbose --features ${{ matrix.db-backend }} --release --target x86_64-pc-windows-msvc
|
||||||
|
env:
|
||||||
|
OPENSSL_DIR: C:\Program Files\OpenSSL-Win64\
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
run: cargo test --features ${{ matrix.db-backend }}
|
||||||
|
|
||||||
|
- name: Upload windows artifact
|
||||||
|
uses: actions/upload-artifact@v1.0.0
|
||||||
|
with:
|
||||||
|
name: x86_64-pc-windows-msvc-${{ matrix.db-backend }}-bitwarden_rs
|
||||||
|
path: target/release/bitwarden_rs.exe
|
||||||
|
|
||||||
|
- name: Release
|
||||||
|
uses: Shopify/upload-to-release@1.0.0
|
||||||
|
if: startsWith(github.ref, 'refs/tags/')
|
||||||
|
with:
|
||||||
|
name: x86_64-pc-windows-msvc-${{ matrix.db-backend }}-bitwarden_rs
|
||||||
|
path: target/release/bitwarden_rs.exe
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
149
.github/workflows/workspace.yml
vendored
Normal file
149
.github/workflows/workspace.yml
vendored
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
name: Workflow
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
paths-ignore:
|
||||||
|
- "**.md"
|
||||||
|
pull_request:
|
||||||
|
paths-ignore:
|
||||||
|
- "**.md"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Build
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
db-backend: [sqlite, mysql, postgresql]
|
||||||
|
target:
|
||||||
|
- x86_64-unknown-linux-gnu
|
||||||
|
# - x86_64-unknown-linux-musl
|
||||||
|
- x86_64-apple-darwin
|
||||||
|
# - x86_64-pc-windows-msvc
|
||||||
|
include:
|
||||||
|
- target: x86_64-unknown-linux-gnu
|
||||||
|
os: ubuntu-latest
|
||||||
|
ext:
|
||||||
|
# - target: x86_64-unknown-linux-musl
|
||||||
|
# os: ubuntu-latest
|
||||||
|
# ext:
|
||||||
|
- target: x86_64-apple-darwin
|
||||||
|
os: macOS-latest
|
||||||
|
ext:
|
||||||
|
# - target: x86_64-pc-windows-msvc
|
||||||
|
# os: windows-latest
|
||||||
|
# ext: .exe
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
|
||||||
|
# - name: Cache choco cache
|
||||||
|
# uses: actions/cache@v1.0.3
|
||||||
|
# if: matrix.os == 'windows-latest'
|
||||||
|
# with:
|
||||||
|
# path: ~\AppData\Local\Temp\chocolatey
|
||||||
|
# key: ${{ runner.os }}-choco-cache-${{ matrix.db-backend }}
|
||||||
|
|
||||||
|
- name: Cache vcpkg installed
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
if: matrix.os == 'windows-latest'
|
||||||
|
with:
|
||||||
|
path: $VCPKG_ROOT/installed
|
||||||
|
key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
|
||||||
|
env:
|
||||||
|
VCPKG_ROOT: 'C:\vcpkg'
|
||||||
|
|
||||||
|
- name: Cache vcpkg downloads
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
if: matrix.os == 'windows-latest'
|
||||||
|
with:
|
||||||
|
path: $VCPKG_ROOT/downloads
|
||||||
|
key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
|
||||||
|
env:
|
||||||
|
VCPKG_ROOT: 'C:\vcpkg'
|
||||||
|
|
||||||
|
# - name: Cache homebrew
|
||||||
|
# uses: actions/cache@v1.0.3
|
||||||
|
# if: matrix.os == 'macOS-latest'
|
||||||
|
# with:
|
||||||
|
# path: ~/Library/Caches/Homebrew
|
||||||
|
# key: ${{ runner.os }}-brew-cache
|
||||||
|
|
||||||
|
# - name: Cache apt
|
||||||
|
# uses: actions/cache@v1.0.3
|
||||||
|
# if: matrix.os == 'ubuntu-latest'
|
||||||
|
# with:
|
||||||
|
# path: /var/cache/apt/archives
|
||||||
|
# key: ${{ runner.os }}-apt-cache
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
- name: Install dependencies macOS
|
||||||
|
run: brew update; brew install openssl sqlite libpq mysql
|
||||||
|
if: matrix.os == 'macOS-latest'
|
||||||
|
|
||||||
|
- name: Install dependencies Ubuntu
|
||||||
|
run: sudo apt-get update && sudo apt-get install --no-install-recommends openssl sqlite libpq-dev libmysql++-dev
|
||||||
|
if: matrix.os == 'ubuntu-latest'
|
||||||
|
|
||||||
|
- name: Install dependencies Windows
|
||||||
|
run: vcpkg integrate install; vcpkg install sqlite3:x64-windows openssl:x64-windows libpq:x64-windows libmysql:x64-windows
|
||||||
|
if: matrix.os == 'windows-latest'
|
||||||
|
env:
|
||||||
|
VCPKG_ROOT: 'C:\vcpkg'
|
||||||
|
# End Install dependencies
|
||||||
|
|
||||||
|
# Install rust nightly toolchain
|
||||||
|
- name: Cache cargo registry
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/registry
|
||||||
|
key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
- name: Cache cargo index
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/git
|
||||||
|
key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
- name: Cache cargo build
|
||||||
|
uses: actions/cache@v1.0.3
|
||||||
|
with:
|
||||||
|
path: target
|
||||||
|
key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
|
||||||
|
|
||||||
|
- name: Install latest nightly
|
||||||
|
uses: actions-rs/toolchain@v1
|
||||||
|
with:
|
||||||
|
toolchain: nightly
|
||||||
|
override: true
|
||||||
|
profile: minimal
|
||||||
|
target: ${{ matrix.target }}
|
||||||
|
|
||||||
|
# Build
|
||||||
|
- name: Build Win
|
||||||
|
if: matrix.os == 'windows-latest'
|
||||||
|
run: cargo.exe build --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
|
||||||
|
env:
|
||||||
|
RUSTFLAGS: -Ctarget-feature=+crt-static
|
||||||
|
VCPKG_ROOT: 'C:\vcpkg'
|
||||||
|
|
||||||
|
- name: Build macOS / Ubuntu
|
||||||
|
if: matrix.os == 'macOS-latest' || matrix.os == 'ubuntu-latest'
|
||||||
|
run: cargo build --verbose --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
|
||||||
|
|
||||||
|
# Test
|
||||||
|
- name: Run tests
|
||||||
|
run: cargo test --features ${{ matrix.db-backend }}
|
||||||
|
|
||||||
|
# Upload & Release
|
||||||
|
- name: Upload artifact
|
||||||
|
uses: actions/upload-artifact@v1.0.0
|
||||||
|
with:
|
||||||
|
name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
|
||||||
|
path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
|
||||||
|
|
||||||
|
- name: Release
|
||||||
|
uses: Shopify/upload-to-release@1.0.0
|
||||||
|
if: startsWith(github.ref, 'refs/tags/')
|
||||||
|
with:
|
||||||
|
name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
|
||||||
|
path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
7
.hadolint.yaml
Normal file
7
.hadolint.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
ignored:
|
||||||
|
# disable explicit version for apt install
|
||||||
|
- DL3008
|
||||||
|
# disable explicit version for apk install
|
||||||
|
- DL3018
|
||||||
|
trustedRegistries:
|
||||||
|
- docker.io
|
14
.travis.yml
14
.travis.yml
@@ -1,9 +1,21 @@
|
|||||||
dist: xenial
|
dist: xenial
|
||||||
|
|
||||||
|
env:
|
||||||
|
global:
|
||||||
|
- HADOLINT_VERSION=1.17.1
|
||||||
|
|
||||||
language: rust
|
language: rust
|
||||||
rust: nightly
|
rust: nightly
|
||||||
cache: cargo
|
cache: cargo
|
||||||
|
|
||||||
|
before_install:
|
||||||
|
- sudo curl -L https://github.com/hadolint/hadolint/releases/download/v$HADOLINT_VERSION/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint
|
||||||
|
- sudo chmod +rx /usr/local/bin/hadolint
|
||||||
|
- rustup set profile minimal
|
||||||
|
|
||||||
# Nothing to install
|
# Nothing to install
|
||||||
install: true
|
install: true
|
||||||
script: cargo build --all-features
|
script:
|
||||||
|
- git ls-files --exclude='Dockerfile*' --ignored | xargs --max-lines=1 hadolint
|
||||||
|
- cargo test --features "sqlite"
|
||||||
|
- cargo test --features "mysql"
|
||||||
|
2235
Cargo.lock
generated
2235
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
90
Cargo.toml
90
Cargo.toml
@@ -11,53 +11,59 @@ publish = false
|
|||||||
build = "build.rs"
|
build = "build.rs"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
enable_syslog = ["syslog", "fern/syslog-4"]
|
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
|
||||||
|
enable_syslog = []
|
||||||
|
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
|
||||||
|
postgresql = ["diesel/postgres", "diesel_migrations/postgres", "openssl"]
|
||||||
|
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]
|
||||||
|
|
||||||
|
[target."cfg(not(windows))".dependencies]
|
||||||
|
syslog = "4.0.1"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
# Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
|
# Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
|
||||||
rocket = { version = "0.4.0", features = ["tls"], default-features = false }
|
rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
|
||||||
rocket_contrib = "0.4.0"
|
rocket_contrib = "0.5.0-dev"
|
||||||
|
|
||||||
# HTTP client
|
# HTTP client
|
||||||
reqwest = "0.9.12"
|
reqwest = "0.9.24"
|
||||||
|
|
||||||
# multipart/form-data support
|
# multipart/form-data support
|
||||||
multipart = { version = "0.16.1", features = ["server"], default-features = false }
|
multipart = { version = "0.16.1", features = ["server"], default-features = false }
|
||||||
|
|
||||||
# WebSockets library
|
# WebSockets library
|
||||||
ws = "0.8.0"
|
ws = "0.9.1"
|
||||||
|
|
||||||
# MessagePack library
|
# MessagePack library
|
||||||
rmpv = "0.4.0"
|
rmpv = "0.4.3"
|
||||||
|
|
||||||
# Concurrent hashmap implementation
|
# Concurrent hashmap implementation
|
||||||
chashmap = "2.2.2"
|
chashmap = "2.2.2"
|
||||||
|
|
||||||
# A generic serialization/deserialization framework
|
# A generic serialization/deserialization framework
|
||||||
serde = "1.0.89"
|
serde = "1.0.104"
|
||||||
serde_derive = "1.0.89"
|
serde_derive = "1.0.104"
|
||||||
serde_json = "1.0.39"
|
serde_json = "1.0.44"
|
||||||
|
|
||||||
# Logging
|
# Logging
|
||||||
log = "0.4.6"
|
log = "0.4.8"
|
||||||
fern = "0.5.7"
|
fern = { version = "0.5.9", features = ["syslog-4"] }
|
||||||
syslog = { version = "4.0.1", optional = true }
|
|
||||||
|
|
||||||
# A safe, extensible ORM and Query builder
|
# A safe, extensible ORM and Query builder
|
||||||
diesel = { version = "1.4.2", features = ["sqlite", "chrono", "r2d2"] }
|
diesel = { version = "1.4.3", features = [ "chrono", "r2d2"] }
|
||||||
diesel_migrations = { version = "1.4.0", features = ["sqlite"] }
|
diesel_migrations = "1.4.0"
|
||||||
|
|
||||||
# Bundled SQLite
|
# Bundled SQLite
|
||||||
libsqlite3-sys = { version = "0.12.0", features = ["bundled"] }
|
libsqlite3-sys = { version = "0.16.0", features = ["bundled"], optional = true }
|
||||||
|
|
||||||
# Crypto library
|
# Crypto library
|
||||||
ring = { version = "0.13.5", features = ["rsa_signing"] }
|
ring = "0.14.6"
|
||||||
|
|
||||||
# UUID generation
|
# UUID generation
|
||||||
uuid = { version = "0.7.2", features = ["v4"] }
|
uuid = { version = "0.8.1", features = ["v4"] }
|
||||||
|
|
||||||
# Date and time library for Rust
|
# Date and time library for Rust
|
||||||
chrono = "0.4.6"
|
chrono = "0.4.10"
|
||||||
|
|
||||||
# TOTP library
|
# TOTP library
|
||||||
oath = "0.10.2"
|
oath = "0.10.2"
|
||||||
@@ -66,39 +72,55 @@ oath = "0.10.2"
|
|||||||
data-encoding = "2.1.2"
|
data-encoding = "2.1.2"
|
||||||
|
|
||||||
# JWT library
|
# JWT library
|
||||||
jsonwebtoken = "5.0.1"
|
jsonwebtoken = "6.0.1"
|
||||||
|
|
||||||
# U2F library
|
# U2F library
|
||||||
u2f = "0.1.4"
|
u2f = "0.1.6"
|
||||||
|
|
||||||
# Yubico Library
|
# Yubico Library
|
||||||
yubico = { version = "0.5.1", features = ["online"], default-features = false }
|
yubico = { version = "0.7.1", features = ["online-tokio"], default-features = false }
|
||||||
|
|
||||||
# A `dotenv` implementation for Rust
|
# A `dotenv` implementation for Rust
|
||||||
dotenv = { version = "0.13.0", default-features = false }
|
dotenv = { version = "0.15.0", default-features = false }
|
||||||
|
|
||||||
# Lazy static macro
|
# Lazy static macro
|
||||||
lazy_static = "1.3.0"
|
lazy_static = "1.4.0"
|
||||||
|
|
||||||
# More derives
|
# More derives
|
||||||
derive_more = "0.14.0"
|
derive_more = "0.99.2"
|
||||||
|
|
||||||
# Numerical libraries
|
# Numerical libraries
|
||||||
num-traits = "0.2.6"
|
num-traits = "0.2.10"
|
||||||
num-derive = "0.2.4"
|
num-derive = "0.3.0"
|
||||||
|
|
||||||
# Email libraries
|
# Email libraries
|
||||||
lettre = "0.9.0"
|
lettre = "0.9.2"
|
||||||
lettre_email = "0.9.0"
|
lettre_email = "0.9.2"
|
||||||
native-tls = "0.2.2"
|
native-tls = "0.2.3"
|
||||||
|
quoted_printable = "0.4.1"
|
||||||
|
|
||||||
# Template library
|
# Template library
|
||||||
handlebars = "1.1.0"
|
handlebars = "=2.0.2"
|
||||||
|
|
||||||
# For favicon extraction from main website
|
# For favicon extraction from main website
|
||||||
soup = "0.3.0"
|
soup = "0.4.1"
|
||||||
regex = "1.1.2"
|
regex = "1.3.1"
|
||||||
|
data-url = "0.1.0"
|
||||||
|
|
||||||
|
# Required for SSL support for PostgreSQL
|
||||||
|
openssl = { version = "0.10.26", optional = true }
|
||||||
|
|
||||||
|
# URL encoding library
|
||||||
|
percent-encoding = "2.1.0"
|
||||||
|
|
||||||
[patch.crates-io]
|
[patch.crates-io]
|
||||||
# Add support for Timestamp type
|
# Use newest ring
|
||||||
rmp = { git = 'https://github.com/dani-garcia/msgpack-rust' }
|
rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }
|
||||||
|
rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }
|
||||||
|
|
||||||
|
# Use git version for timeout fix #706
|
||||||
|
lettre = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }
|
||||||
|
lettre_email = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }
|
||||||
|
|
||||||
|
# For favicon extraction from main website
|
||||||
|
data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '7f1bd6ce1c2fde599a757302a843a60e714c5f72' }
|
||||||
|
86
Dockerfile
86
Dockerfile
@@ -1,86 +0,0 @@
|
|||||||
# Using multistage build:
|
|
||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
|
||||||
####################### VAULT BUILD IMAGE #######################
|
|
||||||
FROM alpine as vault
|
|
||||||
|
|
||||||
ENV VAULT_VERSION "v2.9.0"
|
|
||||||
|
|
||||||
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
|
||||||
|
|
||||||
RUN apk add --update-cache --upgrade \
|
|
||||||
curl \
|
|
||||||
tar
|
|
||||||
|
|
||||||
RUN mkdir /web-vault
|
|
||||||
WORKDIR /web-vault
|
|
||||||
|
|
||||||
RUN curl -L $URL | tar xz
|
|
||||||
RUN ls
|
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
|
||||||
# We need to use the Rust build image, because
|
|
||||||
# we need the Rust compiler and Cargo tooling
|
|
||||||
FROM rust as build
|
|
||||||
|
|
||||||
# Using bundled SQLite, no need to install it
|
|
||||||
# RUN apt-get update && apt-get install -y\
|
|
||||||
# sqlite3\
|
|
||||||
# --no-install-recommends\
|
|
||||||
# && rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# Creates a dummy project used to grab dependencies
|
|
||||||
RUN USER=root cargo new --bin app
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Copies over *only* your manifests and build files
|
|
||||||
COPY ./Cargo.* ./
|
|
||||||
COPY ./rust-toolchain ./rust-toolchain
|
|
||||||
COPY ./build.rs ./build.rs
|
|
||||||
|
|
||||||
# Builds your dependencies and removes the
|
|
||||||
# dummy project, except the target folder
|
|
||||||
# This folder contains the compiled dependencies
|
|
||||||
RUN cargo build --release
|
|
||||||
RUN find . -not -path "./target*" -delete
|
|
||||||
|
|
||||||
# Copies the complete project
|
|
||||||
# To avoid copying unneeded files, use .dockerignore
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
# Make sure that we actually build the project
|
|
||||||
RUN touch src/main.rs
|
|
||||||
|
|
||||||
# Builds again, this time it'll just be
|
|
||||||
# your actual source files being built
|
|
||||||
RUN cargo build --release
|
|
||||||
|
|
||||||
######################## RUNTIME IMAGE ########################
|
|
||||||
# Create a new stage with a minimal image
|
|
||||||
# because we already have a binary built
|
|
||||||
FROM debian:stretch-slim
|
|
||||||
|
|
||||||
ENV ROCKET_ENV "staging"
|
|
||||||
ENV ROCKET_PORT=80
|
|
||||||
ENV ROCKET_WORKERS=10
|
|
||||||
|
|
||||||
# Install needed libraries
|
|
||||||
RUN apt-get update && apt-get install -y\
|
|
||||||
openssl\
|
|
||||||
ca-certificates\
|
|
||||||
--no-install-recommends\
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
RUN mkdir /data
|
|
||||||
VOLUME /data
|
|
||||||
EXPOSE 80
|
|
||||||
EXPOSE 3012
|
|
||||||
|
|
||||||
# Copies the files from the context (Rocket.toml file and web-vault)
|
|
||||||
# and the binary from the "build" stage to the current stage
|
|
||||||
COPY Rocket.toml .
|
|
||||||
COPY --from=vault /web-vault ./web-vault
|
|
||||||
COPY --from=build app/target/release/bitwarden_rs .
|
|
||||||
|
|
||||||
# Configures the startup!
|
|
||||||
CMD ./bitwarden_rs
|
|
1
Dockerfile
Symbolic link
1
Dockerfile
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
docker/amd64/sqlite/Dockerfile
|
13
README.md
13
README.md
@@ -3,7 +3,7 @@
|
|||||||
---
|
---
|
||||||
|
|
||||||
[](https://travis-ci.org/dani-garcia/bitwarden_rs)
|
[](https://travis-ci.org/dani-garcia/bitwarden_rs)
|
||||||
[](https://hub.docker.com/r/mprasil/bitwarden)
|
[](https://hub.docker.com/r/bitwardenrs/server)
|
||||||
[](https://deps.rs/repo/github/dani-garcia/bitwarden_rs)
|
[](https://deps.rs/repo/github/dani-garcia/bitwarden_rs)
|
||||||
[](https://github.com/dani-garcia/bitwarden_rs/releases/latest)
|
[](https://github.com/dani-garcia/bitwarden_rs/releases/latest)
|
||||||
[](https://github.com/dani-garcia/bitwarden_rs/blob/master/LICENSE.txt)
|
[](https://github.com/dani-garcia/bitwarden_rs/blob/master/LICENSE.txt)
|
||||||
@@ -34,8 +34,8 @@ Basically full implementation of Bitwarden API is provided including:
|
|||||||
Pull the docker image and mount a volume from the host for persistent storage:
|
Pull the docker image and mount a volume from the host for persistent storage:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
docker pull mprasil/bitwarden:latest
|
docker pull bitwardenrs/server:latest
|
||||||
docker run -d --name bitwarden -v /bw-data/:/data/ -p 80:80 mprasil/bitwarden:latest
|
docker run -d --name bitwarden -v /bw-data/:/data/ -p 80:80 bitwardenrs/server:latest
|
||||||
```
|
```
|
||||||
This will preserve any persistent data under /bw-data/, you can adapt the path to whatever suits you.
|
This will preserve any persistent data under /bw-data/, you can adapt the path to whatever suits you.
|
||||||
|
|
||||||
@@ -50,6 +50,11 @@ See the [bitwarden_rs wiki](https://github.com/dani-garcia/bitwarden_rs/wiki) fo
|
|||||||
|
|
||||||
## Get in touch
|
## Get in touch
|
||||||
|
|
||||||
To ask an question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine, also please report any bugs spotted here.
|
To ask a question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine. Please also report any bugs spotted here.
|
||||||
|
|
||||||
If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us!
|
If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us!
|
||||||
|
|
||||||
|
### Sponsors
|
||||||
|
Thanks for your contribution to the project!
|
||||||
|
|
||||||
|
- [@Skaronator](https://github.com/Skaronator)
|
||||||
|
@@ -4,14 +4,22 @@ pool:
|
|||||||
steps:
|
steps:
|
||||||
- script: |
|
- script: |
|
||||||
ls -la
|
ls -la
|
||||||
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain)
|
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain) --profile=minimal
|
||||||
echo "##vso[task.prependpath]$HOME/.cargo/bin"
|
echo "##vso[task.prependpath]$HOME/.cargo/bin"
|
||||||
displayName: 'Install Rust'
|
displayName: 'Install Rust'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y libmysql++-dev
|
||||||
|
displayName: Install libmysql
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
rustc -Vv
|
rustc -Vv
|
||||||
cargo -V
|
cargo -V
|
||||||
displayName: Query rust and cargo versions
|
displayName: Query rust and cargo versions
|
||||||
|
|
||||||
- script : cargo build --all-features
|
- script : cargo test --features "sqlite"
|
||||||
displayName: 'Build project'
|
displayName: 'Test project with sqlite backend'
|
||||||
|
|
||||||
|
- script : cargo test --features "mysql"
|
||||||
|
displayName: 'Test project with mysql backend'
|
||||||
|
10
build.rs
10
build.rs
@@ -1,6 +1,16 @@
|
|||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
|
#[cfg(all(feature = "sqlite", feature = "mysql"))]
|
||||||
|
compile_error!("Can't enable both sqlite and mysql at the same time");
|
||||||
|
#[cfg(all(feature = "sqlite", feature = "postgresql"))]
|
||||||
|
compile_error!("Can't enable both sqlite and postgresql at the same time");
|
||||||
|
#[cfg(all(feature = "mysql", feature = "postgresql"))]
|
||||||
|
compile_error!("Can't enable both mysql and postgresql at the same time");
|
||||||
|
|
||||||
|
#[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
|
||||||
|
compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
|
||||||
|
|
||||||
read_git_info().ok();
|
read_git_info().ok();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
110
docker/aarch64/mysql/Dockerfile
Normal file
110
docker/aarch64/mysql/Dockerfile
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=mysql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
gcc-aarch64-linux-gnu \
|
||||||
|
&& mkdir -p ~/.cargo \
|
||||||
|
&& echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
|
||||||
|
&& echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
|
||||||
|
|
||||||
|
ENV CARGO_HOME "/root/.cargo"
|
||||||
|
ENV USER "root"
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Prepare openssl arm64 libs
|
||||||
|
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
||||||
|
/etc/apt/sources.list.d/deb-src.list \
|
||||||
|
&& dpkg --add-architecture arm64 \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libssl-dev:arm64 \
|
||||||
|
libc6-dev:arm64 \
|
||||||
|
libmariadb-dev:arm64
|
||||||
|
|
||||||
|
ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
|
||||||
|
ENV CROSS_COMPILE="1"
|
||||||
|
ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
|
||||||
|
ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build
|
||||||
|
RUN rustup target add aarch64-unknown-linux-gnu
|
||||||
|
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM balenalib/aarch64-debian:buster
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
libmariadbclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
|
||||||
|
RUN [ "cross-build-end" ]
|
||||||
|
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
@@ -2,29 +2,38 @@
|
|||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
####################### VAULT BUILD IMAGE #######################
|
####################### VAULT BUILD IMAGE #######################
|
||||||
FROM alpine as vault
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
ENV VAULT_VERSION "v2.9.0"
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
RUN apk add --update-cache --upgrade \
|
RUN apk add --no-cache --upgrade \
|
||||||
curl \
|
curl \
|
||||||
tar
|
tar
|
||||||
|
|
||||||
RUN mkdir /web-vault
|
RUN mkdir /web-vault
|
||||||
WORKDIR /web-vault
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
RUN curl -L $URL | tar xz
|
RUN curl -L $URL | tar xz
|
||||||
RUN ls
|
RUN ls
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# We need to use the Rust build image, because
|
# We need to use the Rust build image, because
|
||||||
# we need the Rust compiler and Cargo tooling
|
# we need the Rust compiler and Cargo tooling
|
||||||
FROM rust as build
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set sqlite as default for DB ARG for backward comaptibility
|
||||||
|
ARG DB=sqlite
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
gcc-aarch64-linux-gnu \
|
gcc-aarch64-linux-gnu \
|
||||||
&& mkdir -p ~/.cargo \
|
&& mkdir -p ~/.cargo \
|
||||||
&& echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
|
&& echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
|
||||||
@@ -41,6 +50,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
|||||||
&& dpkg --add-architecture arm64 \
|
&& dpkg --add-architecture arm64 \
|
||||||
&& apt-get update \
|
&& apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
libssl-dev:arm64 \
|
libssl-dev:arm64 \
|
||||||
libc6-dev:arm64
|
libc6-dev:arm64
|
||||||
|
|
||||||
@@ -55,12 +65,12 @@ COPY . .
|
|||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN rustup target add aarch64-unknown-linux-gnu
|
RUN rustup target add aarch64-unknown-linux-gnu
|
||||||
RUN cargo build --release --target=aarch64-unknown-linux-gnu -v
|
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
|
||||||
|
|
||||||
######################## RUNTIME IMAGE ########################
|
######################## RUNTIME IMAGE ########################
|
||||||
# Create a new stage with a minimal image
|
# Create a new stage with a minimal image
|
||||||
# because we already have a binary built
|
# because we already have a binary built
|
||||||
FROM balenalib/aarch64-debian:stretch
|
FROM balenalib/aarch64-debian:buster
|
||||||
|
|
||||||
ENV ROCKET_ENV "staging"
|
ENV ROCKET_ENV "staging"
|
||||||
ENV ROCKET_PORT=80
|
ENV ROCKET_PORT=80
|
||||||
@@ -69,10 +79,12 @@ ENV ROCKET_WORKERS=10
|
|||||||
RUN [ "cross-build-start" ]
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
# Install needed libraries
|
# Install needed libraries
|
||||||
RUN apt-get update && apt-get install -y\
|
RUN apt-get update && apt-get install -y \
|
||||||
openssl\
|
--no-install-recommends \
|
||||||
ca-certificates\
|
openssl \
|
||||||
--no-install-recommends\
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
sqlite3 \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN mkdir /data
|
RUN mkdir /data
|
||||||
@@ -88,5 +100,10 @@ COPY Rocket.toml .
|
|||||||
COPY --from=vault /web-vault ./web-vault
|
COPY --from=vault /web-vault ./web-vault
|
||||||
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .
|
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
# Configures the startup!
|
# Configures the startup!
|
||||||
CMD ./bitwarden_rs
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
101
docker/amd64/mysql/Dockerfile
Normal file
101
docker/amd64/mysql/Dockerfile
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=mysql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
# Install MySQL package
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libmariadb-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Creates a dummy project used to grab dependencies
|
||||||
|
RUN USER=root cargo new --bin app
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copies over *only* your manifests and build files
|
||||||
|
COPY ./Cargo.* ./
|
||||||
|
COPY ./rust-toolchain ./rust-toolchain
|
||||||
|
COPY ./build.rs ./build.rs
|
||||||
|
|
||||||
|
# Builds your dependencies and removes the
|
||||||
|
# dummy project, except the target folder
|
||||||
|
# This folder contains the compiled dependencies
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
RUN find . -not -path "./target*" -delete
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
|
# Builds again, this time it'll just be
|
||||||
|
# your actual source files being built
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM debian:buster-slim
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
libmariadbclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
EXPOSE 3012
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build app/target/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
89
docker/amd64/mysql/Dockerfile.alpine
Normal file
89
docker/amd64/mysql/Dockerfile.alpine
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# Musl build image for statically compiled binary
|
||||||
|
FROM clux/muslrust:nightly-2019-12-19 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=mysql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
ENV USER "root"
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libmysqlclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN rustup target add x86_64-unknown-linux-musl
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
|
# Build
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM alpine:3.11
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
ENV SSL_CERT_DIR=/etc/ssl/certs
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
openssl \
|
||||||
|
mariadb-connector-c \
|
||||||
|
curl \
|
||||||
|
ca-certificates
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
EXPOSE 3012
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
108
docker/amd64/postgresql/Dockerfile
Normal file
108
docker/amd64/postgresql/Dockerfile
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=postgresql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
# Using bundled SQLite, no need to install it
|
||||||
|
# RUN apt-get update && apt-get install -y\
|
||||||
|
# --no-install-recommends \
|
||||||
|
# sqlite3\
|
||||||
|
# && rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install MySQL package
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libpq-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Creates a dummy project used to grab dependencies
|
||||||
|
RUN USER=root cargo new --bin app
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copies over *only* your manifests and build files
|
||||||
|
COPY ./Cargo.* ./
|
||||||
|
COPY ./rust-toolchain ./rust-toolchain
|
||||||
|
COPY ./build.rs ./build.rs
|
||||||
|
|
||||||
|
# Builds your dependencies and removes the
|
||||||
|
# dummy project, except the target folder
|
||||||
|
# This folder contains the compiled dependencies
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
RUN find . -not -path "./target*" -delete
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
|
# Builds again, this time it'll just be
|
||||||
|
# your actual source files being built
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM debian:buster-slim
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
sqlite3 \
|
||||||
|
libpq5 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
EXPOSE 3012
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build app/target/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
90
docker/amd64/postgresql/Dockerfile.alpine
Normal file
90
docker/amd64/postgresql/Dockerfile.alpine
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# Musl build image for statically compiled binary
|
||||||
|
FROM clux/muslrust:nightly-2019-12-19 as build
|
||||||
|
|
||||||
|
# set postgresql backend
|
||||||
|
ARG DB=postgresql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
ENV USER "root"
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libpq-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN rustup target add x86_64-unknown-linux-musl
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
|
# Build
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM alpine:3.11
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
ENV SSL_CERT_DIR=/etc/ssl/certs
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
openssl \
|
||||||
|
postgresql-libs \
|
||||||
|
curl \
|
||||||
|
sqlite \
|
||||||
|
ca-certificates
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
EXPOSE 3012
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
95
docker/amd64/sqlite/Dockerfile
Normal file
95
docker/amd64/sqlite/Dockerfile
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set sqlite as default for DB ARG for backward comaptibility
|
||||||
|
ARG DB=sqlite
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
# Creates a dummy project used to grab dependencies
|
||||||
|
RUN USER=root cargo new --bin app
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copies over *only* your manifests and build files
|
||||||
|
COPY ./Cargo.* ./
|
||||||
|
COPY ./rust-toolchain ./rust-toolchain
|
||||||
|
COPY ./build.rs ./build.rs
|
||||||
|
|
||||||
|
# Builds your dependencies and removes the
|
||||||
|
# dummy project, except the target folder
|
||||||
|
# This folder contains the compiled dependencies
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
RUN find . -not -path "./target*" -delete
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
|
# Builds again, this time it'll just be
|
||||||
|
# your actual source files being built
|
||||||
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM debian:buster-slim
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
sqlite3 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
EXPOSE 3012
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build app/target/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
@@ -2,25 +2,33 @@
|
|||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
####################### VAULT BUILD IMAGE #######################
|
####################### VAULT BUILD IMAGE #######################
|
||||||
FROM alpine as vault
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
ENV VAULT_VERSION "v2.9.0"
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
RUN apk add --update-cache --upgrade \
|
RUN apk add --no-cache --upgrade \
|
||||||
curl \
|
curl \
|
||||||
tar
|
tar
|
||||||
|
|
||||||
RUN mkdir /web-vault
|
RUN mkdir /web-vault
|
||||||
WORKDIR /web-vault
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
RUN curl -L $URL | tar xz
|
RUN curl -L $URL | tar xz
|
||||||
RUN ls
|
RUN ls
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# Musl build image for statically compiled binary
|
# Musl build image for statically compiled binary
|
||||||
FROM clux/muslrust:nightly-2018-12-01 as build
|
FROM clux/muslrust:nightly-2019-12-19 as build
|
||||||
|
|
||||||
|
# set sqlite as default for DB ARG for backward comaptibility
|
||||||
|
ARG DB=sqlite
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
ENV USER "root"
|
ENV USER "root"
|
||||||
|
|
||||||
@@ -32,13 +40,16 @@ COPY . .
|
|||||||
|
|
||||||
RUN rustup target add x86_64-unknown-linux-musl
|
RUN rustup target add x86_64-unknown-linux-musl
|
||||||
|
|
||||||
|
# Make sure that we actually build the project
|
||||||
|
RUN touch src/main.rs
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN cargo build --release
|
RUN cargo build --features ${DB} --release
|
||||||
|
|
||||||
######################## RUNTIME IMAGE ########################
|
######################## RUNTIME IMAGE ########################
|
||||||
# Create a new stage with a minimal image
|
# Create a new stage with a minimal image
|
||||||
# because we already have a binary built
|
# because we already have a binary built
|
||||||
FROM alpine:3.8
|
FROM alpine:3.11
|
||||||
|
|
||||||
ENV ROCKET_ENV "staging"
|
ENV ROCKET_ENV "staging"
|
||||||
ENV ROCKET_PORT=80
|
ENV ROCKET_PORT=80
|
||||||
@@ -46,10 +57,11 @@ ENV ROCKET_WORKERS=10
|
|||||||
ENV SSL_CERT_DIR=/etc/ssl/certs
|
ENV SSL_CERT_DIR=/etc/ssl/certs
|
||||||
|
|
||||||
# Install needed libraries
|
# Install needed libraries
|
||||||
RUN apk add \
|
RUN apk add --no-cache \
|
||||||
openssl\
|
openssl \
|
||||||
ca-certificates \
|
curl \
|
||||||
&& rm /var/cache/apk/*
|
sqlite \
|
||||||
|
ca-certificates
|
||||||
|
|
||||||
RUN mkdir /data
|
RUN mkdir /data
|
||||||
VOLUME /data
|
VOLUME /data
|
||||||
@@ -62,5 +74,11 @@ COPY Rocket.toml .
|
|||||||
COPY --from=vault /web-vault ./web-vault
|
COPY --from=vault /web-vault ./web-vault
|
||||||
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
|
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
|
||||||
# Configures the startup!
|
# Configures the startup!
|
||||||
CMD ./bitwarden_rs
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
110
docker/armv6/mysql/Dockerfile
Normal file
110
docker/armv6/mysql/Dockerfile
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=mysql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
gcc-arm-linux-gnueabi \
|
||||||
|
&& mkdir -p ~/.cargo \
|
||||||
|
&& echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
|
||||||
|
&& echo 'linker = "arm-linux-gnueabi-gcc"' >> ~/.cargo/config
|
||||||
|
|
||||||
|
ENV CARGO_HOME "/root/.cargo"
|
||||||
|
ENV USER "root"
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Prepare openssl armel libs
|
||||||
|
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
||||||
|
/etc/apt/sources.list.d/deb-src.list \
|
||||||
|
&& dpkg --add-architecture armel \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libssl-dev:armel \
|
||||||
|
libc6-dev:armel \
|
||||||
|
libmariadb-dev:armel
|
||||||
|
|
||||||
|
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
|
||||||
|
ENV CROSS_COMPILE="1"
|
||||||
|
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
|
||||||
|
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build
|
||||||
|
RUN rustup target add arm-unknown-linux-gnueabi
|
||||||
|
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM balenalib/rpi-debian:buster
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
libmariadbclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
|
||||||
|
RUN [ "cross-build-end" ]
|
||||||
|
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
@@ -2,29 +2,38 @@
|
|||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
####################### VAULT BUILD IMAGE #######################
|
####################### VAULT BUILD IMAGE #######################
|
||||||
FROM alpine as vault
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
ENV VAULT_VERSION "v2.9.0"
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
RUN apk add --update-cache --upgrade \
|
RUN apk add --no-cache --upgrade \
|
||||||
curl \
|
curl \
|
||||||
tar
|
tar
|
||||||
|
|
||||||
RUN mkdir /web-vault
|
RUN mkdir /web-vault
|
||||||
WORKDIR /web-vault
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
RUN curl -L $URL | tar xz
|
RUN curl -L $URL | tar xz
|
||||||
RUN ls
|
RUN ls
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# We need to use the Rust build image, because
|
# We need to use the Rust build image, because
|
||||||
# we need the Rust compiler and Cargo tooling
|
# we need the Rust compiler and Cargo tooling
|
||||||
FROM rust as build
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set sqlite as default for DB ARG for backward comaptibility
|
||||||
|
ARG DB=sqlite
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
gcc-arm-linux-gnueabi \
|
gcc-arm-linux-gnueabi \
|
||||||
&& mkdir -p ~/.cargo \
|
&& mkdir -p ~/.cargo \
|
||||||
&& echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
|
&& echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
|
||||||
@@ -41,6 +50,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
|||||||
&& dpkg --add-architecture armel \
|
&& dpkg --add-architecture armel \
|
||||||
&& apt-get update \
|
&& apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
libssl-dev:armel \
|
libssl-dev:armel \
|
||||||
libc6-dev:armel
|
libc6-dev:armel
|
||||||
|
|
||||||
@@ -55,12 +65,12 @@ COPY . .
|
|||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN rustup target add arm-unknown-linux-gnueabi
|
RUN rustup target add arm-unknown-linux-gnueabi
|
||||||
RUN cargo build --release --target=arm-unknown-linux-gnueabi -v
|
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
|
||||||
|
|
||||||
######################## RUNTIME IMAGE ########################
|
######################## RUNTIME IMAGE ########################
|
||||||
# Create a new stage with a minimal image
|
# Create a new stage with a minimal image
|
||||||
# because we already have a binary built
|
# because we already have a binary built
|
||||||
FROM balenalib/rpi-debian:stretch
|
FROM balenalib/rpi-debian:buster
|
||||||
|
|
||||||
ENV ROCKET_ENV "staging"
|
ENV ROCKET_ENV "staging"
|
||||||
ENV ROCKET_PORT=80
|
ENV ROCKET_PORT=80
|
||||||
@@ -69,12 +79,13 @@ ENV ROCKET_WORKERS=10
|
|||||||
RUN [ "cross-build-start" ]
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
# Install needed libraries
|
# Install needed libraries
|
||||||
RUN apt-get update && apt-get install -y\
|
RUN apt-get update && apt-get install -y \
|
||||||
openssl\
|
--no-install-recommends \
|
||||||
ca-certificates\
|
openssl \
|
||||||
--no-install-recommends\
|
ca-certificates \
|
||||||
&& ln -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3\
|
curl \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
sqlite3 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN mkdir /data
|
RUN mkdir /data
|
||||||
|
|
||||||
@@ -89,5 +100,10 @@ COPY Rocket.toml .
|
|||||||
COPY --from=vault /web-vault ./web-vault
|
COPY --from=vault /web-vault ./web-vault
|
||||||
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .
|
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
# Configures the startup!
|
# Configures the startup!
|
||||||
CMD ./bitwarden_rs
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
111
docker/armv7/mysql/Dockerfile
Normal file
111
docker/armv7/mysql/Dockerfile
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# Using multistage build:
|
||||||
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
|
####################### VAULT BUILD IMAGE #######################
|
||||||
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
|
RUN apk add --no-cache --upgrade \
|
||||||
|
curl \
|
||||||
|
tar
|
||||||
|
|
||||||
|
RUN mkdir /web-vault
|
||||||
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
|
RUN curl -L $URL | tar xz
|
||||||
|
RUN ls
|
||||||
|
|
||||||
|
########################## BUILD IMAGE ##########################
|
||||||
|
# We need to use the Rust build image, because
|
||||||
|
# we need the Rust compiler and Cargo tooling
|
||||||
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set mysql backend
|
||||||
|
ARG DB=mysql
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
|
RUN apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
gcc-arm-linux-gnueabihf \
|
||||||
|
&& mkdir -p ~/.cargo \
|
||||||
|
&& echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
|
||||||
|
&& echo 'linker = "arm-linux-gnueabihf-gcc"' >> ~/.cargo/config
|
||||||
|
|
||||||
|
ENV CARGO_HOME "/root/.cargo"
|
||||||
|
ENV USER "root"
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Prepare openssl armhf libs
|
||||||
|
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
||||||
|
/etc/apt/sources.list.d/deb-src.list \
|
||||||
|
&& dpkg --add-architecture armhf \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
libssl-dev:armhf \
|
||||||
|
libc6-dev:armhf \
|
||||||
|
libmariadb-dev:armhf
|
||||||
|
|
||||||
|
|
||||||
|
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
|
||||||
|
ENV CROSS_COMPILE="1"
|
||||||
|
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
|
||||||
|
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
|
||||||
|
|
||||||
|
# Copies the complete project
|
||||||
|
# To avoid copying unneeded files, use .dockerignore
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build
|
||||||
|
RUN rustup target add armv7-unknown-linux-gnueabihf
|
||||||
|
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
|
||||||
|
|
||||||
|
######################## RUNTIME IMAGE ########################
|
||||||
|
# Create a new stage with a minimal image
|
||||||
|
# because we already have a binary built
|
||||||
|
FROM balenalib/armv7hf-debian:buster
|
||||||
|
|
||||||
|
ENV ROCKET_ENV "staging"
|
||||||
|
ENV ROCKET_PORT=80
|
||||||
|
ENV ROCKET_WORKERS=10
|
||||||
|
|
||||||
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
|
# Install needed libraries
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
|
openssl \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
libmariadbclient-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN mkdir /data
|
||||||
|
|
||||||
|
RUN [ "cross-build-end" ]
|
||||||
|
|
||||||
|
VOLUME /data
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Copies the files from the context (Rocket.toml file and web-vault)
|
||||||
|
# and the binary from the "build" stage to the current stage
|
||||||
|
COPY Rocket.toml .
|
||||||
|
COPY --from=vault /web-vault ./web-vault
|
||||||
|
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
|
# Configures the startup!
|
||||||
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
@@ -2,29 +2,38 @@
|
|||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
####################### VAULT BUILD IMAGE #######################
|
####################### VAULT BUILD IMAGE #######################
|
||||||
FROM alpine as vault
|
FROM alpine:3.11 as vault
|
||||||
|
|
||||||
ENV VAULT_VERSION "v2.9.0"
|
ENV VAULT_VERSION "v2.12.0b"
|
||||||
|
|
||||||
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"
|
||||||
|
|
||||||
RUN apk add --update-cache --upgrade \
|
RUN apk add --no-cache --upgrade \
|
||||||
curl \
|
curl \
|
||||||
tar
|
tar
|
||||||
|
|
||||||
RUN mkdir /web-vault
|
RUN mkdir /web-vault
|
||||||
WORKDIR /web-vault
|
WORKDIR /web-vault
|
||||||
|
|
||||||
|
SHELL ["/bin/ash", "-eo", "pipefail", "-c"]
|
||||||
|
|
||||||
RUN curl -L $URL | tar xz
|
RUN curl -L $URL | tar xz
|
||||||
RUN ls
|
RUN ls
|
||||||
|
|
||||||
########################## BUILD IMAGE ##########################
|
########################## BUILD IMAGE ##########################
|
||||||
# We need to use the Rust build image, because
|
# We need to use the Rust build image, because
|
||||||
# we need the Rust compiler and Cargo tooling
|
# we need the Rust compiler and Cargo tooling
|
||||||
FROM rust as build
|
FROM rust:1.40 as build
|
||||||
|
|
||||||
|
# set sqlite as default for DB ARG for backward comaptibility
|
||||||
|
ARG DB=sqlite
|
||||||
|
|
||||||
|
# Don't download rust docs
|
||||||
|
RUN rustup set profile minimal
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
gcc-arm-linux-gnueabihf \
|
gcc-arm-linux-gnueabihf \
|
||||||
&& mkdir -p ~/.cargo \
|
&& mkdir -p ~/.cargo \
|
||||||
&& echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
|
&& echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
|
||||||
@@ -41,6 +50,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
|
|||||||
&& dpkg --add-architecture armhf \
|
&& dpkg --add-architecture armhf \
|
||||||
&& apt-get update \
|
&& apt-get update \
|
||||||
&& apt-get install -y \
|
&& apt-get install -y \
|
||||||
|
--no-install-recommends \
|
||||||
libssl-dev:armhf \
|
libssl-dev:armhf \
|
||||||
libc6-dev:armhf
|
libc6-dev:armhf
|
||||||
|
|
||||||
@@ -55,12 +65,12 @@ COPY . .
|
|||||||
|
|
||||||
# Build
|
# Build
|
||||||
RUN rustup target add armv7-unknown-linux-gnueabihf
|
RUN rustup target add armv7-unknown-linux-gnueabihf
|
||||||
RUN cargo build --release --target=armv7-unknown-linux-gnueabihf -v
|
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
|
||||||
|
|
||||||
######################## RUNTIME IMAGE ########################
|
######################## RUNTIME IMAGE ########################
|
||||||
# Create a new stage with a minimal image
|
# Create a new stage with a minimal image
|
||||||
# because we already have a binary built
|
# because we already have a binary built
|
||||||
FROM balenalib/armv7hf-debian:stretch
|
FROM balenalib/armv7hf-debian:buster
|
||||||
|
|
||||||
ENV ROCKET_ENV "staging"
|
ENV ROCKET_ENV "staging"
|
||||||
ENV ROCKET_PORT=80
|
ENV ROCKET_PORT=80
|
||||||
@@ -69,11 +79,13 @@ ENV ROCKET_WORKERS=10
|
|||||||
RUN [ "cross-build-start" ]
|
RUN [ "cross-build-start" ]
|
||||||
|
|
||||||
# Install needed libraries
|
# Install needed libraries
|
||||||
RUN apt-get update && apt-get install -y\
|
RUN apt-get update && apt-get install -y \
|
||||||
openssl\
|
--no-install-recommends \
|
||||||
ca-certificates\
|
openssl \
|
||||||
--no-install-recommends\
|
ca-certificates \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
curl \
|
||||||
|
sqlite3 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
RUN mkdir /data
|
RUN mkdir /data
|
||||||
|
|
||||||
@@ -88,5 +100,10 @@ COPY Rocket.toml .
|
|||||||
COPY --from=vault /web-vault ./web-vault
|
COPY --from=vault /web-vault ./web-vault
|
||||||
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .
|
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .
|
||||||
|
|
||||||
|
COPY docker/healthcheck.sh ./healthcheck.sh
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1
|
||||||
|
|
||||||
# Configures the startup!
|
# Configures the startup!
|
||||||
CMD ./bitwarden_rs
|
WORKDIR /
|
||||||
|
CMD ["/bitwarden_rs"]
|
8
docker/healthcheck.sh
Normal file
8
docker/healthcheck.sh
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/usr/bin/env sh
|
||||||
|
|
||||||
|
if [ -z "$ROCKET_TLS"]
|
||||||
|
then
|
||||||
|
curl --fail http://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
|
||||||
|
else
|
||||||
|
curl --insecure --fail https://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
|
||||||
|
fi
|
62
migrations/mysql/2018-01-14-171611_create_tables/up.sql
Normal file
62
migrations/mysql/2018-01-14-171611_create_tables/up.sql
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
CREATE TABLE users (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
updated_at DATETIME NOT NULL,
|
||||||
|
email VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
password_hash BLOB NOT NULL,
|
||||||
|
salt BLOB NOT NULL,
|
||||||
|
password_iterations INTEGER NOT NULL,
|
||||||
|
password_hint TEXT,
|
||||||
|
`key` TEXT NOT NULL,
|
||||||
|
private_key TEXT,
|
||||||
|
public_key TEXT,
|
||||||
|
totp_secret TEXT,
|
||||||
|
totp_recover TEXT,
|
||||||
|
security_stamp TEXT NOT NULL,
|
||||||
|
equivalent_domains TEXT NOT NULL,
|
||||||
|
excluded_globals TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE devices (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
updated_at DATETIME NOT NULL,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
type INTEGER NOT NULL,
|
||||||
|
push_token TEXT,
|
||||||
|
refresh_token TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE ciphers (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
updated_at DATETIME NOT NULL,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
folder_uuid CHAR(36) REFERENCES folders (uuid),
|
||||||
|
organization_uuid CHAR(36),
|
||||||
|
type INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
notes TEXT,
|
||||||
|
fields TEXT,
|
||||||
|
data TEXT NOT NULL,
|
||||||
|
favorite BOOLEAN NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE attachments (
|
||||||
|
id CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
file_name TEXT NOT NULL,
|
||||||
|
file_size INTEGER NOT NULL
|
||||||
|
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE folders (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
updated_at DATETIME NOT NULL,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
name TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
@@ -0,0 +1,30 @@
|
|||||||
|
CREATE TABLE collections (
|
||||||
|
uuid VARCHAR(40) NOT NULL PRIMARY KEY,
|
||||||
|
org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
|
||||||
|
name TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE organizations (
|
||||||
|
uuid VARCHAR(40) NOT NULL PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
billing_email TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE users_collections (
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
|
||||||
|
PRIMARY KEY (user_uuid, collection_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE users_organizations (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
|
||||||
|
|
||||||
|
access_all BOOLEAN NOT NULL,
|
||||||
|
`key` TEXT NOT NULL,
|
||||||
|
status INTEGER NOT NULL,
|
||||||
|
type INTEGER NOT NULL,
|
||||||
|
|
||||||
|
UNIQUE (user_uuid, org_uuid)
|
||||||
|
);
|
@@ -0,0 +1,34 @@
|
|||||||
|
ALTER TABLE ciphers RENAME TO oldCiphers;
|
||||||
|
|
||||||
|
CREATE TABLE ciphers (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at DATETIME NOT NULL,
|
||||||
|
updated_at DATETIME NOT NULL,
|
||||||
|
user_uuid CHAR(36) REFERENCES users (uuid), -- Make this optional
|
||||||
|
organization_uuid CHAR(36) REFERENCES organizations (uuid), -- Add reference to orgs table
|
||||||
|
-- Remove folder_uuid
|
||||||
|
type INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
notes TEXT,
|
||||||
|
fields TEXT,
|
||||||
|
data TEXT NOT NULL,
|
||||||
|
favorite BOOLEAN NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE folders_ciphers (
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),
|
||||||
|
|
||||||
|
PRIMARY KEY (cipher_uuid, folder_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO ciphers (uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite)
|
||||||
|
SELECT uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite FROM oldCiphers;
|
||||||
|
|
||||||
|
INSERT INTO folders_ciphers (cipher_uuid, folder_uuid)
|
||||||
|
SELECT uuid, folder_uuid FROM oldCiphers WHERE folder_uuid IS NOT NULL;
|
||||||
|
|
||||||
|
|
||||||
|
DROP TABLE oldCiphers;
|
||||||
|
|
||||||
|
ALTER TABLE users_collections ADD COLUMN read_only BOOLEAN NOT NULL DEFAULT 0; -- False
|
@@ -0,0 +1,5 @@
|
|||||||
|
CREATE TABLE ciphers_collections (
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
|
||||||
|
PRIMARY KEY (cipher_uuid, collection_uuid)
|
||||||
|
);
|
@@ -0,0 +1,14 @@
|
|||||||
|
ALTER TABLE attachments RENAME TO oldAttachments;
|
||||||
|
|
||||||
|
CREATE TABLE attachments (
|
||||||
|
id CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
file_name TEXT NOT NULL,
|
||||||
|
file_size INTEGER NOT NULL
|
||||||
|
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO attachments (id, cipher_uuid, file_name, file_size)
|
||||||
|
SELECT id, cipher_uuid, file_name, file_size FROM oldAttachments;
|
||||||
|
|
||||||
|
DROP TABLE oldAttachments;
|
@@ -0,0 +1,15 @@
|
|||||||
|
CREATE TABLE twofactor (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
type INTEGER NOT NULL,
|
||||||
|
enabled BOOLEAN NOT NULL,
|
||||||
|
data TEXT NOT NULL,
|
||||||
|
|
||||||
|
UNIQUE (user_uuid, type)
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
INSERT INTO twofactor (uuid, user_uuid, type, enabled, data)
|
||||||
|
SELECT UUID(), uuid, 0, 1, u.totp_secret FROM users u where u.totp_secret IS NOT NULL;
|
||||||
|
|
||||||
|
UPDATE users SET totp_secret = NULL; -- Instead of recreating the table, just leave the columns empty
|
3
migrations/mysql/2018-09-10-111213_add_invites/up.sql
Normal file
3
migrations/mysql/2018-09-10-111213_add_invites/up.sql
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
CREATE TABLE invitations (
|
||||||
|
email VARCHAR(255) NOT NULL PRIMARY KEY
|
||||||
|
);
|
@@ -4,4 +4,4 @@ ALTER TABLE users
|
|||||||
|
|
||||||
ALTER TABLE users
|
ALTER TABLE users
|
||||||
ADD COLUMN
|
ADD COLUMN
|
||||||
client_kdf_iter INTEGER NOT NULL DEFAULT 5000;
|
client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
|
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE attachments
|
||||||
|
ADD COLUMN
|
||||||
|
`key` TEXT;
|
@@ -0,0 +1,7 @@
|
|||||||
|
ALTER TABLE attachments CHANGE COLUMN akey `key` TEXT;
|
||||||
|
ALTER TABLE ciphers CHANGE COLUMN atype type INTEGER NOT NULL;
|
||||||
|
ALTER TABLE devices CHANGE COLUMN atype type INTEGER NOT NULL;
|
||||||
|
ALTER TABLE twofactor CHANGE COLUMN atype type INTEGER NOT NULL;
|
||||||
|
ALTER TABLE users CHANGE COLUMN akey `key` TEXT;
|
||||||
|
ALTER TABLE users_organizations CHANGE COLUMN akey `key` TEXT;
|
||||||
|
ALTER TABLE users_organizations CHANGE COLUMN atype type INTEGER NOT NULL;
|
@@ -0,0 +1,7 @@
|
|||||||
|
ALTER TABLE attachments CHANGE COLUMN `key` akey TEXT;
|
||||||
|
ALTER TABLE ciphers CHANGE COLUMN type atype INTEGER NOT NULL;
|
||||||
|
ALTER TABLE devices CHANGE COLUMN type atype INTEGER NOT NULL;
|
||||||
|
ALTER TABLE twofactor CHANGE COLUMN type atype INTEGER NOT NULL;
|
||||||
|
ALTER TABLE users CHANGE COLUMN `key` akey TEXT;
|
||||||
|
ALTER TABLE users_organizations CHANGE COLUMN `key` akey TEXT;
|
||||||
|
ALTER TABLE users_organizations CHANGE COLUMN type atype INTEGER NOT NULL;
|
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
|
@@ -0,0 +1 @@
|
|||||||
|
|
@@ -0,0 +1,5 @@
|
|||||||
|
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
|
@@ -0,0 +1,13 @@
|
|||||||
|
DROP TABLE devices;
|
||||||
|
DROP TABLE attachments;
|
||||||
|
DROP TABLE users_collections;
|
||||||
|
DROP TABLE users_organizations;
|
||||||
|
DROP TABLE folders_ciphers;
|
||||||
|
DROP TABLE ciphers_collections;
|
||||||
|
DROP TABLE twofactor;
|
||||||
|
DROP TABLE invitations;
|
||||||
|
DROP TABLE collections;
|
||||||
|
DROP TABLE folders;
|
||||||
|
DROP TABLE ciphers;
|
||||||
|
DROP TABLE users;
|
||||||
|
DROP TABLE organizations;
|
121
migrations/postgresql/2019-09-12-100000_create_tables/up.sql
Normal file
121
migrations/postgresql/2019-09-12-100000_create_tables/up.sql
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
CREATE TABLE users (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at TIMESTAMP NOT NULL,
|
||||||
|
updated_at TIMESTAMP NOT NULL,
|
||||||
|
email VARCHAR(255) NOT NULL UNIQUE,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
password_hash BYTEA NOT NULL,
|
||||||
|
salt BYTEA NOT NULL,
|
||||||
|
password_iterations INTEGER NOT NULL,
|
||||||
|
password_hint TEXT,
|
||||||
|
akey TEXT NOT NULL,
|
||||||
|
private_key TEXT,
|
||||||
|
public_key TEXT,
|
||||||
|
totp_secret TEXT,
|
||||||
|
totp_recover TEXT,
|
||||||
|
security_stamp TEXT NOT NULL,
|
||||||
|
equivalent_domains TEXT NOT NULL,
|
||||||
|
excluded_globals TEXT NOT NULL,
|
||||||
|
client_kdf_type INTEGER NOT NULL DEFAULT 0,
|
||||||
|
client_kdf_iter INTEGER NOT NULL DEFAULT 100000
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE devices (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at TIMESTAMP NOT NULL,
|
||||||
|
updated_at TIMESTAMP NOT NULL,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
atype INTEGER NOT NULL,
|
||||||
|
push_token TEXT,
|
||||||
|
refresh_token TEXT NOT NULL,
|
||||||
|
twofactor_remember TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE organizations (
|
||||||
|
uuid VARCHAR(40) NOT NULL PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
billing_email TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE ciphers (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at TIMESTAMP NOT NULL,
|
||||||
|
updated_at TIMESTAMP NOT NULL,
|
||||||
|
user_uuid CHAR(36) REFERENCES users (uuid),
|
||||||
|
organization_uuid CHAR(36) REFERENCES organizations (uuid),
|
||||||
|
atype INTEGER NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
notes TEXT,
|
||||||
|
fields TEXT,
|
||||||
|
data TEXT NOT NULL,
|
||||||
|
favorite BOOLEAN NOT NULL,
|
||||||
|
password_history TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE attachments (
|
||||||
|
id CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
file_name TEXT NOT NULL,
|
||||||
|
file_size INTEGER NOT NULL,
|
||||||
|
akey TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE folders (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
created_at TIMESTAMP NOT NULL,
|
||||||
|
updated_at TIMESTAMP NOT NULL,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
name TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE collections (
|
||||||
|
uuid VARCHAR(40) NOT NULL PRIMARY KEY,
|
||||||
|
org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
|
||||||
|
name TEXT NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE users_collections (
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
|
||||||
|
read_only BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
PRIMARY KEY (user_uuid, collection_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE users_organizations (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
|
||||||
|
|
||||||
|
access_all BOOLEAN NOT NULL,
|
||||||
|
akey TEXT NOT NULL,
|
||||||
|
status INTEGER NOT NULL,
|
||||||
|
atype INTEGER NOT NULL,
|
||||||
|
|
||||||
|
UNIQUE (user_uuid, org_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE folders_ciphers (
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),
|
||||||
|
PRIMARY KEY (cipher_uuid, folder_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE ciphers_collections (
|
||||||
|
cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
|
||||||
|
collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
|
||||||
|
PRIMARY KEY (cipher_uuid, collection_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE twofactor (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
|
||||||
|
atype INTEGER NOT NULL,
|
||||||
|
enabled BOOLEAN NOT NULL,
|
||||||
|
data TEXT NOT NULL,
|
||||||
|
UNIQUE (user_uuid, atype)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE invitations (
|
||||||
|
email VARCHAR(255) NOT NULL PRIMARY KEY
|
||||||
|
);
|
@@ -0,0 +1,26 @@
|
|||||||
|
ALTER TABLE attachments ALTER COLUMN id TYPE CHAR(36);
|
||||||
|
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users ALTER COLUMN email TYPE VARCHAR(255);
|
||||||
|
ALTER TABLE devices ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE devices ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE organizations ALTER COLUMN uuid TYPE CHAR(40);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE folders ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE folders ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE collections ALTER COLUMN uuid TYPE CHAR(40);
|
||||||
|
ALTER TABLE collections ALTER COLUMN org_uuid TYPE CHAR(40);
|
||||||
|
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE twofactor ALTER COLUMN uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE CHAR(36);
|
||||||
|
ALTER TABLE invitations ALTER COLUMN email TYPE VARCHAR(255);
|
@@ -0,0 +1,27 @@
|
|||||||
|
-- Switch from CHAR() types to VARCHAR() types to avoid padding issues.
|
||||||
|
ALTER TABLE attachments ALTER COLUMN id TYPE TEXT;
|
||||||
|
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users ALTER COLUMN email TYPE TEXT;
|
||||||
|
ALTER TABLE devices ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE devices ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE organizations ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE folders ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE folders ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE collections ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE collections ALTER COLUMN org_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE twofactor ALTER COLUMN uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE VARCHAR(40);
|
||||||
|
ALTER TABLE invitations ALTER COLUMN email TYPE TEXT;
|
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
|
@@ -0,0 +1 @@
|
|||||||
|
|
@@ -0,0 +1,5 @@
|
|||||||
|
ALTER TABLE users ADD COLUMN verified_at TIMESTAMP DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN last_verifying_at TIMESTAMP DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
|
@@ -0,0 +1,9 @@
|
|||||||
|
DROP TABLE users;
|
||||||
|
|
||||||
|
DROP TABLE devices;
|
||||||
|
|
||||||
|
DROP TABLE ciphers;
|
||||||
|
|
||||||
|
DROP TABLE attachments;
|
||||||
|
|
||||||
|
DROP TABLE folders;
|
@@ -0,0 +1,8 @@
|
|||||||
|
DROP TABLE collections;
|
||||||
|
|
||||||
|
DROP TABLE organizations;
|
||||||
|
|
||||||
|
|
||||||
|
DROP TABLE users_collections;
|
||||||
|
|
||||||
|
DROP TABLE users_organizations;
|
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE ciphers_collections;
|
@@ -0,0 +1 @@
|
|||||||
|
-- This file should undo anything in `up.sql`
|
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE devices
|
||||||
|
ADD COLUMN
|
||||||
|
twofactor_remember TEXT;
|
@@ -0,0 +1,8 @@
|
|||||||
|
UPDATE users
|
||||||
|
SET totp_secret = (
|
||||||
|
SELECT twofactor.data FROM twofactor
|
||||||
|
WHERE twofactor.type = 0
|
||||||
|
AND twofactor.user_uuid = users.uuid
|
||||||
|
);
|
||||||
|
|
||||||
|
DROP TABLE twofactor;
|
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE ciphers
|
||||||
|
ADD COLUMN
|
||||||
|
password_history TEXT;
|
1
migrations/sqlite/2018-09-10-111213_add_invites/down.sql
Normal file
1
migrations/sqlite/2018-09-10-111213_add_invites/down.sql
Normal file
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE invitations;
|
@@ -0,0 +1,7 @@
|
|||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN
|
||||||
|
client_kdf_type INTEGER NOT NULL DEFAULT 0; -- PBKDF2
|
||||||
|
|
||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN
|
||||||
|
client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
|
@@ -0,0 +1,7 @@
|
|||||||
|
ALTER TABLE attachments RENAME COLUMN akey TO key;
|
||||||
|
ALTER TABLE ciphers RENAME COLUMN atype TO type;
|
||||||
|
ALTER TABLE devices RENAME COLUMN atype TO type;
|
||||||
|
ALTER TABLE twofactor RENAME COLUMN atype TO type;
|
||||||
|
ALTER TABLE users RENAME COLUMN akey TO key;
|
||||||
|
ALTER TABLE users_organizations RENAME COLUMN akey TO key;
|
||||||
|
ALTER TABLE users_organizations RENAME COLUMN atype TO type;
|
@@ -0,0 +1,7 @@
|
|||||||
|
ALTER TABLE attachments RENAME COLUMN key TO akey;
|
||||||
|
ALTER TABLE ciphers RENAME COLUMN type TO atype;
|
||||||
|
ALTER TABLE devices RENAME COLUMN type TO atype;
|
||||||
|
ALTER TABLE twofactor RENAME COLUMN type TO atype;
|
||||||
|
ALTER TABLE users RENAME COLUMN key TO akey;
|
||||||
|
ALTER TABLE users_organizations RENAME COLUMN key TO akey;
|
||||||
|
ALTER TABLE users_organizations RENAME COLUMN type TO atype;
|
@@ -0,0 +1 @@
|
|||||||
|
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
|
@@ -0,0 +1 @@
|
|||||||
|
|
@@ -0,0 +1,5 @@
|
|||||||
|
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new TEXT DEFAULT NULL;
|
||||||
|
ALTER TABLE users ADD COLUMN email_new_token TEXT DEFAULT NULL;
|
@@ -1 +1 @@
|
|||||||
nightly-2019-03-14
|
nightly-2019-12-19
|
@@ -1 +1,2 @@
|
|||||||
|
version = "Two"
|
||||||
max_width = 120
|
max_width = 120
|
||||||
|
@@ -1,4 +1,5 @@
|
|||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
use rocket::http::{Cookie, Cookies, SameSite};
|
use rocket::http::{Cookie, Cookies, SameSite};
|
||||||
use rocket::request::{self, FlashMessage, Form, FromRequest, Request};
|
use rocket::request::{self, FlashMessage, Form, FromRequest, Request};
|
||||||
@@ -6,10 +7,10 @@ use rocket::response::{content::Html, Flash, Redirect};
|
|||||||
use rocket::{Outcome, Route};
|
use rocket::{Outcome, Route};
|
||||||
use rocket_contrib::json::Json;
|
use rocket_contrib::json::Json;
|
||||||
|
|
||||||
use crate::api::{ApiResult, EmptyResult};
|
use crate::api::{ApiResult, EmptyResult, JsonResult};
|
||||||
use crate::auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp};
|
use crate::auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp};
|
||||||
use crate::config::ConfigBuilder;
|
use crate::config::ConfigBuilder;
|
||||||
use crate::db::{models::*, DbConn};
|
use crate::db::{backup_database, models::*, DbConn};
|
||||||
use crate::error::Error;
|
use crate::error::Error;
|
||||||
use crate::mail;
|
use crate::mail;
|
||||||
use crate::CONFIG;
|
use crate::CONFIG;
|
||||||
@@ -21,17 +22,25 @@ pub fn routes() -> Vec<Route> {
|
|||||||
|
|
||||||
routes![
|
routes![
|
||||||
admin_login,
|
admin_login,
|
||||||
|
get_users,
|
||||||
post_admin_login,
|
post_admin_login,
|
||||||
admin_page,
|
admin_page,
|
||||||
invite_user,
|
invite_user,
|
||||||
|
logout,
|
||||||
delete_user,
|
delete_user,
|
||||||
deauth_user,
|
deauth_user,
|
||||||
|
remove_2fa,
|
||||||
update_revision_users,
|
update_revision_users,
|
||||||
post_config,
|
post_config,
|
||||||
delete_config,
|
delete_config,
|
||||||
|
backup_db,
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref CAN_BACKUP: bool = cfg!(feature = "sqlite") && Command::new("sqlite3").arg("-version").status().is_ok();
|
||||||
|
}
|
||||||
|
|
||||||
#[get("/")]
|
#[get("/")]
|
||||||
fn admin_disabled() -> &'static str {
|
fn admin_disabled() -> &'static str {
|
||||||
"The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it"
|
"The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it"
|
||||||
@@ -100,6 +109,8 @@ struct AdminTemplateData {
|
|||||||
version: Option<&'static str>,
|
version: Option<&'static str>,
|
||||||
users: Vec<Value>,
|
users: Vec<Value>,
|
||||||
config: Value,
|
config: Value,
|
||||||
|
can_backup: bool,
|
||||||
|
logged_in: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AdminTemplateData {
|
impl AdminTemplateData {
|
||||||
@@ -109,6 +120,8 @@ impl AdminTemplateData {
|
|||||||
version: VERSION,
|
version: VERSION,
|
||||||
users,
|
users,
|
||||||
config: CONFIG.prepare_json(),
|
config: CONFIG.prepare_json(),
|
||||||
|
can_backup: *CAN_BACKUP,
|
||||||
|
logged_in: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -144,9 +157,10 @@ fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> Empt
|
|||||||
err!("Invitations are not allowed")
|
err!("Invitations are not allowed")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let mut user = User::new(email);
|
||||||
|
user.save(&conn)?;
|
||||||
|
|
||||||
if CONFIG.mail_enabled() {
|
if CONFIG.mail_enabled() {
|
||||||
let mut user = User::new(email);
|
|
||||||
user.save(&conn)?;
|
|
||||||
let org_name = "bitwarden_rs";
|
let org_name = "bitwarden_rs";
|
||||||
mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None)
|
mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None)
|
||||||
} else {
|
} else {
|
||||||
@@ -155,6 +169,20 @@ fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> Empt
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[get("/logout")]
|
||||||
|
fn logout(mut cookies: Cookies) -> Result<Redirect, ()> {
|
||||||
|
cookies.remove(Cookie::named(COOKIE_NAME));
|
||||||
|
Ok(Redirect::to(ADMIN_PATH))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[get("/users")]
|
||||||
|
fn get_users(_token: AdminToken, conn: DbConn) -> JsonResult {
|
||||||
|
let users = User::get_all(&conn);
|
||||||
|
let users_json: Vec<Value> = users.iter().map(|u| u.to_json(&conn)).collect();
|
||||||
|
|
||||||
|
Ok(Json(Value::Array(users_json)))
|
||||||
|
}
|
||||||
|
|
||||||
#[post("/users/<uuid>/delete")]
|
#[post("/users/<uuid>/delete")]
|
||||||
fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||||
let user = match User::find_by_uuid(&uuid, &conn) {
|
let user = match User::find_by_uuid(&uuid, &conn) {
|
||||||
@@ -178,6 +206,18 @@ fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
|||||||
user.save(&conn)
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[post("/users/<uuid>/remove-2fa")]
|
||||||
|
fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||||
|
let mut user = match User::find_by_uuid(&uuid, &conn) {
|
||||||
|
Some(user) => user,
|
||||||
|
None => err!("User doesn't exist"),
|
||||||
|
};
|
||||||
|
|
||||||
|
TwoFactor::delete_all_by_user(&user.uuid, &conn)?;
|
||||||
|
user.totp_recover = None;
|
||||||
|
user.save(&conn)
|
||||||
|
}
|
||||||
|
|
||||||
#[post("/users/update_revision")]
|
#[post("/users/update_revision")]
|
||||||
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||||
User::update_all_revisions(&conn)
|
User::update_all_revisions(&conn)
|
||||||
@@ -194,6 +234,15 @@ fn delete_config(_token: AdminToken) -> EmptyResult {
|
|||||||
CONFIG.delete_user_config()
|
CONFIG.delete_user_config()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[post("/config/backup_db")]
|
||||||
|
fn backup_db(_token: AdminToken) -> EmptyResult {
|
||||||
|
if *CAN_BACKUP {
|
||||||
|
backup_database()
|
||||||
|
} else {
|
||||||
|
err!("Can't back up current DB (either it's not SQLite or the 'sqlite' binary is not present)");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct AdminToken {}
|
pub struct AdminToken {}
|
||||||
|
|
||||||
impl<'a, 'r> FromRequest<'a, 'r> for AdminToken {
|
impl<'a, 'r> FromRequest<'a, 'r> for AdminToken {
|
||||||
|
@@ -1,10 +1,12 @@
|
|||||||
|
use chrono::Utc;
|
||||||
use rocket_contrib::json::Json;
|
use rocket_contrib::json::Json;
|
||||||
|
|
||||||
use crate::db::models::*;
|
use crate::db::models::*;
|
||||||
use crate::db::DbConn;
|
use crate::db::DbConn;
|
||||||
|
|
||||||
use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
|
use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
|
||||||
use crate::auth::{decode_invite, Headers};
|
use crate::auth::{decode_delete, decode_invite, decode_verify_email, Headers};
|
||||||
|
use crate::crypto;
|
||||||
use crate::mail;
|
use crate::mail;
|
||||||
|
|
||||||
use crate::CONFIG;
|
use crate::CONFIG;
|
||||||
@@ -25,6 +27,10 @@ pub fn routes() -> Vec<Route> {
|
|||||||
post_sstamp,
|
post_sstamp,
|
||||||
post_email_token,
|
post_email_token,
|
||||||
post_email,
|
post_email,
|
||||||
|
post_verify_email,
|
||||||
|
post_verify_email_token,
|
||||||
|
post_delete_recover,
|
||||||
|
post_delete_recover_token,
|
||||||
delete_account,
|
delete_account,
|
||||||
post_delete_account,
|
post_delete_account,
|
||||||
revision_date,
|
revision_date,
|
||||||
@@ -62,7 +68,11 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
|
|||||||
let mut user = match User::find_by_mail(&data.Email, &conn) {
|
let mut user = match User::find_by_mail(&data.Email, &conn) {
|
||||||
Some(user) => {
|
Some(user) => {
|
||||||
if !user.password_hash.is_empty() {
|
if !user.password_hash.is_empty() {
|
||||||
err!("User already exists")
|
if CONFIG.signups_allowed() {
|
||||||
|
err!("User already exists")
|
||||||
|
} else {
|
||||||
|
err!("Registration not allowed or user already exists")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(token) = data.Token {
|
if let Some(token) = data.Token {
|
||||||
@@ -82,14 +92,14 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
|
|||||||
} else if CONFIG.signups_allowed() {
|
} else if CONFIG.signups_allowed() {
|
||||||
err!("Account with this email already exists")
|
err!("Account with this email already exists")
|
||||||
} else {
|
} else {
|
||||||
err!("Registration not allowed")
|
err!("Registration not allowed or user already exists")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) {
|
if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) || CONFIG.can_signup_user(&data.Email) {
|
||||||
User::new(data.Email.clone())
|
User::new(data.Email.clone())
|
||||||
} else {
|
} else {
|
||||||
err!("Registration not allowed")
|
err!("Registration not allowed or user already exists")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -106,7 +116,7 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
|
|||||||
}
|
}
|
||||||
|
|
||||||
user.set_password(&data.MasterPasswordHash);
|
user.set_password(&data.MasterPasswordHash);
|
||||||
user.key = data.Key;
|
user.akey = data.Key;
|
||||||
|
|
||||||
// Add extra fields if present
|
// Add extra fields if present
|
||||||
if let Some(name) = data.Name {
|
if let Some(name) = data.Name {
|
||||||
@@ -122,6 +132,20 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
|
|||||||
user.public_key = Some(keys.PublicKey);
|
user.public_key = Some(keys.PublicKey);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if CONFIG.mail_enabled() {
|
||||||
|
if CONFIG.signups_verify() {
|
||||||
|
if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid) {
|
||||||
|
error!("Error sending welcome email: {:#?}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
user.last_verifying_at = Some(user.created_at);
|
||||||
|
} else {
|
||||||
|
if let Err(e) = mail::send_welcome(&user.email) {
|
||||||
|
error!("Error sending welcome email: {:#?}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
user.save(&conn)
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -204,7 +228,7 @@ fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn: DbCon
|
|||||||
}
|
}
|
||||||
|
|
||||||
user.set_password(&data.NewMasterPasswordHash);
|
user.set_password(&data.NewMasterPasswordHash);
|
||||||
user.key = data.Key;
|
user.akey = data.Key;
|
||||||
user.save(&conn)
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -231,7 +255,7 @@ fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, conn: DbConn) ->
|
|||||||
user.client_kdf_iter = data.KdfIterations;
|
user.client_kdf_iter = data.KdfIterations;
|
||||||
user.client_kdf_type = data.Kdf;
|
user.client_kdf_type = data.Kdf;
|
||||||
user.set_password(&data.NewMasterPasswordHash);
|
user.set_password(&data.NewMasterPasswordHash);
|
||||||
user.key = data.Key;
|
user.akey = data.Key;
|
||||||
user.save(&conn)
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -306,7 +330,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
|
|||||||
// Update user data
|
// Update user data
|
||||||
let mut user = headers.user;
|
let mut user = headers.user;
|
||||||
|
|
||||||
user.key = data.Key;
|
user.akey = data.Key;
|
||||||
user.private_key = Some(data.PrivateKey);
|
user.private_key = Some(data.PrivateKey);
|
||||||
user.reset_security_stamp();
|
user.reset_security_stamp();
|
||||||
|
|
||||||
@@ -337,8 +361,9 @@ struct EmailTokenData {
|
|||||||
#[post("/accounts/email-token", data = "<data>")]
|
#[post("/accounts/email-token", data = "<data>")]
|
||||||
fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
||||||
let data: EmailTokenData = data.into_inner().data;
|
let data: EmailTokenData = data.into_inner().data;
|
||||||
|
let mut user = headers.user;
|
||||||
|
|
||||||
if !headers.user.check_valid_password(&data.MasterPasswordHash) {
|
if !user.check_valid_password(&data.MasterPasswordHash) {
|
||||||
err!("Invalid password")
|
err!("Invalid password")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -346,7 +371,21 @@ fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: Db
|
|||||||
err!("Email already in use");
|
err!("Email already in use");
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
if !CONFIG.signups_allowed() && !CONFIG.can_signup_user(&data.NewEmail) {
|
||||||
|
err!("Email cannot be changed to this address");
|
||||||
|
}
|
||||||
|
|
||||||
|
let token = crypto::generate_token(6)?;
|
||||||
|
|
||||||
|
if CONFIG.mail_enabled() {
|
||||||
|
if let Err(e) = mail::send_change_email(&data.NewEmail, &token) {
|
||||||
|
error!("Error sending change-email email: {:#?}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
user.email_new = Some(data.NewEmail);
|
||||||
|
user.email_new_token = Some(token);
|
||||||
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
@@ -357,8 +396,7 @@ struct ChangeEmailData {
|
|||||||
|
|
||||||
Key: String,
|
Key: String,
|
||||||
NewMasterPasswordHash: String,
|
NewMasterPasswordHash: String,
|
||||||
#[serde(rename = "Token")]
|
Token: NumberOrString,
|
||||||
_Token: NumberOrString,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/accounts/email", data = "<data>")]
|
#[post("/accounts/email", data = "<data>")]
|
||||||
@@ -374,14 +412,142 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
|
|||||||
err!("Email already in use");
|
err!("Email already in use");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
match user.email_new {
|
||||||
|
Some(ref val) => {
|
||||||
|
if val != &data.NewEmail {
|
||||||
|
err!("Email change mismatch");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => err!("No email change pending"),
|
||||||
|
}
|
||||||
|
|
||||||
|
if CONFIG.mail_enabled() {
|
||||||
|
// Only check the token if we sent out an email...
|
||||||
|
match user.email_new_token {
|
||||||
|
Some(ref val) => {
|
||||||
|
if *val != data.Token.into_string() {
|
||||||
|
err!("Token mismatch");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => err!("No email change pending"),
|
||||||
|
}
|
||||||
|
user.verified_at = Some(Utc::now().naive_utc());
|
||||||
|
} else {
|
||||||
|
user.verified_at = None;
|
||||||
|
}
|
||||||
|
|
||||||
user.email = data.NewEmail;
|
user.email = data.NewEmail;
|
||||||
|
user.email_new = None;
|
||||||
|
user.email_new_token = None;
|
||||||
|
|
||||||
user.set_password(&data.NewMasterPasswordHash);
|
user.set_password(&data.NewMasterPasswordHash);
|
||||||
user.key = data.Key;
|
user.akey = data.Key;
|
||||||
|
|
||||||
user.save(&conn)
|
user.save(&conn)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[post("/accounts/verify-email")]
|
||||||
|
fn post_verify_email(headers: Headers, _conn: DbConn) -> EmptyResult {
|
||||||
|
let user = headers.user;
|
||||||
|
|
||||||
|
if !CONFIG.mail_enabled() {
|
||||||
|
err!("Cannot verify email address");
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) {
|
||||||
|
error!("Error sending delete account email: {:#?}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
struct VerifyEmailTokenData {
|
||||||
|
UserId: String,
|
||||||
|
Token: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/accounts/verify-email-token", data = "<data>")]
|
||||||
|
fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult {
|
||||||
|
let data: VerifyEmailTokenData = data.into_inner().data;
|
||||||
|
|
||||||
|
let mut user = match User::find_by_uuid(&data.UserId, &conn) {
|
||||||
|
Some(user) => user,
|
||||||
|
None => err!("User doesn't exist"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let claims = match decode_verify_email(&data.Token) {
|
||||||
|
Ok(claims) => claims,
|
||||||
|
Err(_) => err!("Invalid claim"),
|
||||||
|
};
|
||||||
|
if claims.sub != user.uuid {
|
||||||
|
err!("Invalid claim");
|
||||||
|
}
|
||||||
|
user.verified_at = Some(Utc::now().naive_utc());
|
||||||
|
user.last_verifying_at = None;
|
||||||
|
user.login_verify_count = 0;
|
||||||
|
if let Err(e) = user.save(&conn) {
|
||||||
|
error!("Error saving email verification: {:#?}", e);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
struct DeleteRecoverData {
|
||||||
|
Email: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/accounts/delete-recover", data = "<data>")]
|
||||||
|
fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
|
||||||
|
let data: DeleteRecoverData = data.into_inner().data;
|
||||||
|
|
||||||
|
let user = User::find_by_mail(&data.Email, &conn);
|
||||||
|
|
||||||
|
if CONFIG.mail_enabled() {
|
||||||
|
if let Some(user) = user {
|
||||||
|
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid) {
|
||||||
|
error!("Error sending delete account email: {:#?}", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
// We don't support sending emails, but we shouldn't allow anybody
|
||||||
|
// to delete accounts without at least logging in... And if the user
|
||||||
|
// cannot remember their password then they will need to contact
|
||||||
|
// the administrator to delete it...
|
||||||
|
err!("Please contact the administrator to delete your account");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
struct DeleteRecoverTokenData {
|
||||||
|
UserId: String,
|
||||||
|
Token: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/accounts/delete-recover-token", data = "<data>")]
|
||||||
|
fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
|
||||||
|
let data: DeleteRecoverTokenData = data.into_inner().data;
|
||||||
|
|
||||||
|
let user = match User::find_by_uuid(&data.UserId, &conn) {
|
||||||
|
Some(user) => user,
|
||||||
|
None => err!("User doesn't exist"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let claims = match decode_delete(&data.Token) {
|
||||||
|
Ok(claims) => claims,
|
||||||
|
Err(_) => err!("Invalid claim"),
|
||||||
|
};
|
||||||
|
if claims.sub != user.uuid {
|
||||||
|
err!("Invalid claim");
|
||||||
|
}
|
||||||
|
user.delete(&conn)
|
||||||
|
}
|
||||||
|
|
||||||
#[post("/accounts/delete", data = "<data>")]
|
#[post("/accounts/delete", data = "<data>")]
|
||||||
fn post_delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
fn post_delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
|
||||||
delete_account(data, headers, conn)
|
delete_account(data, headers, conn)
|
||||||
|
@@ -88,7 +88,7 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|||||||
let domains_json = if data.exclude_domains {
|
let domains_json = if data.exclude_domains {
|
||||||
Value::Null
|
Value::Null
|
||||||
} else {
|
} else {
|
||||||
api::core::get_eq_domains(headers).unwrap().into_inner()
|
api::core::_get_eq_domains(headers, true).unwrap().into_inner()
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(Json(json!({
|
Ok(Json(json!({
|
||||||
@@ -267,7 +267,7 @@ pub fn update_cipher_from_data(
|
|||||||
err!("Attachment is not owned by the cipher")
|
err!("Attachment is not owned by the cipher")
|
||||||
}
|
}
|
||||||
|
|
||||||
saved_att.key = Some(attachment.Key);
|
saved_att.akey = Some(attachment.Key);
|
||||||
saved_att.file_name = attachment.FileName;
|
saved_att.file_name = attachment.FileName;
|
||||||
|
|
||||||
saved_att.save(&conn)?;
|
saved_att.save(&conn)?;
|
||||||
@@ -691,7 +691,7 @@ fn post_attachment(
|
|||||||
};
|
};
|
||||||
|
|
||||||
let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
|
let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
|
||||||
attachment.key = attachment_key.clone();
|
attachment.akey = attachment_key.clone();
|
||||||
attachment.save(&conn).expect("Error saving attachment");
|
attachment.save(&conn).expect("Error saving attachment");
|
||||||
}
|
}
|
||||||
_ => error!("Invalid multipart name"),
|
_ => error!("Invalid multipart name"),
|
||||||
@@ -870,8 +870,20 @@ fn move_cipher_selected_put(
|
|||||||
move_cipher_selected(data, headers, conn, nt)
|
move_cipher_selected(data, headers, conn, nt)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[post("/ciphers/purge", data = "<data>")]
|
#[derive(FromForm)]
|
||||||
fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
|
struct OrganizationId {
|
||||||
|
#[form(field = "organizationId")]
|
||||||
|
org_id: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/ciphers/purge?<organization..>", data = "<data>")]
|
||||||
|
fn delete_all(
|
||||||
|
organization: Option<Form<OrganizationId>>,
|
||||||
|
data: JsonUpcase<PasswordData>,
|
||||||
|
headers: Headers,
|
||||||
|
conn: DbConn,
|
||||||
|
nt: Notify,
|
||||||
|
) -> EmptyResult {
|
||||||
let data: PasswordData = data.into_inner().data;
|
let data: PasswordData = data.into_inner().data;
|
||||||
let password_hash = data.MasterPasswordHash;
|
let password_hash = data.MasterPasswordHash;
|
||||||
|
|
||||||
@@ -881,19 +893,40 @@ fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, nt
|
|||||||
err!("Invalid password")
|
err!("Invalid password")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Delete ciphers and their attachments
|
match organization {
|
||||||
for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) {
|
Some(org_data) => {
|
||||||
cipher.delete(&conn)?;
|
// Organization ID in query params, purging organization vault
|
||||||
}
|
match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &conn) {
|
||||||
|
None => err!("You don't have permission to purge the organization vault"),
|
||||||
|
Some(user_org) => {
|
||||||
|
if user_org.atype == UserOrgType::Owner {
|
||||||
|
Cipher::delete_all_by_organization(&org_data.org_id, &conn)?;
|
||||||
|
Collection::delete_all_by_organization(&org_data.org_id, &conn)?;
|
||||||
|
nt.send_user_update(UpdateType::Vault, &user);
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
err!("You don't have permission to purge the organization vault");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
// No organization ID in query params, purging user vault
|
||||||
|
// Delete ciphers and their attachments
|
||||||
|
for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) {
|
||||||
|
cipher.delete(&conn)?;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete folders
|
// Delete folders
|
||||||
for f in Folder::find_by_user(&user.uuid, &conn) {
|
for f in Folder::find_by_user(&user.uuid, &conn) {
|
||||||
f.delete(&conn)?;
|
f.delete(&conn)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
user.update_revision(&conn)?;
|
user.update_revision(&conn)?;
|
||||||
nt.send_user_update(UpdateType::Vault, &user);
|
nt.send_user_update(UpdateType::Vault, &user);
|
||||||
Ok(())
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
|
fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
|
||||||
|
@@ -59,7 +59,7 @@ pub struct FolderData {
|
|||||||
fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
|
fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
|
||||||
let data: FolderData = data.into_inner().data;
|
let data: FolderData = data.into_inner().data;
|
||||||
|
|
||||||
let mut folder = Folder::new(headers.user.uuid.clone(), data.Name);
|
let mut folder = Folder::new(headers.user.uuid, data.Name);
|
||||||
|
|
||||||
folder.save(&conn)?;
|
folder.save(&conn)?;
|
||||||
nt.send_folder_update(UpdateType::FolderCreate, &folder);
|
nt.send_folder_update(UpdateType::FolderCreate, &folder);
|
||||||
|
@@ -63,7 +63,7 @@ fn put_device_token(uuid: String, data: JsonUpcase<Value>, headers: Headers) ->
|
|||||||
Ok(Json(json!({
|
Ok(Json(json!({
|
||||||
"Id": headers.device.uuid,
|
"Id": headers.device.uuid,
|
||||||
"Name": headers.device.name,
|
"Name": headers.device.name,
|
||||||
"Type": headers.device.type_,
|
"Type": headers.device.atype,
|
||||||
"Identifier": headers.device.uuid,
|
"Identifier": headers.device.uuid,
|
||||||
"CreationDate": crate::util::format_date(&headers.device.created_at),
|
"CreationDate": crate::util::format_date(&headers.device.created_at),
|
||||||
})))
|
})))
|
||||||
@@ -81,6 +81,10 @@ const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");
|
|||||||
|
|
||||||
#[get("/settings/domains")]
|
#[get("/settings/domains")]
|
||||||
fn get_eq_domains(headers: Headers) -> JsonResult {
|
fn get_eq_domains(headers: Headers) -> JsonResult {
|
||||||
|
_get_eq_domains(headers, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn _get_eq_domains(headers: Headers, no_excluded: bool) -> JsonResult {
|
||||||
let user = headers.user;
|
let user = headers.user;
|
||||||
use serde_json::from_str;
|
use serde_json::from_str;
|
||||||
|
|
||||||
@@ -93,6 +97,10 @@ fn get_eq_domains(headers: Headers) -> JsonResult {
|
|||||||
global.Excluded = excluded_globals.contains(&global.Type);
|
global.Excluded = excluded_globals.contains(&global.Type);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if no_excluded {
|
||||||
|
globals.retain(|g| !g.Excluded);
|
||||||
|
}
|
||||||
|
|
||||||
Ok(Json(json!({
|
Ok(Json(json!({
|
||||||
"EquivalentDomains": equivalent_domains,
|
"EquivalentDomains": equivalent_domains,
|
||||||
"GlobalEquivalentDomains": globals,
|
"GlobalEquivalentDomains": globals,
|
||||||
@@ -132,18 +140,43 @@ fn put_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: DbC
|
|||||||
|
|
||||||
#[get("/hibp/breach?<username>")]
|
#[get("/hibp/breach?<username>")]
|
||||||
fn hibp_breach(username: String) -> JsonResult {
|
fn hibp_breach(username: String) -> JsonResult {
|
||||||
let url = format!("https://haveibeenpwned.com/api/v2/breachedaccount/{}", username);
|
|
||||||
let user_agent = "Bitwarden_RS";
|
let user_agent = "Bitwarden_RS";
|
||||||
|
let url = format!(
|
||||||
|
"https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false",
|
||||||
|
username
|
||||||
|
);
|
||||||
|
|
||||||
use reqwest::{header::USER_AGENT, Client};
|
use reqwest::{header::USER_AGENT, Client};
|
||||||
|
|
||||||
let res = Client::new().get(&url).header(USER_AGENT, user_agent).send()?;
|
if let Some(api_key) = crate::CONFIG.hibp_api_key() {
|
||||||
|
let hibp_client = Client::builder().use_sys_proxy().build()?;
|
||||||
|
|
||||||
// If we get a 404, return a 404, it means no breached accounts
|
let res = hibp_client
|
||||||
if res.status() == 404 {
|
.get(&url)
|
||||||
return Err(Error::empty().with_code(404));
|
.header(USER_AGENT, user_agent)
|
||||||
|
.header("hibp-api-key", api_key)
|
||||||
|
.send()?;
|
||||||
|
|
||||||
|
// If we get a 404, return a 404, it means no breached accounts
|
||||||
|
if res.status() == 404 {
|
||||||
|
return Err(Error::empty().with_code(404));
|
||||||
|
}
|
||||||
|
|
||||||
|
let value: Value = res.error_for_status()?.json()?;
|
||||||
|
Ok(Json(value))
|
||||||
|
} else {
|
||||||
|
Ok(Json(json!([{
|
||||||
|
"Name": "HaveIBeenPwned",
|
||||||
|
"Title": "Manual HIBP Check",
|
||||||
|
"Domain": "haveibeenpwned.com",
|
||||||
|
"BreachDate": "2019-08-18T00:00:00Z",
|
||||||
|
"AddedDate": "2019-08-18T00:00:00Z",
|
||||||
|
"Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{account}\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/account/{account}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>", account=username),
|
||||||
|
"LogoPath": "/bwrs_static/hibp.png",
|
||||||
|
"PwnCount": 0,
|
||||||
|
"DataClasses": [
|
||||||
|
"Error - No API key set!"
|
||||||
|
]
|
||||||
|
}])))
|
||||||
}
|
}
|
||||||
|
|
||||||
let value: Value = res.error_for_status()?.json()?;
|
|
||||||
Ok(Json(value))
|
|
||||||
}
|
}
|
||||||
|
@@ -77,12 +77,12 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
|
|||||||
let data: OrgData = data.into_inner().data;
|
let data: OrgData = data.into_inner().data;
|
||||||
|
|
||||||
let org = Organization::new(data.Name, data.BillingEmail);
|
let org = Organization::new(data.Name, data.BillingEmail);
|
||||||
let mut user_org = UserOrganization::new(headers.user.uuid.clone(), org.uuid.clone());
|
let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone());
|
||||||
let collection = Collection::new(org.uuid.clone(), data.CollectionName);
|
let collection = Collection::new(org.uuid.clone(), data.CollectionName);
|
||||||
|
|
||||||
user_org.key = data.Key;
|
user_org.akey = data.Key;
|
||||||
user_org.access_all = true;
|
user_org.access_all = true;
|
||||||
user_org.type_ = UserOrgType::Owner as i32;
|
user_org.atype = UserOrgType::Owner as i32;
|
||||||
user_org.status = UserOrgStatus::Confirmed as i32;
|
user_org.status = UserOrgStatus::Confirmed as i32;
|
||||||
|
|
||||||
org.save(&conn)?;
|
org.save(&conn)?;
|
||||||
@@ -127,7 +127,7 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe
|
|||||||
match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
|
match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
|
||||||
None => err!("User not part of organization"),
|
None => err!("User not part of organization"),
|
||||||
Some(user_org) => {
|
Some(user_org) => {
|
||||||
if user_org.type_ == UserOrgType::Owner {
|
if user_org.atype == UserOrgType::Owner {
|
||||||
let num_owners =
|
let num_owners =
|
||||||
UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
||||||
|
|
||||||
@@ -221,7 +221,7 @@ fn post_organization_collections(
|
|||||||
None => err!("Can't find organization details"),
|
None => err!("Can't find organization details"),
|
||||||
};
|
};
|
||||||
|
|
||||||
let collection = Collection::new(org.uuid.clone(), data.Name);
|
let collection = Collection::new(org.uuid, data.Name);
|
||||||
collection.save(&conn)?;
|
collection.save(&conn)?;
|
||||||
|
|
||||||
Ok(Json(collection.to_json()))
|
Ok(Json(collection.to_json()))
|
||||||
@@ -262,7 +262,7 @@ fn post_organization_collection_update(
|
|||||||
err!("Collection is not owned by organization");
|
err!("Collection is not owned by organization");
|
||||||
}
|
}
|
||||||
|
|
||||||
collection.name = data.Name.clone();
|
collection.name = data.Name;
|
||||||
collection.save(&conn)?;
|
collection.save(&conn)?;
|
||||||
|
|
||||||
Ok(Json(collection.to_json()))
|
Ok(Json(collection.to_json()))
|
||||||
@@ -505,7 +505,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
|
|||||||
let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
|
let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
|
||||||
let access_all = data.AccessAll.unwrap_or(false);
|
let access_all = data.AccessAll.unwrap_or(false);
|
||||||
new_user.access_all = access_all;
|
new_user.access_all = access_all;
|
||||||
new_user.type_ = new_type;
|
new_user.atype = new_type;
|
||||||
new_user.status = user_org_status;
|
new_user.status = user_org_status;
|
||||||
|
|
||||||
// If no accessAll, add the collections received
|
// If no accessAll, add the collections received
|
||||||
@@ -581,7 +581,7 @@ fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn:
|
|||||||
Some(headers.user.email),
|
Some(headers.user.email),
|
||||||
)?;
|
)?;
|
||||||
} else {
|
} else {
|
||||||
let invitation = Invitation::new(user.email.clone());
|
let invitation = Invitation::new(user.email);
|
||||||
invitation.save(&conn)?;
|
invitation.save(&conn)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -657,7 +657,7 @@ fn confirm_invite(
|
|||||||
None => err!("The specified user isn't a member of the organization"),
|
None => err!("The specified user isn't a member of the organization"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if user_to_confirm.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
|
if user_to_confirm.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
|
||||||
err!("Only Owners can confirm Managers, Admins or Owners")
|
err!("Only Owners can confirm Managers, Admins or Owners")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -666,7 +666,7 @@ fn confirm_invite(
|
|||||||
}
|
}
|
||||||
|
|
||||||
user_to_confirm.status = UserOrgStatus::Confirmed as i32;
|
user_to_confirm.status = UserOrgStatus::Confirmed as i32;
|
||||||
user_to_confirm.key = match data["Key"].as_str() {
|
user_to_confirm.akey = match data["Key"].as_str() {
|
||||||
Some(key) => key.to_string(),
|
Some(key) => key.to_string(),
|
||||||
None => err!("Invalid key provided"),
|
None => err!("Invalid key provided"),
|
||||||
};
|
};
|
||||||
@@ -735,18 +735,18 @@ fn edit_user(
|
|||||||
None => err!("The specified user isn't member of the organization"),
|
None => err!("The specified user isn't member of the organization"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if new_type != user_to_edit.type_
|
if new_type != user_to_edit.atype
|
||||||
&& (user_to_edit.type_ >= UserOrgType::Admin || new_type >= UserOrgType::Admin)
|
&& (user_to_edit.atype >= UserOrgType::Admin || new_type >= UserOrgType::Admin)
|
||||||
&& headers.org_user_type != UserOrgType::Owner
|
&& headers.org_user_type != UserOrgType::Owner
|
||||||
{
|
{
|
||||||
err!("Only Owners can grant and remove Admin or Owner privileges")
|
err!("Only Owners can grant and remove Admin or Owner privileges")
|
||||||
}
|
}
|
||||||
|
|
||||||
if user_to_edit.type_ == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner {
|
if user_to_edit.atype == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner {
|
||||||
err!("Only Owners can edit Owner users")
|
err!("Only Owners can edit Owner users")
|
||||||
}
|
}
|
||||||
|
|
||||||
if user_to_edit.type_ == UserOrgType::Owner && new_type != UserOrgType::Owner {
|
if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner {
|
||||||
// Removing owner permmission, check that there are at least another owner
|
// Removing owner permmission, check that there are at least another owner
|
||||||
let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
||||||
|
|
||||||
@@ -756,7 +756,7 @@ fn edit_user(
|
|||||||
}
|
}
|
||||||
|
|
||||||
user_to_edit.access_all = data.AccessAll;
|
user_to_edit.access_all = data.AccessAll;
|
||||||
user_to_edit.type_ = new_type as i32;
|
user_to_edit.atype = new_type as i32;
|
||||||
|
|
||||||
// Delete all the odd collections
|
// Delete all the odd collections
|
||||||
for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &user_to_edit.user_uuid, &conn) {
|
for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &user_to_edit.user_uuid, &conn) {
|
||||||
@@ -785,11 +785,11 @@ fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn:
|
|||||||
None => err!("User to delete isn't member of the organization"),
|
None => err!("User to delete isn't member of the organization"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if user_to_delete.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
|
if user_to_delete.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
|
||||||
err!("Only Owners can delete Admins or Owners")
|
err!("Only Owners can delete Admins or Owners")
|
||||||
}
|
}
|
||||||
|
|
||||||
if user_to_delete.type_ == UserOrgType::Owner {
|
if user_to_delete.atype == UserOrgType::Owner {
|
||||||
// Removing owner, check that there are at least another owner
|
// Removing owner, check that there are at least another owner
|
||||||
let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
|
||||||
|
|
||||||
@@ -842,7 +842,7 @@ fn post_org_import(
|
|||||||
None => err!("User is not part of the organization"),
|
None => err!("User is not part of the organization"),
|
||||||
};
|
};
|
||||||
|
|
||||||
if org_user.type_ < UserOrgType::Admin {
|
if org_user.atype < UserOrgType::Admin {
|
||||||
err!("Only admins or owners can import into an organization")
|
err!("Only admins or owners can import into an organization")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1,716 +0,0 @@
|
|||||||
use data_encoding::BASE32;
|
|
||||||
use rocket_contrib::json::Json;
|
|
||||||
use serde_json;
|
|
||||||
use serde_json::Value;
|
|
||||||
|
|
||||||
use crate::api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
|
|
||||||
use crate::auth::Headers;
|
|
||||||
use crate::crypto;
|
|
||||||
use crate::db::{
|
|
||||||
models::{TwoFactor, TwoFactorType, User},
|
|
||||||
DbConn,
|
|
||||||
};
|
|
||||||
use crate::error::{Error, MapResult};
|
|
||||||
|
|
||||||
use rocket::Route;
|
|
||||||
|
|
||||||
pub fn routes() -> Vec<Route> {
|
|
||||||
routes![
|
|
||||||
get_twofactor,
|
|
||||||
get_recover,
|
|
||||||
recover,
|
|
||||||
disable_twofactor,
|
|
||||||
disable_twofactor_put,
|
|
||||||
generate_authenticator,
|
|
||||||
activate_authenticator,
|
|
||||||
activate_authenticator_put,
|
|
||||||
generate_u2f,
|
|
||||||
generate_u2f_challenge,
|
|
||||||
activate_u2f,
|
|
||||||
activate_u2f_put,
|
|
||||||
generate_yubikey,
|
|
||||||
activate_yubikey,
|
|
||||||
activate_yubikey_put,
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
#[get("/two-factor")]
|
|
||||||
fn get_twofactor(headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn);
|
|
||||||
let twofactors_json: Vec<Value> = twofactors.iter().map(TwoFactor::to_json_list).collect();
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Data": twofactors_json,
|
|
||||||
"Object": "list",
|
|
||||||
"ContinuationToken": null,
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/get-recover", data = "<data>")]
|
|
||||||
fn get_recover(data: JsonUpcase<PasswordData>, headers: Headers) -> JsonResult {
|
|
||||||
let data: PasswordData = data.into_inner().data;
|
|
||||||
let user = headers.user;
|
|
||||||
|
|
||||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Code": user.totp_recover,
|
|
||||||
"Object": "twoFactorRecover"
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
struct RecoverTwoFactor {
|
|
||||||
MasterPasswordHash: String,
|
|
||||||
Email: String,
|
|
||||||
RecoveryCode: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/recover", data = "<data>")]
|
|
||||||
fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult {
|
|
||||||
let data: RecoverTwoFactor = data.into_inner().data;
|
|
||||||
|
|
||||||
use crate::db::models::User;
|
|
||||||
|
|
||||||
// Get the user
|
|
||||||
let mut user = match User::find_by_mail(&data.Email, &conn) {
|
|
||||||
Some(user) => user,
|
|
||||||
None => err!("Username or password is incorrect. Try again."),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check password
|
|
||||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
|
||||||
err!("Username or password is incorrect. Try again.")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if recovery code is correct
|
|
||||||
if !user.check_valid_recovery_code(&data.RecoveryCode) {
|
|
||||||
err!("Recovery code is incorrect. Try again.")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove all twofactors from the user
|
|
||||||
for twofactor in TwoFactor::find_by_user(&user.uuid, &conn) {
|
|
||||||
twofactor.delete(&conn)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove the recovery code, not needed without twofactors
|
|
||||||
user.totp_recover = None;
|
|
||||||
user.save(&conn)?;
|
|
||||||
Ok(Json(json!({})))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn _generate_recover_code(user: &mut User, conn: &DbConn) {
|
|
||||||
if user.totp_recover.is_none() {
|
|
||||||
let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20]));
|
|
||||||
user.totp_recover = Some(totp_recover);
|
|
||||||
user.save(conn).ok();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
struct DisableTwoFactorData {
|
|
||||||
MasterPasswordHash: String,
|
|
||||||
Type: NumberOrString,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/disable", data = "<data>")]
|
|
||||||
fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
let data: DisableTwoFactorData = data.into_inner().data;
|
|
||||||
let password_hash = data.MasterPasswordHash;
|
|
||||||
let user = headers.user;
|
|
||||||
|
|
||||||
if !user.check_valid_password(&password_hash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
let type_ = data.Type.into_i32()?;
|
|
||||||
|
|
||||||
if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) {
|
|
||||||
twofactor.delete(&conn)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Enabled": false,
|
|
||||||
"Type": type_,
|
|
||||||
"Object": "twoFactorProvider"
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[put("/two-factor/disable", data = "<data>")]
|
|
||||||
fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
disable_twofactor(data, headers, conn)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/get-authenticator", data = "<data>")]
|
|
||||||
fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
let data: PasswordData = data.into_inner().data;
|
|
||||||
let user = headers.user;
|
|
||||||
|
|
||||||
if !user.check_valid_password(&data.MasterPasswordHash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
let type_ = TwoFactorType::Authenticator as i32;
|
|
||||||
let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn);
|
|
||||||
|
|
||||||
let (enabled, key) = match twofactor {
|
|
||||||
Some(tf) => (true, tf.data),
|
|
||||||
_ => (false, BASE32.encode(&crypto::get_random(vec![0u8; 20]))),
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Enabled": enabled,
|
|
||||||
"Key": key,
|
|
||||||
"Object": "twoFactorAuthenticator"
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
struct EnableAuthenticatorData {
|
|
||||||
MasterPasswordHash: String,
|
|
||||||
Key: String,
|
|
||||||
Token: NumberOrString,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/authenticator", data = "<data>")]
|
|
||||||
fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
let data: EnableAuthenticatorData = data.into_inner().data;
|
|
||||||
let password_hash = data.MasterPasswordHash;
|
|
||||||
let key = data.Key;
|
|
||||||
let token = data.Token.into_i32()? as u64;
|
|
||||||
|
|
||||||
let mut user = headers.user;
|
|
||||||
|
|
||||||
if !user.check_valid_password(&password_hash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate key as base32 and 20 bytes length
|
|
||||||
let decoded_key: Vec<u8> = match BASE32.decode(key.as_bytes()) {
|
|
||||||
Ok(decoded) => decoded,
|
|
||||||
_ => err!("Invalid totp secret"),
|
|
||||||
};
|
|
||||||
|
|
||||||
if decoded_key.len() != 20 {
|
|
||||||
err!("Invalid key length")
|
|
||||||
}
|
|
||||||
|
|
||||||
let type_ = TwoFactorType::Authenticator;
|
|
||||||
let twofactor = TwoFactor::new(user.uuid.clone(), type_, key.to_uppercase());
|
|
||||||
|
|
||||||
// Validate the token provided with the key
|
|
||||||
validate_totp_code(token, &twofactor.data)?;
|
|
||||||
|
|
||||||
_generate_recover_code(&mut user, &conn);
|
|
||||||
twofactor.save(&conn)?;
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Enabled": true,
|
|
||||||
"Key": key,
|
|
||||||
"Object": "twoFactorAuthenticator"
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[put("/two-factor/authenticator", data = "<data>")]
|
|
||||||
fn activate_authenticator_put(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
activate_authenticator(data, headers, conn)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn validate_totp_code_str(totp_code: &str, secret: &str) -> EmptyResult {
|
|
||||||
let totp_code: u64 = match totp_code.parse() {
|
|
||||||
Ok(code) => code,
|
|
||||||
_ => err!("TOTP code is not a number"),
|
|
||||||
};
|
|
||||||
|
|
||||||
validate_totp_code(totp_code, secret)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn validate_totp_code(totp_code: u64, secret: &str) -> EmptyResult {
|
|
||||||
use data_encoding::BASE32;
|
|
||||||
use oath::{totp_raw_now, HashType};
|
|
||||||
|
|
||||||
let decoded_secret = match BASE32.decode(secret.as_bytes()) {
|
|
||||||
Ok(s) => s,
|
|
||||||
Err(_) => err!("Invalid TOTP secret"),
|
|
||||||
};
|
|
||||||
|
|
||||||
let generated = totp_raw_now(&decoded_secret, 6, 0, 30, &HashType::SHA1);
|
|
||||||
if generated != totp_code {
|
|
||||||
err!("Invalid TOTP code");
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
use u2f::messages::{RegisterResponse, SignResponse, U2fSignRequest};
|
|
||||||
use u2f::protocol::{Challenge, U2f};
|
|
||||||
use u2f::register::Registration;
|
|
||||||
|
|
||||||
use crate::CONFIG;
|
|
||||||
|
|
||||||
const U2F_VERSION: &str = "U2F_V2";
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref APP_ID: String = format!("{}/app-id.json", &CONFIG.domain());
|
|
||||||
static ref U2F: U2f = U2f::new(APP_ID.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/get-u2f", data = "<data>")]
|
|
||||||
fn generate_u2f(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
if !CONFIG.domain_set() {
|
|
||||||
err!("`DOMAIN` environment variable is not set. U2F disabled")
|
|
||||||
}
|
|
||||||
let data: PasswordData = data.into_inner().data;
|
|
||||||
|
|
||||||
if !headers.user.check_valid_password(&data.MasterPasswordHash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
let (enabled, keys) = get_u2f_registrations(&headers.user.uuid, &conn)?;
|
|
||||||
let keys_json: Vec<Value> = keys.iter().map(U2FRegistration::to_json).collect();
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"Enabled": enabled,
|
|
||||||
"Keys": keys_json,
|
|
||||||
"Object": "twoFactorU2f"
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[post("/two-factor/get-u2f-challenge", data = "<data>")]
|
|
||||||
fn generate_u2f_challenge(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
|
|
||||||
let data: PasswordData = data.into_inner().data;
|
|
||||||
|
|
||||||
if !headers.user.check_valid_password(&data.MasterPasswordHash) {
|
|
||||||
err!("Invalid password");
|
|
||||||
}
|
|
||||||
|
|
||||||
let _type = TwoFactorType::U2fRegisterChallenge;
|
|
||||||
let challenge = _create_u2f_challenge(&headers.user.uuid, _type, &conn).challenge;
|
|
||||||
|
|
||||||
Ok(Json(json!({
|
|
||||||
"UserId": headers.user.uuid,
|
|
||||||
"AppId": APP_ID.to_string(),
|
|
||||||
"Challenge": challenge,
|
|
||||||
"Version": U2F_VERSION,
|
|
||||||
})))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
struct EnableU2FData {
|
|
||||||
Id: NumberOrString, // 1..5
|
|
||||||
Name: String,
|
|
||||||
MasterPasswordHash: String,
|
|
||||||
DeviceResponse: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
// This struct is referenced from the U2F lib
// because it doesn't implement Deserialize.
// serde "remote derive": generates (de)serialization code for the library's
// `Registration` type without owning it.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(remote = "Registration")]
struct RegistrationDef {
    key_handle: Vec<u8>,
    pub_key: Vec<u8>,
    attestation_cert: Option<Vec<u8>>,
}
|
|
||||||
|
|
||||||
// One registered U2F device, JSON-serialized into the user's two-factor row.
#[derive(Serialize, Deserialize)]
struct U2FRegistration {
    // Slot id assigned at registration time (1..5 from the client).
    id: i32,
    // User-chosen device name.
    name: String,
    #[serde(with = "RegistrationDef")]
    reg: Registration, // key material from the U2F library
    // Last signature counter seen for this device; a regression marks it compromised.
    counter: u32,
    compromised: bool,
}
|
|
||||||
|
|
||||||
impl U2FRegistration {
    // Public JSON view of a registration: id/name/compromised only,
    // deliberately omitting the key material in `reg`.
    fn to_json(&self) -> Value {
        json!({
            "Id": self.id,
            "Name": self.name,
            "Compromised": self.compromised,
        })
    }
}
|
|
||||||
|
|
||||||
// This struct is copied from the U2F lib
// to add an optional error code.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct RegisterResponseCopy {
    pub registration_data: String,
    pub version: String,
    pub client_data: String,

    // Not part of the library type: set by the client when registration failed.
    pub error_code: Option<NumberOrString>,
}
|
|
||||||
|
|
||||||
impl Into<RegisterResponse> for RegisterResponseCopy {
|
|
||||||
fn into(self) -> RegisterResponse {
|
|
||||||
RegisterResponse {
|
|
||||||
registration_data: self.registration_data,
|
|
||||||
version: self.version,
|
|
||||||
client_data: self.client_data,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Finishes U2F device registration: validates the password, consumes the
// pending registration challenge, verifies the browser's response with the
// U2F library, and stores the new key alongside any existing ones.
#[post("/two-factor/u2f", data = "<data>")]
fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableU2FData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    // The challenge must have been created by a prior call to
    // `generate_u2f_challenge`.
    let tf_type = TwoFactorType::U2fRegisterChallenge as i32;
    let tf_challenge = match TwoFactor::find_by_user_and_type(&user.uuid, tf_type, &conn) {
        Some(c) => c,
        None => err!("Can't recover challenge"),
    };

    // Challenges are single-use: delete as soon as it has been loaded.
    let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
    tf_challenge.delete(&conn)?;

    let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?;

    // A missing error code counts as success ("0").
    let error_code = response
        .error_code
        .clone()
        .map_or("0".into(), NumberOrString::into_string);

    if error_code != "0" {
        err!("Error registering U2F token")
    }

    // Cryptographic verification of the registration response.
    let registration = U2F.register_response(challenge.clone(), response.into())?;
    let full_registration = U2FRegistration {
        id: data.Id.into_i32()?,
        name: data.Name,
        reg: registration,
        compromised: false,
        counter: 0,
    };

    let mut regs = get_u2f_registrations(&user.uuid, &conn)?.1;

    // TODO: Check that there is no repeat Id
    regs.push(full_registration);
    save_u2f_registrations(&user.uuid, &regs, &conn)?;

    // Enabling a second factor (re)generates the account recovery code.
    _generate_recover_code(&mut user, &conn);

    let keys_json: Vec<Value> = regs.iter().map(U2FRegistration::to_json).collect();
    Ok(Json(json!({
        "Enabled": true,
        "Keys": keys_json,
        "Object": "twoFactorU2f"
    })))
}
|
|
||||||
|
|
||||||
// PUT alias for `activate_u2f`; the web vault calls both verbs.
#[put("/two-factor/u2f", data = "<data>")]
fn activate_u2f_put(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_u2f(data, headers, conn)
}
|
|
||||||
|
|
||||||
// Generates a fresh U2F challenge of the given kind and persists it for the
// user, so the matching activate/validate endpoint can recover it later.
// NOTE(review): `unwrap`/`expect` here panic on RNG or DB failure instead of
// returning an error — propagating them would require a signature change.
fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge {
    let challenge = U2F.generate_challenge().unwrap();

    TwoFactor::new(user_uuid.into(), type_, serde_json::to_string(&challenge).unwrap())
        .save(conn)
        .expect("Error saving challenge");

    challenge
}
|
|
||||||
|
|
||||||
fn save_u2f_registrations(user_uuid: &str, regs: &[U2FRegistration], conn: &DbConn) -> EmptyResult {
|
|
||||||
TwoFactor::new(user_uuid.into(), TwoFactorType::U2f, serde_json::to_string(regs)?).save(&conn)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Loads the user's U2F registrations from the two-factor table.
// Returns `(enabled, registrations)`; no row at all means "disabled, no keys".
// Data in the pre-multi-device format is transparently migrated to the new
// `U2FRegistration` list and written back.
fn get_u2f_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec<U2FRegistration>), Error> {
    let type_ = TwoFactorType::U2f as i32;
    let (enabled, regs) = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) {
        Some(tf) => (tf.enabled, tf.data),
        None => return Ok((false, Vec::new())), // If no data, return empty list
    };

    let data = match serde_json::from_str(&regs) {
        Ok(d) => d,
        Err(_) => {
            // If error, try old format
            let mut old_regs = _old_parse_registrations(&regs);

            // The legacy format stored exactly one bare registration.
            if old_regs.len() != 1 {
                err!("The old U2F format only allows one device")
            }

            // Convert to new format
            let new_regs = vec![U2FRegistration {
                id: 1,
                name: "Unnamed U2F key".into(),
                reg: old_regs.remove(0),
                compromised: false,
                counter: 0,
            }];

            // Save new format
            save_u2f_registrations(user_uuid, &new_regs, &conn)?;

            new_regs
        }
    };

    Ok((enabled, data))
}
|
|
||||||
|
|
||||||
fn _old_parse_registrations(registations: &str) -> Vec<Registration> {
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct Helper(#[serde(with = "RegistrationDef")] Registration);
|
|
||||||
|
|
||||||
let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data");
|
|
||||||
|
|
||||||
regs.into_iter()
|
|
||||||
.map(|r| serde_json::from_value(r).unwrap())
|
|
||||||
.map(|Helper(r)| r)
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> {
|
|
||||||
let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn);
|
|
||||||
|
|
||||||
let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?
|
|
||||||
.1
|
|
||||||
.into_iter()
|
|
||||||
.map(|r| r.reg)
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
if registrations.is_empty() {
|
|
||||||
err!("No U2F devices registered")
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(U2F.sign_request(challenge, registrations))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verifies a login-time U2F signature. The sign response is tried against
// every registered device; on a match the device's signature counter is
// updated and persisted. A counter that goes backwards marks the device as
// compromised (possible cloned key) and rejects the login.
pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult {
    let challenge_type = TwoFactorType::U2fLoginChallenge as i32;
    let tf_challenge = TwoFactor::find_by_user_and_type(user_uuid, challenge_type, &conn);

    // The login challenge is single-use: delete it as soon as it's loaded.
    let challenge = match tf_challenge {
        Some(tf_challenge) => {
            let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
            tf_challenge.delete(&conn)?;
            challenge
        }
        None => err!("Can't recover login challenge"),
    };
    let response: SignResponse = serde_json::from_str(response)?;
    let mut registrations = get_u2f_registrations(user_uuid, conn)?.1;
    if registrations.is_empty() {
        err!("No U2F devices registered")
    }

    for reg in &mut registrations {
        let response = U2F.sign_response(challenge.clone(), reg.reg.clone(), response.clone(), reg.counter);
        match response {
            Ok(new_counter) => {
                // Signature verified against this device: persist the new counter.
                reg.counter = new_counter;
                save_u2f_registrations(user_uuid, &registrations, &conn)?;

                return Ok(());
            }
            Err(u2f::u2ferror::U2fError::CounterTooLow) => {
                // Counter regressed — the hardware key may have been cloned.
                reg.compromised = true;
                save_u2f_registrations(user_uuid, &registrations, &conn)?;

                err!("This device might be compromised!");
            }
            Err(e) => {
                // Any other failure just means "not this device"; try the next one.
                warn!("E {:#}", e);
                // break;
            }
        }
    }
    err!("error verifying response")
}
|
|
||||||
|
|
||||||
// Request payload for enabling Yubikey OTP: up to five key slots plus NFC flag.
#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct EnableYubikeyData {
    // Re-authentication: this is a sensitive action.
    MasterPasswordHash: String,
    // Each slot holds either a full OTP or a bare 12-character key id; empty
    // slots are None.
    Key1: Option<String>,
    Key2: Option<String>,
    Key3: Option<String>,
    Key4: Option<String>,
    Key5: Option<String>,
    Nfc: bool,
}
|
|
||||||
|
|
||||||
// Stored (JSON-serialized) Yubikey configuration: the registered
// 12-character key ids plus whether NFC is enabled.
#[derive(Deserialize, Serialize, Debug)]
#[allow(non_snake_case)]
pub struct YubikeyMetadata {
    Keys: Vec<String>, // 12-character Yubikey public ids
    pub Nfc: bool,
}
|
|
||||||
|
|
||||||
use yubico::config::Config;
|
|
||||||
use yubico::Yubico;
|
|
||||||
|
|
||||||
fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
|
|
||||||
let data_keys = [&data.Key1, &data.Key2, &data.Key3, &data.Key4, &data.Key5];
|
|
||||||
|
|
||||||
data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value {
|
|
||||||
let mut result = json!({});
|
|
||||||
|
|
||||||
for (i, key) in yubikeys.into_iter().enumerate() {
|
|
||||||
result[format!("Key{}", i + 1)] = Value::String(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_yubico_credentials() -> Result<(String, String), Error> {
|
|
||||||
match (CONFIG.yubico_client_id(), CONFIG.yubico_secret_key()) {
|
|
||||||
(Some(id), Some(secret)) => Ok((id, secret)),
|
|
||||||
_ => err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. Yubikey OTP Disabled"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn verify_yubikey_otp(otp: String) -> EmptyResult {
|
|
||||||
let (yubico_id, yubico_secret) = get_yubico_credentials()?;
|
|
||||||
|
|
||||||
let yubico = Yubico::new();
|
|
||||||
let config = Config::default().set_client_id(yubico_id).set_key(yubico_secret);
|
|
||||||
|
|
||||||
match CONFIG.yubico_server() {
|
|
||||||
Some(server) => yubico.verify(otp, config.set_api_hosts(vec![server])),
|
|
||||||
None => yubico.verify(otp, config),
|
|
||||||
}
|
|
||||||
.map_res("Failed to verify OTP")
|
|
||||||
.and(Ok(()))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the user's current Yubikey OTP configuration.
#[post("/two-factor/get-yubikey", data = "<data>")]
fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
    // Make sure the credentials are set
    get_yubico_credentials()?;

    let data: PasswordData = data.into_inner().data;
    let user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    let user_uuid = &user.uuid;
    let yubikey_type = TwoFactorType::YubiKey as i32;

    let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &conn);

    if let Some(r) = r {
        let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?;

        let mut result = jsonify_yubikeys(yubikey_metadata.Keys);

        result["Enabled"] = Value::Bool(true);
        result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
        // NOTE(review): "twoFactorU2f" matches the other 2FA endpoints in this
        // file but looks copy-pasted — confirm the client expects this name.
        result["Object"] = Value::String("twoFactorU2f".to_owned());

        Ok(Json(result))
    } else {
        Ok(Json(json!({
            "Enabled": false,
            "Object": "twoFactorU2f",
        })))
    }
}
|
|
||||||
|
|
||||||
// Enables Yubikey OTP 2FA: verifies each submitted OTP against the Yubico
// service, then stores only the 12-character key ids. Submitting no keys at
// all reports the feature as disabled.
#[post("/two-factor/yubikey", data = "<data>")]
fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
    let data: EnableYubikeyData = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password");
    }

    // Check if we already have some data
    let mut yubikey_data = match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &conn) {
        Some(data) => data,
        None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()),
    };

    let yubikeys = parse_yubikeys(&data);

    if yubikeys.is_empty() {
        return Ok(Json(json!({
            "Enabled": false,
            "Object": "twoFactorU2f",
        })));
    }

    // Ensure they are valid OTPs
    for yubikey in &yubikeys {
        // A bare 12-character value is just the key id, not a full OTP, so it
        // cannot be verified against the service.
        if yubikey.len() == 12 {
            // YubiKey ID
            continue;
        }

        verify_yubikey_otp(yubikey.to_owned()).map_res("Invalid Yubikey OTP provided")?;
    }

    // Only the 12-character public id prefix is stored; the remainder of an
    // OTP is single-use.
    let yubikey_ids: Vec<String> = yubikeys.into_iter().map(|x| (&x[..12]).to_owned()).collect();

    let yubikey_metadata = YubikeyMetadata {
        Keys: yubikey_ids,
        Nfc: data.Nfc,
    };

    yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap();
    yubikey_data.save(&conn)?;

    // Enabling a second factor (re)generates the account recovery code.
    _generate_recover_code(&mut user, &conn);

    let mut result = jsonify_yubikeys(yubikey_metadata.Keys);

    result["Enabled"] = Value::Bool(true);
    result["Nfc"] = Value::Bool(yubikey_metadata.Nfc);
    result["Object"] = Value::String("twoFactorU2f".to_owned());

    Ok(Json(result))
}
|
|
||||||
|
|
||||||
// PUT alias for `activate_yubikey`; the web vault calls both verbs.
#[put("/two-factor/yubikey", data = "<data>")]
fn activate_yubikey_put(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn: DbConn) -> JsonResult {
    activate_yubikey(data, headers, conn)
}
|
|
||||||
|
|
||||||
pub fn validate_yubikey_login(response: &str, twofactor_data: &str) -> EmptyResult {
|
|
||||||
if response.len() != 44 {
|
|
||||||
err!("Invalid Yubikey OTP length");
|
|
||||||
}
|
|
||||||
|
|
||||||
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata");
|
|
||||||
let response_id = &response[..12];
|
|
||||||
|
|
||||||
if !yubikey_metadata.Keys.contains(&response_id.to_owned()) {
|
|
||||||
err!("Given Yubikey is not registered");
|
|
||||||
}
|
|
||||||
|
|
||||||
let result = verify_yubikey_otp(response.to_owned());
|
|
||||||
|
|
||||||
match result {
|
|
||||||
Ok(_answer) => Ok(()),
|
|
||||||
Err(_e) => err!("Failed to verify Yubikey against OTP server"),
|
|
||||||
}
|
|
||||||
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user