Mirror of https://github.com/dani-garcia/vaultwarden.git, synced 2025-09-10 18:55:57 +03:00
Compare commits
408 Commits
59e50b03bd 0a88f020e1 c058a1d63c 96a189deb9 8c229920ad d592323e39
95dd1cd7ad 36ae946655 24edc94f9d 4deae76347 8ee0c57224 cb6f392774
5c6081c4e2 88c56de97b e274af6e3d a0ece3754b 0bcc2ae7ab bdb90460c4
824137a02c 2edc699eac 8e79366076 c1e39b182f 13eb276085 4cec502f7b
2545469713 f09996a21d 5cabf4d040 a03db6d224 8d1b72b951 912e1f93b7
a5aa4d9b54 e777be3dde b5441f6b77 dbbd63e519 adc443ea80 0d32179d07
b45b02b37e 12928b832c 1e224220a8 3471e2660f 924ba153aa bd1e8be328
cf5a985b31 607521c88f 486c7d8c56 4b71197c97 8b8839d049 b209c1bc4d
2b8d08a3f4 cbadf00941 c5b7447dac 64d6f72e6c a19a6fb016 b889e5185e
cd83a9e7b2 748c825202 204993568a 70be2d93ce f5638716d2 fbc2fad9c9
3f39e35123 3f6809bcdf 9ff577a7b4 c52adef919 cbb92bcbc0 948798a84f
2ffc3eac4d 0ff7fd939e ca7c5129b2 07e0fdbd2a b4dfc24040 85dbf4e16c
efc65b93f8 9a0fe6f617 3442eb1b9d e449912f05 72a46fb386 d29b6bee28
e2e3712921 00a11b1b78 77b78f0991 ee550be80c 97d41c2686 fccc0a4b05
57b1d3f850 77d40833d9 7814218208 95a7ffdf6b ebc47dc161 cd8acc2e8c
3b7a5bd102 d3054d4f83 5ac66b05e3 83fd44eeef 2edecf34ff 18bc8331f9
7d956c5117 603a964579 dc515b83f3 9466f02696 d3bd2774dc f482585d7c
2cde814aaa d989a19f76 d292269ea0 ebf40099f2 0586c00285 bb9ddd5680
cb1663fc12 45d9d8db94 edc482c8ea 6e5c03cc78 881c1978eb 662bc27523
b4b62c22a4 05569147af 99a635d327 e6b763026e c182583e09 d821389c2e
be2916333b 9124d8a3fb 7b1da527a6 e7b8602e1f d6e9af909b acdd42935b
8367d1d715 56f12dc982 4c07f05b3a b73ff886c3 2e7bd62353 1264eb640a
3a90364b32 f5f9861a78 f9408a00c6 ae8bf954c1 c656f2f694 eea3f13bb3
df8114f8be dda244edd8 cce3ce816c 65c0d1064b 5a2f968d7a 16d88402cb
7dcf18151d e3404dd322 bfc517ee80 4a7d2a1e28 66a68f6d22 469318bcbd
c07c9995ea 2c2276c5bb 672a245548 5d50b1ee3c c99df1c310 591ae10144
2d2745195e 026f9da035 d23d4f2c1d 515b87755a d8ea3d2bfe ee7837d022
07743e490b 9101d6e48f 27c23b60b8 e7b6238f43 ad2225b6e5 5609103a97
6d460b44b0 efd8d9f528 29aedd388e 27e0e41835 0b60f20eb3 8be2ed6255
c9c3f07171 8a21c6df10 df71f57d86 60e39a9dd1 bc6a53b847 05a1137828
cef38bf40b 0b13a8c4aa 3fbd7919d8 5f688ff209 f6cfb5bf21 df8c9f39ac
d7ee7caed4 2e300da057 3fb63bbe8c 9671ed4cca d10ef3fd4b dd0b847912
8c34ff5d23 15750256e2 6989fc7bdb 4923614730 76f38621de fff72889f6
12af32b9ea 9add8e19eb 5710703c50 1322b876e9 9ed2ba61c6 62a461ae15
6f7220b68e 4859932d35 ee277de707 c11f47903a 6a5f1613e7 dc36f0cb6c
6c38026ef5 4c9cc9890c f57b407c60 ce0651b79c edc26cb1e1 ff759397f6
badd22ac3d 6f78395ef7 5fb6531db8 eb9d5e1196 233b48bdad e22e290f67
ab95a69dc8 85c8a01f4a 42af7c6dab 08a445e2ac c0b2877da3 cf8ca85289
a8a92f6c51 95f833aacd 4f45cc081f 2a4cd24c60 ef551f4cc6 4545f271c3
2768396a72 5521a86693 3160780549 f0701657a9 21325b7523 874f5c34bd
eadab2e9ca 253faaf023 3d843a6a51 03fdf36bf9 fdcc32beda bf20355c5e
0136c793b4 2e12114350 f25ab42ebb d3a8a278e6 8d9827c55f cad63f9761
bf446f44f9 621f607297 d89bd707a8 754087b990 cfbeb56371 3bb46ce496
c5832f2b30 d9406b0095 2475c36a75 c384f9c0ca afbfebf659 6b686c18f7
349cb33fbd d7542b6818 7976d39d9d 5ee9676941 4b40cda910 4689ed7b30
084bc2aee3 6d7e15b2fd 61515160a7 a25bfdd16d e93538cea9 b4244b28b6
43f9038325 27872f476e 339044f8aa 0718a090e1 9e1f030a80 04922f6aa0
7d2bc9e162 c6c00729e3 10756b0920 1eb1502a07 30e72a96a9 2646db78a4
f5358b13f5 d156170971 d9bfe847db 473f8b8e31 aeb4b4c8a5 980a3e45db
5794969f5b 8b5b06c3d1 b50c27b619 5ee04e31e5 bf6ae91a6d 828e3a5795
7b5bcd45f8 72de16fb86 0b903fc5f4 4df686f49e d7eeaaf249 84fb6aaddb
a744b9437a 6027b969f5 93805a5d7b 71da961ecd dd421809e5 8526055bb7
a79334ea4c 274ea9a4f2 8743d18aca d3773a433a 0f0a87becf 4b57bb8eeb
3b27dbb0aa ff2fbd322e 9636f33fdb bbe2a1b264 79fdfd6524 d086a99e5b
22b0b95209 28d1588e73 f3b1a5ff3e 330e90a6ac 8fac72db53 820c8b0dce
8b4a6f2a64 ef63342e20 89840790e7 a72809b225 9976e4736e dc92f07232
3db815b969 ade293cf52 877408b808 86ed75bf7c 20d8d800f3 7ce06b3808
08ca47cadb 0bd3a26051 5272b465cc b75f38033b 637f655b6f b3f7394c06
1a5ecd4d4a bd65c4e312 bce656c787 06522c9ac0 9026cc8d42 574b040142
48113b7bd9 c13f115473 1e20f9f1d8 bc461d9baa 5016e30cf2 f42ac5f2c0
2a60414031 9a2a304860 feb74a5e86 c0e350b734 bef1183c49 f935f5cf46
07388d327f 4de16b2d17 da068a43c1 9657463717 69036cc6a4 700e084101
a1dc47b826 86de0ca17b 80414f8452 fc0e239bdf 928ad6c1d8 9d027b96d8
ddd49596ba b8cabadd43 ce42b07a80 bfd93e5b13 a797459560 6cbb683f99
92bbb98d48 834c847746 97aa407fe4 86a254ad9e 64c38856cc b4f6206eda
82f828a327 d8116a80df e0aec8d373 1ce2587330 20964ac2d8 71a10e0378
9bf13b7872 d420992f8c c259a0e3e2 432be274ba 484bf5b703 979b6305af
4bf32af60e 0e4a746eeb 2fe919cc5e bcd750695f 19b6bb0fd6 60f6a350be
.dockerignore
@@ -9,10 +9,6 @@ data
.idea
*.iml

# Git files
.git
.gitignore

# Documentation
*.md
129 .env.template
@@ -4,12 +4,27 @@
## Main data folder
# DATA_FOLDER=data

## Individual folders, these override %DATA_FOLDER%
## Database URL
## When using SQLite, this is the path to the DB file, defaults to %DATA_FOLDER%/db.sqlite3
## When using MySQL, this is the URL to the DB, including username and password:
## Format: mysql://[user[:password]@]host/database_name
# DATABASE_URL=data/db.sqlite3

## Individual folders, these override %DATA_FOLDER%
# RSA_KEY_FILENAME=data/rsa_key
# ICON_CACHE_FOLDER=data/icon_cache
# ATTACHMENTS_FOLDER=data/attachments

## Templates data folder, by default uses embedded templates
## Check source code to see the format
# TEMPLATES_FOLDER=/path/to/templates
## Automatically reload the templates for every request, slow, use only for development
# RELOAD_TEMPLATES=false

## Client IP Header, used to identify the IP of the client, defaults to "X-Client-IP"
## Set to the string "none" (without quotes), to disable any headers and just use the remote IP
# IP_HEADER=X-Client-IP

## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
# ICON_CACHE_TTL=2592000
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
@@ -19,35 +34,95 @@
# WEB_VAULT_FOLDER=web-vault/
# WEB_VAULT_ENABLED=true

## Enables websocket notifications
# WEBSOCKET_ENABLED=false

## Controls the WebSocket server address and port
# WEBSOCKET_ADDRESS=0.0.0.0
# WEBSOCKET_PORT=3012

## Enable extended logging
## This shows timestamps and allows logging to file and to syslog
### To enable logging to file, use the LOG_FILE env variable
### To enable syslog, you need to compile with `cargo build --features=enable_syslog'
## Enable extended logging, which shows timestamps and targets in the logs
# EXTENDED_LOGGING=true

## Logging to file
## This requires extended logging
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
# LOG_FILE=/path/to/log

## Use a local favicon extractor
## Set to false to use bitwarden's official icon servers
## Set to true to use the local version, which is not as smart,
## but it doesn't send the cipher domains to bitwarden's servers
# LOCAL_ICON_EXTRACTOR=false
## Logging to Syslog
## This requires extended logging
## It's recommended to also set 'ROCKET_CLI_COLORS=off'
# USE_SYSLOG=false

## Log level
## Change the verbosity of the log output
## Valid values are "trace", "debug", "info", "warn", "error" and "off"
## Setting it to "trace" or "debug" would also show logs for mounted
## routes and static file, websocket and alive requests
# LOG_LEVEL=Info

## Enable WAL for the DB
## Set to false to avoid enabling WAL during startup.
## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
## this setting only prevents bitwarden_rs from automatically enabling it on start.
## Please read the project wiki page about this setting first before changing the value, as it can
## cause performance degradation or might render the service unable to start.
# ENABLE_DB_WAL=true

## Disable icon downloading
## Set to true to disable icon downloading, this would still serve icons from $ICON_CACHE_FOLDER,
## but it won't produce any external network request. Needs $ICON_CACHE_TTL set to 0,
## otherwise it will delete the icons and they won't be downloaded again.
# DISABLE_ICON_DOWNLOAD=false

## Icon download timeout
## Configure the timeout value when downloading the favicons.
## The default is 10 seconds, but this could be too low on slower network connections
# ICON_DOWNLOAD_TIMEOUT=10

## Icon blacklist Regex
## Any domains or IPs that match this regex won't be fetched by the icon service.
## Useful to hide other servers in the local network. Check the WIKI for more details
# ICON_BLACKLIST_REGEX=192\.168\.1\.[0-9].*^

## Any IP which is not defined as a global IP will be blacklisted.
## Useful to secure your internal environment: See https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block
# ICON_BLACKLIST_NON_GLOBAL_IPS=true

## Disable 2FA remember
## Enabling this would force the users to use a second factor to login every time.
## Note that the checkbox would still be present, but ignored.
# DISABLE_2FA_REMEMBER=false

## Controls if new users can register
# SIGNUPS_ALLOWED=true

## Controls if new users need to verify their email address upon registration
## Note that setting this option to true prevents logins until the email address has been verified!
## The welcome email will include a verification link, and login attempts will periodically
## trigger another verification email to be sent.
# SIGNUPS_VERIFY=false

## If SIGNUPS_VERIFY is set to true, this limits how many seconds after the last time
## an email verification link has been sent another verification email will be sent
# SIGNUPS_VERIFY_RESEND_TIME=3600

## If SIGNUPS_VERIFY is set to true, this limits how many times an email verification
## email will be re-sent upon an attempted login.
# SIGNUPS_VERIFY_RESEND_LIMIT=6

## Controls if new users from a list of comma-separated domains can register
## even if SIGNUPS_ALLOWED is set to false
# SIGNUPS_DOMAINS_WHITELIST=example.com,example.net,example.org

## Token for the admin interface, preferably use a long random string
## One option is to use 'openssl rand -base64 48'
## If not set, the admin panel is disabled
# ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp

## Enable this to bypass the admin panel security. This option is only
## meant to be used with a separate auth layer in front
# DISABLE_ADMIN_TOKEN=false

## Allows org admins to invite users, even when signups are disabled
# INVITATIONS_ALLOWED=true

@@ -60,7 +135,8 @@

## Domain settings
## The domain must match the address from where you access the server
## Unless you are using U2F, or having problems with attachments not downloading, there is no need to change this
## It's recommended to configure this value, otherwise certain functionality might not work,
## like attachment downloads, email links and U2F.
## For U2F to work, the server must use HTTPS, you can use Let's Encrypt for free certs
# DOMAIN=https://bw.domain.tld:8443

@@ -72,6 +148,29 @@
# YUBICO_SECRET_KEY=AAAAAAAAAAAAAAAAAAAAAAAA
# YUBICO_SERVER=http://yourdomain.com/wsapi/2.0/verify

## Duo Settings
## You need to configure all options to enable global Duo support, otherwise users would need to configure it themselves
## Create an account and protect an application as mentioned in this link (only the first step, not the rest):
## https://help.bitwarden.com/article/setup-two-step-login-duo/#create-a-duo-security-account
## Then set the following options, based on the values obtained from the last step:
# DUO_IKEY=<Integration Key>
# DUO_SKEY=<Secret Key>
# DUO_HOST=<API Hostname>
## After that, you should be able to follow the rest of the guide linked above,
## ignoring the fields that ask for the values that you already configured beforehand.

## Authenticator Settings
## Disable authenticator time drifted codes to be valid.
## TOTP codes of the previous and next 30 seconds will be invalid
##
## According to RFC 6238 (https://tools.ietf.org/html/rfc6238),
## we allow by default the TOTP code which was valid one step back and one in the future.
## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes.
## You can disable this, so that only the current TOTP code is allowed.
## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid.
## In any case, if a code has been used it cannot be used again; codes which predate it will also be invalid.
# AUTHENTICATOR_DISABLE_TIME_DRIFT = false

## Rocket specific settings, check Rocket documentation to learn more
# ROCKET_ENV=staging
# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
@@ -79,10 +178,16 @@
# ROCKET_TLS={certs="/path/to/certs.pem",key="/path/to/key.pem"}

## Mail specific settings, set SMTP_HOST and SMTP_FROM to enable the mail service.
## To make sure the email links are pointing to the correct host, set the DOMAIN variable.
## Note: if SMTP_USERNAME is specified, SMTP_PASSWORD is mandatory
# SMTP_HOST=smtp.domain.tld
# SMTP_FROM=bitwarden-rs@domain.tld
# SMTP_FROM_NAME=Bitwarden_RS
# SMTP_PORT=587
# SMTP_SSL=true
# SMTP_USERNAME=username
# SMTP_PASSWORD=password
# SMTP_AUTH_MECHANISM="Plain"
# SMTP_TIMEOUT=15

# vim: syntax=ini
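The AUTHENTICATOR_DISABLE_TIME_DRIFT comments above describe the RFC 6238 window: by default the codes for the previous, current, and next 30-second steps are all accepted, so three codes are valid at any moment. A minimal sketch of that acceptance logic, where `totp_code_for_step` is a hypothetical stand-in for the server's real HMAC-based code derivation:

```rust
// Hypothetical stand-in; a real TOTP code is derived with HMAC (RFC 4226/6238).
fn totp_code_for_step(step: u64) -> String {
    format!("{:06}", step % 1_000_000)
}

fn is_code_valid(submitted: &str, now_secs: u64, disable_time_drift: bool) -> bool {
    let current_step = (now_secs / 30) as i64;
    // Drift disabled: only the current step counts. Otherwise accept one
    // step back and one step forward, i.e. three valid codes at once.
    let offsets: &[i64] = if disable_time_drift { &[0] } else { &[-1, 0, 1] };
    offsets
        .iter()
        .any(|off| totp_code_for_step((current_step + off) as u64) == submitted)
}

fn main() {
    let now = 1_577_836_800; // some Unix timestamp
    let code = totp_code_for_step(now / 30);
    assert!(is_code_valid(&code, now, true));
    assert!(is_code_valid(&code, now + 30, false)); // one step of drift still accepted
    assert!(!is_code_valid(&code, now + 30, true)); // rejected when drift is disabled
}
```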
1 .github/FUNDING.yml (vendored, new file)
@@ -0,0 +1 @@
github: dani-garcia
33 .github/ISSUE_TEMPLATE.md (vendored, new file)
@@ -0,0 +1,33 @@
<!--
Please fill out the following template to make solving your problem easier and faster for us.
This is only a guideline. If you think that parts are unnecessary for your issue, feel free to remove them.

Remember to hide/obfuscate personal and confidential information,
such as names, global IP/DNS addresses and especially passwords, if necessary.
-->

### Subject of the issue
<!-- Describe your issue here. -->

### Your environment
<!-- The version number, obtained from the logs or the admin page -->
* Bitwarden_rs version:
<!-- How the server was installed: Docker image / package / built from source -->
* Install method:
* Clients used: <!-- if applicable -->
* Reverse proxy and version: <!-- if applicable -->
* Version of mysql/postgresql: <!-- if applicable -->
* Other relevant information:

### Steps to reproduce
<!-- Tell us how to reproduce this issue. What parameters did you set (differently from the defaults)
and how did you start bitwarden_rs? -->

### Expected behaviour
<!-- Tell us what should happen -->

### Actual behaviour
<!-- Tell us what happens instead -->

### Relevant logs
<!-- Share some logfiles, screenshots or output of relevant programs with us. -->
70 .github/workflows/rust-win.yml.disabled (vendored, new file)
@@ -0,0 +1,70 @@
name: build-windows

on: [push, pull_request]

jobs:
  build:

    runs-on: windows-latest

    strategy:
      matrix:
        db-backend: [sqlite, mysql, postgresql]

    steps:
    - uses: actions/checkout@v1

    - name: Cache choco cache
      uses: actions/cache@v1.0.3
      with:
        path: ~\AppData\Local\Temp\chocolatey
        key: ${{ runner.os }}-choco-cache

    - name: Install dependencies
      run: choco install openssl sqlite postgresql12 mysql

    - name: Cache cargo registry
      uses: actions/cache@v1.0.3
      with:
        path: ~/.cargo/registry
        key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
    - name: Cache cargo index
      uses: actions/cache@v1.0.3
      with:
        path: ~/.cargo/git
        key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
    - name: Cache cargo build
      uses: actions/cache@v1.0.3
      with:
        path: target
        key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

    - name: Install latest nightly
      uses: actions-rs/toolchain@v1
      with:
        toolchain: nightly
        override: true
        profile: minimal
        target: x86_64-pc-windows-msvc

    - name: Build
      run: cargo.exe build --verbose --features ${{ matrix.db-backend }} --release --target x86_64-pc-windows-msvc
      env:
        OPENSSL_DIR: C:\Program Files\OpenSSL-Win64\

    - name: Run tests
      run: cargo test --features ${{ matrix.db-backend }}

    - name: Upload windows artifact
      uses: actions/upload-artifact@v1.0.0
      with:
        name: x86_64-pc-windows-msvc-${{ matrix.db-backend }}-bitwarden_rs
        path: target/release/bitwarden_rs.exe

    - name: Release
      uses: Shopify/upload-to-release@1.0.0
      if: startsWith(github.ref, 'refs/tags/')
      with:
        name: x86_64-pc-windows-msvc-${{ matrix.db-backend }}-bitwarden_rs
        path: target/release/bitwarden_rs.exe
        repo-token: ${{ secrets.GITHUB_TOKEN }}
149 .github/workflows/workspace.yml (vendored, new file)
@@ -0,0 +1,149 @@
name: Workflow

on:
  push:
    paths-ignore:
      - "**.md"
  pull_request:
    paths-ignore:
      - "**.md"

jobs:
  build:
    name: Build
    strategy:
      fail-fast: false
      matrix:
        db-backend: [sqlite, mysql, postgresql]
        target:
          - x86_64-unknown-linux-gnu
          # - x86_64-unknown-linux-musl
          - x86_64-apple-darwin
          # - x86_64-pc-windows-msvc
        include:
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-latest
            ext:
          # - target: x86_64-unknown-linux-musl
          #   os: ubuntu-latest
          #   ext:
          - target: x86_64-apple-darwin
            os: macOS-latest
            ext:
          # - target: x86_64-pc-windows-msvc
          #   os: windows-latest
          #   ext: .exe
    runs-on: ${{ matrix.os }}
    steps:
    - uses: actions/checkout@v1

    # - name: Cache choco cache
    #   uses: actions/cache@v1.0.3
    #   if: matrix.os == 'windows-latest'
    #   with:
    #     path: ~\AppData\Local\Temp\chocolatey
    #     key: ${{ runner.os }}-choco-cache-${{ matrix.db-backend }}

    - name: Cache vcpkg installed
      uses: actions/cache@v1.0.3
      if: matrix.os == 'windows-latest'
      with:
        path: $VCPKG_ROOT/installed
        key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
      env:
        VCPKG_ROOT: 'C:\vcpkg'

    - name: Cache vcpkg downloads
      uses: actions/cache@v1.0.3
      if: matrix.os == 'windows-latest'
      with:
        path: $VCPKG_ROOT/downloads
        key: ${{ runner.os }}-vcpkg-cache-${{ matrix.db-backend }}
      env:
        VCPKG_ROOT: 'C:\vcpkg'

    # - name: Cache homebrew
    #   uses: actions/cache@v1.0.3
    #   if: matrix.os == 'macOS-latest'
    #   with:
    #     path: ~/Library/Caches/Homebrew
    #     key: ${{ runner.os }}-brew-cache

    # - name: Cache apt
    #   uses: actions/cache@v1.0.3
    #   if: matrix.os == 'ubuntu-latest'
    #   with:
    #     path: /var/cache/apt/archives
    #     key: ${{ runner.os }}-apt-cache

    # Install dependencies
    - name: Install dependencies macOS
      run: brew update; brew install openssl sqlite libpq mysql
      if: matrix.os == 'macOS-latest'

    - name: Install dependencies Ubuntu
      run: sudo apt-get update && sudo apt-get install --no-install-recommends openssl sqlite libpq-dev libmysql++-dev
      if: matrix.os == 'ubuntu-latest'

    - name: Install dependencies Windows
      run: vcpkg integrate install; vcpkg install sqlite3:x64-windows openssl:x64-windows libpq:x64-windows libmysql:x64-windows
      if: matrix.os == 'windows-latest'
      env:
        VCPKG_ROOT: 'C:\vcpkg'
    # End Install dependencies

    # Install rust nightly toolchain
    - name: Cache cargo registry
      uses: actions/cache@v1.0.3
      with:
        path: ~/.cargo/registry
        key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
    - name: Cache cargo index
      uses: actions/cache@v1.0.3
      with:
        path: ~/.cargo/git
        key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
    - name: Cache cargo build
      uses: actions/cache@v1.0.3
      with:
        path: target
        key: ${{ runner.os }}-${{matrix.db-backend}}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

    - name: Install latest nightly
      uses: actions-rs/toolchain@v1
      with:
        toolchain: nightly
        override: true
        profile: minimal
        target: ${{ matrix.target }}

    # Build
    - name: Build Win
      if: matrix.os == 'windows-latest'
      run: cargo.exe build --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}
      env:
        RUSTFLAGS: -Ctarget-feature=+crt-static
        VCPKG_ROOT: 'C:\vcpkg'

    - name: Build macOS / Ubuntu
      if: matrix.os == 'macOS-latest' || matrix.os == 'ubuntu-latest'
      run: cargo build --verbose --features ${{ matrix.db-backend }} --release --target ${{ matrix.target }}

    # Test
    - name: Run tests
      run: cargo test --features ${{ matrix.db-backend }}

    # Upload & Release
    - name: Upload artifact
      uses: actions/upload-artifact@v1.0.0
      with:
        name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
        path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}

    - name: Release
      uses: Shopify/upload-to-release@1.0.0
      if: startsWith(github.ref, 'refs/tags/')
      with:
        name: bitwarden_rs-${{ matrix.db-backend }}-${{ matrix.target }}${{ matrix.ext }}
        path: target/${{ matrix.target }}/release/bitwarden_rs${{ matrix.ext }}
        repo-token: ${{ secrets.GITHUB_TOKEN }}
7 .hadolint.yaml (new file)
@@ -0,0 +1,7 @@
ignored:
  # disable explicit version for apt install
  - DL3008
  # disable explicit version for apk install
  - DL3018
trustedRegistries:
  - docker.io
24 .travis.yml
@@ -1,9 +1,21 @@
# Copied from Rocket's .travis.yml
dist: xenial

env:
  global:
    - HADOLINT_VERSION=1.17.1

language: rust
sudo: required # so we get a VM with higher specs
dist: trusty # so we get a VM with higher specs
rust: nightly
cache: cargo
rust:
  - nightly

before_install:
  - sudo curl -L https://github.com/hadolint/hadolint/releases/download/v$HADOLINT_VERSION/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint
  - sudo chmod +rx /usr/local/bin/hadolint
  - rustup set profile minimal

# Nothing to install
install: true
script:
  - cargo build --verbose --all-features
  - git ls-files --exclude='Dockerfile*' --ignored | xargs --max-lines=1 hadolint
  - cargo test --features "sqlite"
  - cargo test --features "mysql"
2833 Cargo.lock (generated): file diff suppressed because it is too large.
98 Cargo.toml
@@ -11,53 +11,59 @@ publish = false
build = "build.rs"

[features]
enable_syslog = ["syslog", "fern/syslog-4"]
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
enable_syslog = []
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
postgresql = ["diesel/postgres", "diesel_migrations/postgres", "openssl"]
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"]

[target."cfg(not(windows))".dependencies]
syslog = "4.0.1"

[dependencies]
# Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
rocket = { version = "0.4.0", features = ["tls"], default-features = false }
rocket_contrib = "0.4.0"
rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false }
rocket_contrib = "0.5.0-dev"

# HTTP client
reqwest = "0.9.8"
reqwest = "0.9.24"

# multipart/form-data support
multipart = "0.15.4"
multipart = { version = "0.16.1", features = ["server"], default-features = false }

# WebSockets library
ws = "0.7.9"
ws = "0.9.1"

# MessagePack library
rmpv = "0.4.0"
rmpv = "0.4.3"

# Concurrent hashmap implementation
chashmap = "2.2.0"
chashmap = "2.2.2"

# A generic serialization/deserialization framework
serde = "1.0.84"
serde_derive = "1.0.84"
serde_json = "1.0.34"
serde = "1.0.104"
serde_derive = "1.0.104"
serde_json = "1.0.44"

# Logging
log = "0.4.6"
fern = "0.5.7"
syslog = { version = "4.0.1", optional = true }
log = "0.4.8"
fern = { version = "0.5.9", features = ["syslog-4"] }

# A safe, extensible ORM and Query builder
diesel = { version = "1.3.3", features = ["sqlite", "chrono", "r2d2"] }
diesel_migrations = { version = "1.3.0", features = ["sqlite"] }
diesel = { version = "1.4.3", features = [ "chrono", "r2d2"] }
diesel_migrations = "1.4.0"

# Bundled SQLite
libsqlite3-sys = { version = "0.9.3", features = ["bundled"] }
libsqlite3-sys = { version = "0.16.0", features = ["bundled"], optional = true }

# Crypto library
ring = { version = "0.13.5", features = ["rsa_signing"] }
ring = "0.14.6"

# UUID generation
uuid = { version = "0.7.1", features = ["v4"] }
uuid = { version = "0.8.1", features = ["v4"] }

# Date and time library for Rust
chrono = "0.4.6"
chrono = "0.4.10"

# TOTP library
oath = "0.10.2"
@@ -66,39 +72,55 @@ oath = "0.10.2"
data-encoding = "2.1.2"

# JWT library
jsonwebtoken = "5.0.1"
jsonwebtoken = "6.0.1"

# U2F library
u2f = "0.1.4"
u2f = "0.1.6"

# Yubico Library
yubico = { version = "0.5.0", features = ["online"], default-features = false }
yubico = { version = "0.7.1", features = ["online-tokio"], default-features = false }

# A `dotenv` implementation for Rust
dotenv = { version = "0.13.0", default-features = false }
dotenv = { version = "0.15.0", default-features = false }

# Lazy static macro
lazy_static = { version = "1.2.0", features = ["nightly"] }
lazy_static = "1.4.0"

# More derives
derive_more = "0.13.0"
derive_more = "0.99.2"

# Numerical libraries
num-traits = "0.2.6"
num-derive = "0.2.3"
num-traits = "0.2.10"
num-derive = "0.3.0"

# Email libraries
lettre = "0.9.0"
lettre_email = "0.9.0"
native-tls = "0.2.2"
lettre = "0.9.2"
lettre_email = "0.9.2"
native-tls = "0.2.3"
quoted_printable = "0.4.1"

# Number encoding library
byteorder = "1.2.7"
# Template library
handlebars = "=2.0.2"

# For favicon extraction from main website
soup = "0.4.1"
regex = "1.3.1"
data-url = "0.1.0"

# Required for SSL support for PostgreSQL
openssl = { version = "0.10.26", optional = true }

# URL encoding library
percent-encoding = "2.1.0"

[patch.crates-io]
# Add support for Timestamp type
rmp = { git = 'https://github.com/dani-garcia/msgpack-rust' }
# Use newest ring
rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }
rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'b95b6765e1cc8be7c1e7eaef8a9d9ad940b0ac13' }

# Use new native_tls version 0.2
lettre = { git = 'https://github.com/lettre/lettre', rev = 'c988b1760ad81' }
lettre_email = { git = 'https://github.com/lettre/lettre', rev = 'c988b1760ad81' }
# Use git version for timeout fix #706
lettre = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }
lettre_email = { git = 'https://github.com/lettre/lettre', rev = '24d694db3be017d82b1cdc8bf9da601420b31bb0' }

# For favicon extraction from main website
data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '7f1bd6ce1c2fde599a757302a843a60e714c5f72' }
86 Dockerfile (deleted)
@@ -1,86 +0,0 @@
# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine as vault

ENV VAULT_VERSION "v2.8.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --update-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust as build

# Using bundled SQLite, no need to install it
# RUN apt-get update && apt-get install -y\
#     sqlite3\
#     --no-install-recommends\
#     && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:stretch-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

# Install needed libraries
RUN apt-get update && apt-get install -y\
    openssl\
    ca-certificates\
    --no-install-recommends\
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

# Configures the startup!
CMD ./bitwarden_rs
1 Dockerfile (symbolic link)
@@ -0,0 +1 @@
docker/amd64/sqlite/Dockerfile
13 README.md
@@ -3,7 +3,7 @@
---

[](https://travis-ci.org/dani-garcia/bitwarden_rs)
[](https://hub.docker.com/r/mprasil/bitwarden)
[](https://hub.docker.com/r/bitwardenrs/server)
[](https://deps.rs/repo/github/dani-garcia/bitwarden_rs)
[](https://github.com/dani-garcia/bitwarden_rs/releases/latest)
[](https://github.com/dani-garcia/bitwarden_rs/blob/master/LICENSE.txt)
@@ -34,8 +34,8 @@ Basically full implementation of Bitwarden API is provided including:
Pull the docker image and mount a volume from the host for persistent storage:

```sh
docker pull mprasil/bitwarden:latest
docker run -d --name bitwarden -v /bw-data/:/data/ -p 80:80 mprasil/bitwarden:latest
docker pull bitwardenrs/server:latest
docker run -d --name bitwarden -v /bw-data/:/data/ -p 80:80 bitwardenrs/server:latest
```
This will preserve any persistent data under /bw-data/, you can adapt the path to whatever suits you.

@@ -50,6 +50,11 @@ See the [bitwarden_rs wiki](https://github.com/dani-garcia/bitwarden_rs/wiki) fo

## Get in touch

To ask an question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine, also please report any bugs spotted here.
To ask a question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine. Please also report any bugs spotted here.

If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us!

### Sponsors
Thanks for your contribution to the project!

- [@Skaronator](https://github.com/Skaronator)
25 azure-pipelines.yml (new file)
@@ -0,0 +1,25 @@
pool:
  vmImage: 'Ubuntu-16.04'

steps:
- script: |
    ls -la
    curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $(cat rust-toolchain) --profile=minimal
    echo "##vso[task.prependpath]$HOME/.cargo/bin"
  displayName: 'Install Rust'

- script: |
    sudo apt-get update
    sudo apt-get install -y libmysql++-dev
  displayName: Install libmysql

- script: |
    rustc -Vv
    cargo -V
  displayName: Query rust and cargo versions

- script: cargo test --features "sqlite"
  displayName: 'Test project with sqlite backend'

- script: cargo test --features "mysql"
  displayName: 'Test project with mysql backend'
36 build.rs
@@ -1,11 +1,25 @@
use std::process::Command;

fn main() {
    read_git_info().expect("Unable to read Git info");
    #[cfg(all(feature = "sqlite", feature = "mysql"))]
    compile_error!("Can't enable both sqlite and mysql at the same time");
    #[cfg(all(feature = "sqlite", feature = "postgresql"))]
    compile_error!("Can't enable both sqlite and postgresql at the same time");
    #[cfg(all(feature = "mysql", feature = "postgresql"))]
    compile_error!("Can't enable both mysql and postgresql at the same time");

    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");

    read_git_info().ok();
}

fn run(args: &[&str]) -> Result<String, std::io::Error> {
    let out = Command::new(args[0]).args(&args[1..]).output()?;
    if !out.status.success() {
        use std::io::{Error, ErrorKind};
        return Err(Error::new(ErrorKind::Other, "Command not successful"));
    }
    Ok(String::from_utf8(out.stdout).unwrap().trim().to_string())
}

@@ -13,8 +27,10 @@ fn run(args: &[&str]) -> Result<String, std::io::Error> {
fn read_git_info() -> Result<(), std::io::Error> {
    // The exact tag for the current commit, can be empty when
    // the current commit doesn't have an associated tag
    let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"])?;
    println!("cargo:rustc-env=GIT_EXACT_TAG={}", exact_tag);
    let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok();
    if let Some(ref exact) = exact_tag {
        println!("cargo:rustc-env=GIT_EXACT_TAG={}", exact);
    }

    // The last available tag, equal to exact_tag when
    // the current commit is tagged
@@ -27,13 +43,25 @@ fn read_git_info() -> Result<(), std::io::Error> {

    // The current git commit hash
    let rev = run(&["git", "rev-parse", "HEAD"])?;
    let rev_short = rev.get(..12).unwrap_or_default();
    let rev_short = rev.get(..8).unwrap_or_default();
    println!("cargo:rustc-env=GIT_REV={}", rev_short);

    // Combined version
    let version = if let Some(exact) = exact_tag {
        exact
    } else if &branch != "master" {
        format!("{}-{} ({})", last_tag, rev_short, branch)
    } else {
        format!("{}-{}", last_tag, rev_short)
    };
    println!("cargo:rustc-env=GIT_VERSION={}", version);

    // To access these values, use:
    //    env!("GIT_EXACT_TAG")
    //    env!("GIT_LAST_TAG")
    //    env!("GIT_BRANCH")
    //    env!("GIT_REV")
    //    env!("GIT_VERSION")

    Ok(())
}
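The `cargo:rustc-env` lines printed by this build script become compile-time environment variables, and the closing comment above lists how they are read. A minimal sketch of the consuming side, where `version_banner` is a hypothetical helper rather than the project's actual code:

```rust
fn version_banner() -> String {
    // option_env! is the forgiving variant of env!: it yields None instead of
    // failing the build when build.rs never emitted the variable
    // (e.g. GIT_EXACT_TAG on an untagged commit, or a build outside git).
    let version = option_env!("GIT_VERSION").unwrap_or("unknown");
    let branch = option_env!("GIT_BRANCH").unwrap_or("unknown");
    format!("bitwarden_rs {} ({})", version, branch)
}

fn main() {
    println!("{}", version_banner());
}
```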
110 docker/aarch64/mysql/Dockerfile (new file)
@@ -0,0 +1,110 @@
# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        gcc-aarch64-linux-gnu \
    && mkdir -p ~/.cargo \
    && echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
    && echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

WORKDIR /app

# Prepare openssl arm64 libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
        /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64 \
        libmariadb-dev:arm64

ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add aarch64-unknown-linux-gnu
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data

RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/aarch64/sqlite/Dockerfile
@@ -2,29 +2,38 @@
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine as vault
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.8.0b"
ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --update-cache --upgrade \
RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        gcc-aarch64-linux-gnu \
    && mkdir -p ~/.cargo \
    && echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config \
@@ -41,6 +50,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    && dpkg --add-architecture arm64 \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64

@@ -55,12 +65,12 @@ COPY . .

# Build
RUN rustup target add aarch64-unknown-linux-gnu
RUN cargo build --release --target=aarch64-unknown-linux-gnu -v
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-debian:stretch
FROM balenalib/aarch64-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
@@ -69,10 +79,12 @@ ENV ROCKET_WORKERS=10
RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y\
    openssl\
    ca-certificates\
    --no-install-recommends\
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
@@ -88,5 +100,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ./bitwarden_rs
WORKDIR /
CMD ["/bitwarden_rs"]
101 docker/amd64/mysql/Dockerfile (new file)
@@ -0,0 +1,101 @@
# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Don't download rust docs
RUN rustup set profile minimal

# Install MySQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmariadb-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
89 docker/amd64/mysql/Dockerfile.alpine (new file)
@@ -0,0 +1,89 @@
# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2019-12-19 as build

# set mysql backend
ARG DB=mysql

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libmysqlclient-dev \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

RUN rustup target add x86_64-unknown-linux-musl

# Make sure that we actually build the project
RUN touch src/main.rs

# Build
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# Install needed libraries
RUN apk add --no-cache \
    openssl \
    mariadb-connector-c \
    curl \
    ca-certificates

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
108 docker/amd64/postgresql/Dockerfile (new file)
@@ -0,0 +1,108 @@
# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set postgresql backend
ARG DB=postgresql

# Don't download rust docs
RUN rustup set profile minimal

# Using bundled SQLite, no need to install it
# RUN apt-get update && apt-get install -y\
#     --no-install-recommends \
#     sqlite3\
#     && rm -rf /var/lib/apt/lists/*

# Install PostgreSQL package
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    sqlite3 \
    libpq5 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/amd64/postgresql/Dockerfile.alpine (new file, 90 lines)
@@ -0,0 +1,90 @@
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2019-12-19 as build

# set postgresql backend
ARG DB=postgresql

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

RUN rustup target add x86_64-unknown-linux-musl

# Make sure that we actually build the project
RUN touch src/main.rs

# Build
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# Install needed libraries
RUN apk add --no-cache \
    openssl \
    postgresql-libs \
    curl \
    sqlite \
    ca-certificates

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/amd64/sqlite/Dockerfile (new file, 95 lines)
@@ -0,0 +1,95 @@
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Don't download rust docs
RUN rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release
RUN find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM debian:buster-slim

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data
VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build app/target/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -2,25 +2,33 @@
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine as vault
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.8.0b"
ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --update-cache --upgrade \
RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# Musl build image for statically compiled binary
FROM clux/muslrust:nightly-2018-12-01 as build
FROM clux/muslrust:nightly-2019-12-19 as build

# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Don't download rust docs
RUN rustup set profile minimal

ENV USER "root"

@@ -32,13 +40,16 @@ COPY . .

RUN rustup target add x86_64-unknown-linux-musl

# Make sure that we actually build the project
RUN touch src/main.rs

# Build
RUN cargo build --release
RUN cargo build --features ${DB} --release

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM alpine:3.8
FROM alpine:3.11

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80

@@ -46,10 +57,11 @@ ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# Install needed libraries
RUN apk add \
    openssl\
    ca-certificates \
    && rm /var/cache/apk/*
RUN apk add --no-cache \
    openssl \
    curl \
    sqlite \
    ca-certificates

RUN mkdir /data
VOLUME /data

@@ -62,5 +74,11 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ./bitwarden_rs
WORKDIR /
CMD ["/bitwarden_rs"]
docker/armv6/mysql/Dockerfile (new file, 110 lines)
@@ -0,0 +1,110 @@
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    gcc-arm-linux-gnueabi \
    && mkdir -p ~/.cargo \
    && echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
    && echo 'linker = "arm-linux-gnueabi-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

WORKDIR /app

# Prepare openssl armel libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel \
    libmariadb-dev:armel

ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add arm-unknown-linux-gnueabi
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data

RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/armv6/sqlite/Dockerfile (new file, 109 lines)
@@ -0,0 +1,109 @@
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    gcc-arm-linux-gnueabi \
    && mkdir -p ~/.cargo \
    && echo '[target.arm-unknown-linux-gnueabi]' >> ~/.cargo/config \
    && echo 'linker = "arm-linux-gnueabi-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

WORKDIR /app

# Prepare openssl armel libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armel \
    libc6-dev:armel

ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add arm-unknown-linux-gnueabi
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data

RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
docker/armv7/mysql/Dockerfile (new file, 111 lines)
@@ -0,0 +1,111 @@
# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust:1.40 as build

# set mysql backend
ARG DB=mysql

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    gcc-arm-linux-gnueabihf \
    && mkdir -p ~/.cargo \
    && echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \
    && echo 'linker = "arm-linux-gnueabihf-gcc"' >> ~/.cargo/config

ENV CARGO_HOME "/root/.cargo"
ENV USER "root"

WORKDIR /app

# Prepare openssl armhf libs
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf \
    libmariadb-dev:armhf

ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Build
RUN rustup target add armv7-unknown-linux-gnueabihf
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10

RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    libmariadbclient-dev \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data

RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
WORKDIR /
CMD ["/bitwarden_rs"]
@@ -2,29 +2,38 @@
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
FROM alpine as vault
FROM alpine:3.11 as vault

ENV VAULT_VERSION "v2.8.0b"
ENV VAULT_VERSION "v2.12.0b"

ENV URL "https://github.com/dani-garcia/bw_web_builds/releases/download/$VAULT_VERSION/bw_web_$VAULT_VERSION.tar.gz"

RUN apk add --update-cache --upgrade \
RUN apk add --no-cache --upgrade \
    curl \
    tar

RUN mkdir /web-vault
WORKDIR /web-vault

SHELL ["/bin/ash", "-eo", "pipefail", "-c"]

RUN curl -L $URL | tar xz
RUN ls

########################## BUILD IMAGE ##########################
# We need to use the Rust build image, because
# we need the Rust compiler and Cargo tooling
FROM rust as build
FROM rust:1.40 as build

# set sqlite as default for DB ARG for backward compatibility
ARG DB=sqlite

# Don't download rust docs
RUN rustup set profile minimal

RUN apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    gcc-arm-linux-gnueabihf \
    && mkdir -p ~/.cargo \
    && echo '[target.armv7-unknown-linux-gnueabihf]' >> ~/.cargo/config \

@@ -41,6 +50,7 @@ RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > \
    && dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
    --no-install-recommends \
    libssl-dev:armhf \
    libc6-dev:armhf

@@ -55,12 +65,12 @@ COPY . .

# Build
RUN rustup target add armv7-unknown-linux-gnueabihf
RUN cargo build --release --target=armv7-unknown-linux-gnueabihf -v
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-debian:stretch
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80

@@ -69,10 +79,12 @@ ENV ROCKET_WORKERS=10
RUN [ "cross-build-start" ]

# Install needed libraries
RUN apt-get update && apt-get install -y\
    openssl\
    ca-certificates\
    --no-install-recommends\
RUN apt-get update && apt-get install -y \
    --no-install-recommends \
    openssl \
    ca-certificates \
    curl \
    sqlite3 \
    && rm -rf /var/lib/apt/lists/*

RUN mkdir /data

@@ -88,5 +100,10 @@ COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs .

COPY docker/healthcheck.sh ./healthcheck.sh

HEALTHCHECK --interval=30s --timeout=3s CMD sh healthcheck.sh || exit 1

# Configures the startup!
CMD ./bitwarden_rs
WORKDIR /
CMD ["/bitwarden_rs"]
docker/healthcheck.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/usr/bin/env sh

if [ -z "$ROCKET_TLS" ]
then
    curl --fail http://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
else
    curl --insecure --fail https://localhost:${ROCKET_PORT:-"80"}/alive || exit 1
fi
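The script above assumes the server exposes a cheap `/alive` route to curl. As a rough sketch of such an endpoint (Rocket 0.4-style nightly syntax; illustration only, not the project's actual handler), it can be as small as:

```rust
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;

// A trivial liveness probe: if the server can answer at all, it is "alive".
// The Docker HEALTHCHECK shells out to curl against this route.
#[get("/alive")]
fn alive() -> &'static str {
    "alive"
}

fn main() {
    rocket::ignite().mount("/", routes![alive]).launch();
}
```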
migrations/mysql/2018-01-14-171611_create_tables/up.sql (new file, 62 lines)
@@ -0,0 +1,62 @@
CREATE TABLE users (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    email VARCHAR(255) NOT NULL UNIQUE,
    name TEXT NOT NULL,
    password_hash BLOB NOT NULL,
    salt BLOB NOT NULL,
    password_iterations INTEGER NOT NULL,
    password_hint TEXT,
    `key` TEXT NOT NULL,
    private_key TEXT,
    public_key TEXT,
    totp_secret TEXT,
    totp_recover TEXT,
    security_stamp TEXT NOT NULL,
    equivalent_domains TEXT NOT NULL,
    excluded_globals TEXT NOT NULL
);

CREATE TABLE devices (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL,
    type INTEGER NOT NULL,
    push_token TEXT,
    refresh_token TEXT NOT NULL
);

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    folder_uuid CHAR(36) REFERENCES folders (uuid),
    organization_uuid CHAR(36),
    type INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL
);

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL
);

CREATE TABLE folders (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL
);
@@ -0,0 +1,30 @@
CREATE TABLE collections (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
    name TEXT NOT NULL
);

CREATE TABLE organizations (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    billing_email TEXT NOT NULL
);

CREATE TABLE users_collections (
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (user_uuid, collection_uuid)
);

CREATE TABLE users_organizations (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),

    access_all BOOLEAN NOT NULL,
    `key` TEXT NOT NULL,
    status INTEGER NOT NULL,
    type INTEGER NOT NULL,

    UNIQUE (user_uuid, org_uuid)
);
@@ -0,0 +1,34 @@
ALTER TABLE ciphers RENAME TO oldCiphers;

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) REFERENCES users (uuid), -- Make this optional
    organization_uuid CHAR(36) REFERENCES organizations (uuid), -- Add reference to orgs table
    -- Remove folder_uuid
    type INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL
);

CREATE TABLE folders_ciphers (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),

    PRIMARY KEY (cipher_uuid, folder_uuid)
);

INSERT INTO ciphers (uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite)
SELECT uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite FROM oldCiphers;

INSERT INTO folders_ciphers (cipher_uuid, folder_uuid)
SELECT uuid, folder_uuid FROM oldCiphers WHERE folder_uuid IS NOT NULL;

DROP TABLE oldCiphers;

ALTER TABLE users_collections ADD COLUMN read_only BOOLEAN NOT NULL DEFAULT 0; -- False
@@ -0,0 +1,5 @@
CREATE TABLE ciphers_collections (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (cipher_uuid, collection_uuid)
);
@@ -0,0 +1,14 @@
ALTER TABLE attachments RENAME TO oldAttachments;

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL
);

INSERT INTO attachments (id, cipher_uuid, file_name, file_size)
SELECT id, cipher_uuid, file_name, file_size FROM oldAttachments;

DROP TABLE oldAttachments;
@@ -0,0 +1,15 @@
CREATE TABLE twofactor (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    type INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,

    UNIQUE (user_uuid, type)
);

INSERT INTO twofactor (uuid, user_uuid, type, enabled, data)
SELECT UUID(), uuid, 0, 1, u.totp_secret FROM users u WHERE u.totp_secret IS NOT NULL;

UPDATE users SET totp_secret = NULL; -- Instead of recreating the table, just leave the columns empty
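The literals `0` and `1` in the INSERT above are the two-factor type discriminant (0 = authenticator/TOTP) and the enabled flag. A hypothetical Rust mirror of that mapping, for orientation only; the project's real enum lives in its TwoFactor model, and the values beyond 0 are assumptions here:

```rust
// Hypothetical mirror of the numeric `type` discriminant the migration
// hardcodes as 0 when moving users.totp_secret into the twofactor table.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(i32)]
enum TwoFactorType {
    Authenticator = 0, // TOTP: the value the migration assigns
    Email = 1,         // assumed
    Duo = 2,           // assumed
    YubiKey = 3,       // assumed
    U2f = 4,           // assumed
}

fn main() {
    // The data-migration row is created as type 0, enabled = 1 (true).
    assert_eq!(TwoFactorType::Authenticator as i32, 0);
}
```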
migrations/mysql/2018-09-10-111213_add_invites/up.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
CREATE TABLE invitations (
    email VARCHAR(255) NOT NULL PRIMARY KEY
);
@@ -4,4 +4,4 @@ ALTER TABLE users

ALTER TABLE users
ADD COLUMN
    client_kdf_iter INTEGER NOT NULL DEFAULT 5000;
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
@@ -0,0 +1,3 @@
ALTER TABLE attachments
ADD COLUMN
    `key` TEXT;
@@ -0,0 +1,7 @@
ALTER TABLE attachments CHANGE COLUMN akey `key` TEXT;
ALTER TABLE ciphers CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN atype type INTEGER NOT NULL;
@@ -0,0 +1,7 @@
ALTER TABLE attachments CHANGE COLUMN `key` akey TEXT;
ALTER TABLE ciphers CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN type atype INTEGER NOT NULL;
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

|
||||
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
|
@@ -0,0 +1,13 @@
DROP TABLE devices;
DROP TABLE attachments;
DROP TABLE users_collections;
DROP TABLE users_organizations;
DROP TABLE folders_ciphers;
DROP TABLE ciphers_collections;
DROP TABLE twofactor;
DROP TABLE invitations;
DROP TABLE collections;
DROP TABLE folders;
DROP TABLE ciphers;
DROP TABLE users;
DROP TABLE organizations;
migrations/postgresql/2019-09-12-100000_create_tables/up.sql (new file, 121 lines)
@@ -0,0 +1,121 @@
CREATE TABLE users (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    email VARCHAR(255) NOT NULL UNIQUE,
    name TEXT NOT NULL,
    password_hash BYTEA NOT NULL,
    salt BYTEA NOT NULL,
    password_iterations INTEGER NOT NULL,
    password_hint TEXT,
    akey TEXT NOT NULL,
    private_key TEXT,
    public_key TEXT,
    totp_secret TEXT,
    totp_recover TEXT,
    security_stamp TEXT NOT NULL,
    equivalent_domains TEXT NOT NULL,
    excluded_globals TEXT NOT NULL,
    client_kdf_type INTEGER NOT NULL DEFAULT 0,
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000
);

CREATE TABLE devices (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL,
    atype INTEGER NOT NULL,
    push_token TEXT,
    refresh_token TEXT NOT NULL,
    twofactor_remember TEXT
);

CREATE TABLE organizations (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    billing_email TEXT NOT NULL
);

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) REFERENCES users (uuid),
    organization_uuid CHAR(36) REFERENCES organizations (uuid),
    atype INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL,
    password_history TEXT
);

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL,
    akey TEXT
);

CREATE TABLE folders (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at TIMESTAMP NOT NULL,
    updated_at TIMESTAMP NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL
);

CREATE TABLE collections (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
    name TEXT NOT NULL
);

CREATE TABLE users_collections (
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    read_only BOOLEAN NOT NULL DEFAULT false,
    PRIMARY KEY (user_uuid, collection_uuid)
);

CREATE TABLE users_organizations (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),

    access_all BOOLEAN NOT NULL,
    akey TEXT NOT NULL,
    status INTEGER NOT NULL,
    atype INTEGER NOT NULL,

    UNIQUE (user_uuid, org_uuid)
);

CREATE TABLE folders_ciphers (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),
    PRIMARY KEY (cipher_uuid, folder_uuid)
);

CREATE TABLE ciphers_collections (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (cipher_uuid, collection_uuid)
);

CREATE TABLE twofactor (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    atype INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,
    UNIQUE (user_uuid, atype)
);

CREATE TABLE invitations (
    email VARCHAR(255) NOT NULL PRIMARY KEY
);
@@ -0,0 +1,26 @@
ALTER TABLE attachments ALTER COLUMN id TYPE CHAR(36);
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE users ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE users ALTER COLUMN email TYPE VARCHAR(255);
ALTER TABLE devices ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE devices ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE organizations ALTER COLUMN uuid TYPE CHAR(40);
ALTER TABLE ciphers ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE CHAR(36);
ALTER TABLE folders ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE folders ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE collections ALTER COLUMN uuid TYPE CHAR(40);
ALTER TABLE collections ALTER COLUMN org_uuid TYPE CHAR(40);
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE CHAR(36);
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE CHAR(36);
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE CHAR(36);
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE CHAR(36);
ALTER TABLE twofactor ALTER COLUMN uuid TYPE CHAR(36);
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE CHAR(36);
ALTER TABLE invitations ALTER COLUMN email TYPE VARCHAR(255);
@@ -0,0 +1,27 @@
-- Switch from CHAR() types to VARCHAR() types to avoid padding issues.
ALTER TABLE attachments ALTER COLUMN id TYPE TEXT;
ALTER TABLE attachments ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE users ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE users ALTER COLUMN email TYPE TEXT;
ALTER TABLE devices ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE devices ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE organizations ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers ALTER COLUMN organization_uuid TYPE VARCHAR(40);
ALTER TABLE folders ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE folders ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE collections ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE collections ALTER COLUMN org_uuid TYPE VARCHAR(40);
ALTER TABLE users_collections ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE users_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE users_organizations ALTER COLUMN org_uuid TYPE VARCHAR(40);
ALTER TABLE folders_ciphers ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE folders_ciphers ALTER COLUMN folder_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers_collections ALTER COLUMN cipher_uuid TYPE VARCHAR(40);
ALTER TABLE ciphers_collections ALTER COLUMN collection_uuid TYPE VARCHAR(40);
ALTER TABLE twofactor ALTER COLUMN uuid TYPE VARCHAR(40);
ALTER TABLE twofactor ALTER COLUMN user_uuid TYPE VARCHAR(40);
ALTER TABLE invitations ALTER COLUMN email TYPE TEXT;
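To see why the padding matters: a value read back from a `CHAR(n)` column comes back space-padded to width n, so a naive equality check in application code silently fails. A small Rust illustration with a made-up UUID (not project code):

```rust
// Illustration only: a 36-char UUID stored in CHAR(40) is returned padded
// to 40 characters, so direct string comparison against the unpadded value
// fails. VARCHAR/TEXT columns do not pad, which is what the migration fixes.
fn main() {
    let from_char_column = "f3b6f284-aaea-4d54-aa08-93471d77f0cc    "; // padded to 40
    let expected = "f3b6f284-aaea-4d54-aa08-93471d77f0cc";
    assert_ne!(from_char_column, expected);            // the padding bug
    assert_eq!(from_char_column.trim_end(), expected); // the awkward workaround
}
```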
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

|
||||
ALTER TABLE users ADD COLUMN verified_at TIMESTAMP DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN last_verifying_at TIMESTAMP DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
|
@@ -0,0 +1,9 @@
DROP TABLE users;

DROP TABLE devices;

DROP TABLE ciphers;

DROP TABLE attachments;

DROP TABLE folders;
@@ -0,0 +1,8 @@
DROP TABLE collections;

DROP TABLE organizations;

DROP TABLE users_collections;

DROP TABLE users_organizations;
@@ -0,0 +1 @@
DROP TABLE ciphers_collections;
@@ -0,0 +1 @@
-- This file should undo anything in `up.sql`
@@ -0,0 +1,3 @@
ALTER TABLE devices
ADD COLUMN
    twofactor_remember TEXT;
|
@@ -0,0 +1,8 @@
|
||||
UPDATE users
|
||||
SET totp_secret = (
|
||||
SELECT twofactor.data FROM twofactor
|
||||
WHERE twofactor.type = 0
|
||||
AND twofactor.user_uuid = users.uuid
|
||||
);
|
||||
|
||||
DROP TABLE twofactor;
|
@@ -0,0 +1,3 @@
ALTER TABLE ciphers
ADD COLUMN
    password_history TEXT;
migrations/sqlite/2018-09-10-111213_add_invites/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE invitations;
@@ -0,0 +1,7 @@
ALTER TABLE users
ADD COLUMN
    client_kdf_type INTEGER NOT NULL DEFAULT 0; -- PBKDF2

ALTER TABLE users
ADD COLUMN
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
@@ -0,0 +1,7 @@
ALTER TABLE attachments RENAME COLUMN akey TO key;
ALTER TABLE ciphers RENAME COLUMN atype TO type;
ALTER TABLE devices RENAME COLUMN atype TO type;
ALTER TABLE twofactor RENAME COLUMN atype TO type;
ALTER TABLE users RENAME COLUMN akey TO key;
ALTER TABLE users_organizations RENAME COLUMN akey TO key;
ALTER TABLE users_organizations RENAME COLUMN atype TO type;
@@ -0,0 +1,7 @@
ALTER TABLE attachments RENAME COLUMN key TO akey;
ALTER TABLE ciphers RENAME COLUMN type TO atype;
ALTER TABLE devices RENAME COLUMN type TO atype;
ALTER TABLE twofactor RENAME COLUMN type TO atype;
ALTER TABLE users RENAME COLUMN key TO akey;
ALTER TABLE users_organizations RENAME COLUMN key TO akey;
ALTER TABLE users_organizations RENAME COLUMN type TO atype;
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

|
||||
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
|
||||
ALTER TABLE users ADD COLUMN email_new TEXT DEFAULT NULL;
|
||||
ALTER TABLE users ADD COLUMN email_new_token TEXT DEFAULT NULL;
|
@@ -1 +1 @@
nightly-2019-01-08
nightly-2019-12-19
@@ -1 +1,2 @@
version = "Two"
max_width = 120
src/api/admin.rs (294 changed lines)
@@ -1,24 +1,178 @@
use rocket_contrib::json::Json;
use serde_json::Value;
use std::process::Command;

use crate::api::{JsonResult, JsonUpcase};
use rocket::http::{Cookie, Cookies, SameSite};
use rocket::request::{self, FlashMessage, Form, FromRequest, Request};
use rocket::response::{content::Html, Flash, Redirect};
use rocket::{Outcome, Route};
use rocket_contrib::json::Json;

use crate::api::{ApiResult, EmptyResult, JsonResult};
use crate::auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp};
use crate::config::ConfigBuilder;
use crate::db::{backup_database, models::*, DbConn};
use crate::error::Error;
use crate::mail;
use crate::CONFIG;

use crate::db::models::*;
use crate::db::DbConn;
use crate::mail;

use rocket::request::{self, FromRequest, Request};
use rocket::{Outcome, Route};

pub fn routes() -> Vec<Route> {
    routes![get_users, invite_user, delete_user]
    if CONFIG.admin_token().is_none() && !CONFIG.disable_admin_token() {
        return routes![admin_disabled];
    }

    routes![
        admin_login,
        get_users,
        post_admin_login,
        admin_page,
        invite_user,
        logout,
        delete_user,
        deauth_user,
        remove_2fa,
        update_revision_users,
        post_config,
        delete_config,
        backup_db,
    ]
}

lazy_static! {
    static ref CAN_BACKUP: bool = cfg!(feature = "sqlite") && Command::new("sqlite3").arg("-version").status().is_ok();
}

#[get("/")]
fn admin_disabled() -> &'static str {
    "The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it"
}

const COOKIE_NAME: &str = "BWRS_ADMIN";
const ADMIN_PATH: &str = "/admin";

const BASE_TEMPLATE: &str = "admin/base";
const VERSION: Option<&str> = option_env!("GIT_VERSION");

#[get("/", rank = 2)]
fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
    // If there is an error, show it
    let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg});

    // Return the page
    let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
    Ok(Html(text))
}

#[derive(FromForm)]
struct LoginForm {
    token: String,
}

#[post("/", data = "<data>")]
fn post_admin_login(data: Form<LoginForm>, mut cookies: Cookies, ip: ClientIp) -> Result<Redirect, Flash<Redirect>> {
    let data = data.into_inner();

    // If the token is invalid, redirect to login page
    if !_validate_token(&data.token) {
        error!("Invalid admin token. IP: {}", ip.ip);
        Err(Flash::error(
            Redirect::to(ADMIN_PATH),
            "Invalid admin token, please try again.",
        ))
    } else {
        // If the token received is valid, generate JWT and save it as a cookie
        let claims = generate_admin_claims();
        let jwt = encode_jwt(&claims);

        let cookie = Cookie::build(COOKIE_NAME, jwt)
            .path(ADMIN_PATH)
            .max_age(chrono::Duration::minutes(20))
            .same_site(SameSite::Strict)
            .http_only(true)
            .finish();

        cookies.add(cookie);
        Ok(Redirect::to(ADMIN_PATH))
    }
}
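For orientation, `generate_admin_claims`/`encode_jwt`/`decode_admin` wrap a JWT round trip. A self-contained sketch of the same shape, assuming the `jsonwebtoken` crate (v7 API) and made-up claim fields; the project's real claims and keys live in `src/auth.rs`:

```rust
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

// Hypothetical claims shape; field names here are illustrative.
#[derive(Serialize, Deserialize)]
struct AdminClaims {
    exp: u64,    // expiry (seconds since epoch); Validation checks this
    sub: String, // token purpose
}

fn generate_admin_token(secret: &[u8]) -> String {
    let claims = AdminClaims {
        // 20 minutes, matching the cookie max_age above
        exp: std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_secs()
            + 20 * 60,
        sub: "admin_panel".into(),
    };
    encode(&Header::default(), &claims, &EncodingKey::from_secret(secret)).unwrap()
}

fn decode_admin_token(token: &str, secret: &[u8]) -> bool {
    decode::<AdminClaims>(token, &DecodingKey::from_secret(secret), &Validation::default()).is_ok()
}

fn main() {
    let jwt = generate_admin_token(b"not-a-real-secret");
    assert!(decode_admin_token(&jwt, b"not-a-real-secret"));
    assert!(!decode_admin_token(&jwt, b"wrong-secret"));
}
```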
fn _validate_token(token: &str) -> bool {
    match CONFIG.admin_token().as_ref() {
        None => false,
        Some(t) => crate::crypto::ct_eq(t.trim(), token.trim()),
    }
}
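`crate::crypto::ct_eq` compares the two tokens in constant time, so the check does not leak how many leading characters matched. A minimal sketch of the idea (illustration only; production code should use a vetted implementation such as a crypto crate's):

```rust
// Constant-time byte comparison: always scans the full slice and folds
// differences into an accumulator, instead of returning at the first mismatch.
fn ct_eq(a: &[u8], b: &[u8]) -> bool {
    if a.len() != b.len() {
        return false;
    }
    let mut diff = 0u8;
    for (x, y) in a.iter().zip(b.iter()) {
        diff |= x ^ y;
    }
    diff == 0
}

fn main() {
    assert!(ct_eq(b"secret-token", b"secret-token"));
    assert!(!ct_eq(b"secret-token", b"secret-tokex"));
}
```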
#[derive(Serialize)]
struct AdminTemplateData {
    page_content: String,
    version: Option<&'static str>,
    users: Vec<Value>,
    config: Value,
    can_backup: bool,
    logged_in: bool,
}

impl AdminTemplateData {
    fn new(users: Vec<Value>) -> Self {
        Self {
            page_content: String::from("admin/page"),
            version: VERSION,
            users,
            config: CONFIG.prepare_json(),
            can_backup: *CAN_BACKUP,
            logged_in: true,
        }
    }

    fn render(self) -> Result<String, Error> {
        CONFIG.render_template(BASE_TEMPLATE, &self)
    }
}

#[get("/", rank = 1)]
fn admin_page(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
    let users = User::get_all(&conn);
    let users_json: Vec<Value> = users.iter().map(|u| u.to_json(&conn)).collect();

    let text = AdminTemplateData::new(users_json).render()?;
    Ok(Html(text))
}

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct InviteData {
    Email: String,
    email: String,
}

#[post("/invite", data = "<data>")]
fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let data: InviteData = data.into_inner();
    let email = data.email.clone();
    if User::find_by_mail(&data.email, &conn).is_some() {
        err!("User already exists")
    }

    if !CONFIG.invitations_allowed() {
        err!("Invitations are not allowed")
    }

    let mut user = User::new(email);
    user.save(&conn)?;

    if CONFIG.mail_enabled() {
        let org_name = "bitwarden_rs";
        mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None)
    } else {
        let invitation = Invitation::new(data.email);
        invitation.save(&conn)
    }
}

#[get("/logout")]
fn logout(mut cookies: Cookies) -> Result<Redirect, ()> {
    cookies.remove(Cookie::named(COOKIE_NAME));
    Ok(Redirect::to(ADMIN_PATH))
}

#[get("/users")]
@@ -29,40 +183,64 @@ fn get_users(_token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
Ok(Json(Value::Array(users_json)))
|
||||
}
|
||||
|
||||
#[post("/invite", data = "<data>")]
|
||||
fn invite_user(data: JsonUpcase<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
let data: InviteData = data.into_inner().data;
|
||||
let email = data.Email.clone();
|
||||
if User::find_by_mail(&data.Email, &conn).is_some() {
|
||||
err!("User already exists")
|
||||
}
|
||||
|
||||
if !CONFIG.invitations_allowed {
|
||||
err!("Invitations are not allowed")
|
||||
}
|
||||
|
||||
if let Some(ref mail_config) = CONFIG.mail {
|
||||
let mut user = User::new(email);
|
||||
user.save(&conn)?;
|
||||
let org_name = "bitwarden_rs";
|
||||
mail::send_invite(&user.email, &user.uuid, None, None, &org_name, None, mail_config)?;
|
||||
} else {
|
||||
let mut invitation = Invitation::new(data.Email);
|
||||
invitation.save(&conn)?;
|
||||
}
|
||||
|
||||
Ok(Json(json!({})))
|
||||
}
|
||||
|
||||
#[post("/users/<uuid>/delete")]
|
||||
fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult {
|
||||
fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let user = match User::find_by_uuid(&uuid, &conn) {
|
||||
Some(user) => user,
|
||||
None => err!("User doesn't exist"),
|
||||
};
|
||||
|
||||
user.delete(&conn)?;
|
||||
Ok(Json(json!({})))
|
||||
user.delete(&conn)
|
||||
}
|
||||
|
||||
#[post("/users/<uuid>/deauth")]
|
||||
fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let mut user = match User::find_by_uuid(&uuid, &conn) {
|
||||
Some(user) => user,
|
||||
None => err!("User doesn't exist"),
|
||||
};
|
||||
|
||||
Device::delete_all_by_user(&user.uuid, &conn)?;
|
||||
user.reset_security_stamp();
|
||||
|
||||
user.save(&conn)
|
||||
}
|
||||
|
||||
#[post("/users/<uuid>/remove-2fa")]
|
||||
fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
let mut user = match User::find_by_uuid(&uuid, &conn) {
|
||||
Some(user) => user,
|
||||
None => err!("User doesn't exist"),
|
||||
};
|
||||
|
||||
TwoFactor::delete_all_by_user(&user.uuid, &conn)?;
|
||||
user.totp_recover = None;
|
||||
user.save(&conn)
|
||||
}
|
||||
|
||||
#[post("/users/update_revision")]
|
||||
fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
||||
User::update_all_revisions(&conn)
|
||||
}
|
||||
|
||||
#[post("/config", data = "<data>")]
|
||||
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
|
||||
let data: ConfigBuilder = data.into_inner();
|
||||
CONFIG.update_config(data)
|
||||
}
|
||||
|
||||
#[post("/config/delete")]
|
||||
fn delete_config(_token: AdminToken) -> EmptyResult {
|
||||
CONFIG.delete_user_config()
|
||||
}
|
||||
|
||||
#[post("/config/backup_db")]
|
||||
fn backup_db(_token: AdminToken) -> EmptyResult {
|
||||
if *CAN_BACKUP {
|
||||
backup_database()
|
||||
} else {
|
||||
err!("Can't back up current DB (either it's not SQLite or the 'sqlite' binary is not present)");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AdminToken {}
|
||||
@@ -71,37 +249,29 @@ impl<'a, 'r> FromRequest<'a, 'r> for AdminToken {
|
||||
type Error = &'static str;
|
||||
|
||||
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
|
||||
let config_token = match CONFIG.admin_token.as_ref() {
|
||||
Some(token) => token,
|
||||
None => err_handler!("Admin panel is disabled"),
|
||||
if CONFIG.disable_admin_token() {
|
||||
Outcome::Success(AdminToken {})
|
||||
} else {
|
||||
let mut cookies = request.cookies();
|
||||
|
||||
let access_token = match cookies.get(COOKIE_NAME) {
|
||||
Some(cookie) => cookie.value(),
|
||||
None => return Outcome::Forward(()), // If there is no cookie, redirect to login
|
||||
};
|
||||
|
||||
// Get access_token
|
||||
let access_token: &str = match request.headers().get_one("Authorization") {
|
||||
Some(a) => match a.rsplit("Bearer ").next() {
|
||||
Some(split) => split,
|
||||
None => err_handler!("No access token provided"),
|
||||
},
|
||||
None => err_handler!("No access token provided"),
|
||||
};
|
||||
|
||||
// TODO: What authentication to use?
|
||||
// Option 1: Make it a config option
|
||||
// Option 2: Generate random token, and
|
||||
// Option 2a: Send it to admin email, like upstream
|
||||
// Option 2b: Print in console or save to data dir, so admin can check
|
||||
|
||||
use crate::auth::ClientIp;
|
||||
|
||||
let ip = match request.guard::<ClientIp>() {
|
||||
Outcome::Success(ip) => ip,
|
||||
Outcome::Success(ip) => ip.ip,
|
||||
_ => err_handler!("Error getting Client IP"),
|
||||
};
|
||||
|
||||
if access_token != config_token {
|
||||
err_handler!("Invalid admin token", format!("IP: {}.", ip.ip))
|
||||
if decode_admin(access_token).is_err() {
|
||||
// Remove admin cookie
|
||||
cookies.remove(Cookie::named(COOKIE_NAME));
|
||||
error!("Invalid or expired admin JWT. IP: {}.", ip);
|
||||
return Outcome::Forward(());
|
||||
}
|
||||
|
||||
Outcome::Success(AdminToken {})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,10 +1,12 @@
|
||||
use chrono::Utc;
|
||||
use rocket_contrib::json::Json;
|
||||
|
||||
use crate::db::models::*;
|
||||
use crate::db::DbConn;
|
||||
|
||||
use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
|
||||
use crate::auth::{decode_invite_jwt, Headers, InviteJWTClaims};
|
||||
use crate::auth::{decode_delete, decode_invite, decode_verify_email, Headers};
|
||||
use crate::crypto;
|
||||
use crate::mail;
|
||||
|
||||
use crate::CONFIG;
|
||||
@@ -25,6 +27,10 @@ pub fn routes() -> Vec<Route> {
|
||||
post_sstamp,
|
||||
post_email_token,
|
||||
post_email,
|
||||
post_verify_email,
|
||||
post_verify_email_token,
|
||||
post_delete_recover,
|
||||
post_delete_recover_token,
|
||||
delete_account,
|
||||
post_delete_account,
|
||||
revision_date,
|
||||
@@ -62,11 +68,15 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
     let mut user = match User::find_by_mail(&data.Email, &conn) {
         Some(user) => {
             if !user.password_hash.is_empty() {
+                if CONFIG.signups_allowed() {
                     err!("User already exists")
+                } else {
+                    err!("Registration not allowed or user already exists")
+                }
             }

             if let Some(token) = data.Token {
-                let claims: InviteJWTClaims = decode_invite_jwt(&token)?;
+                let claims = decode_invite(&token)?;
                 if claims.email == data.Email {
                     user
                 } else {
@@ -79,17 +89,17 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
                 }

                 user
-            } else if CONFIG.signups_allowed {
+            } else if CONFIG.signups_allowed() {
                 err!("Account with this email already exists")
             } else {
-                err!("Registration not allowed")
+                err!("Registration not allowed or user already exists")
             }
         }
         None => {
-            if CONFIG.signups_allowed || Invitation::take(&data.Email, &conn) {
+            if CONFIG.signups_allowed() || Invitation::take(&data.Email, &conn) || CONFIG.can_signup_user(&data.Email) {
                 User::new(data.Email.clone())
             } else {
-                err!("Registration not allowed")
+                err!("Registration not allowed or user already exists")
             }
         }
     };
@@ -106,7 +116,7 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
     }

     user.set_password(&data.MasterPasswordHash);
-    user.key = data.Key;
+    user.akey = data.Key;

     // Add extra fields if present
     if let Some(name) = data.Name {
@@ -122,6 +132,20 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
         user.public_key = Some(keys.PublicKey);
     }

+    if CONFIG.mail_enabled() {
+        if CONFIG.signups_verify() {
+            if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid) {
+                error!("Error sending welcome email: {:#?}", e);
+            }
+
+            user.last_verifying_at = Some(user.created_at);
+        } else {
+            if let Err(e) = mail::send_welcome(&user.email) {
+                error!("Error sending welcome email: {:#?}", e);
+            }
+        }
+    }
+
     user.save(&conn)
 }
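The registration arm above now admits a user when signups are open, when an invitation exists, or when `CONFIG.can_signup_user` accepts the address. A standalone sketch of such a domain-whitelist check (the comma-separated whitelist format is an assumption for illustration, not necessarily vaultwarden's):

fn can_signup_user(email: &str, signups_domains_whitelist: &str) -> bool {
    // Take the part after the last '@'; reject addresses without a domain.
    let domain = match email.rsplit_once('@') {
        Some((_, d)) if !d.is_empty() => d,
        _ => return false,
    };
    signups_domains_whitelist
        .split(',')
        .map(str::trim)
        .any(|allowed| !allowed.is_empty() && allowed.eq_ignore_ascii_case(domain))
}

fn main() {
    assert!(can_signup_user("alice@example.com", "example.com, example.org"));
    assert!(!can_signup_user("bob@elsewhere.net", "example.com, example.org"));
}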
@@ -204,7 +228,7 @@ fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn: DbCon
     }

     user.set_password(&data.NewMasterPasswordHash);
-    user.key = data.Key;
+    user.akey = data.Key;
     user.save(&conn)
 }
@@ -231,7 +255,7 @@ fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, conn: DbConn) ->
     user.client_kdf_iter = data.KdfIterations;
     user.client_kdf_type = data.Kdf;
     user.set_password(&data.NewMasterPasswordHash);
-    user.key = data.Key;
+    user.akey = data.Key;
     user.save(&conn)
 }
@@ -306,7 +330,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt:
     // Update user data
     let mut user = headers.user;

-    user.key = data.Key;
+    user.akey = data.Key;
     user.private_key = Some(data.PrivateKey);
     user.reset_security_stamp();
@@ -322,6 +346,7 @@ fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -
         err!("Invalid password")
     }

+    Device::delete_all_by_user(&user.uuid, &conn)?;
     user.reset_security_stamp();
     user.save(&conn)
 }
@@ -336,8 +361,9 @@ struct EmailTokenData {
 #[post("/accounts/email-token", data = "<data>")]
 fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data: EmailTokenData = data.into_inner().data;
+    let mut user = headers.user;

-    if !headers.user.check_valid_password(&data.MasterPasswordHash) {
+    if !user.check_valid_password(&data.MasterPasswordHash) {
         err!("Invalid password")
     }
@@ -345,7 +371,21 @@ fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: Db
         err!("Email already in use");
     }

-    Ok(())
+    if !CONFIG.signups_allowed() && !CONFIG.can_signup_user(&data.NewEmail) {
+        err!("Email cannot be changed to this address");
+    }
+
+    let token = crypto::generate_token(6)?;
+
+    if CONFIG.mail_enabled() {
+        if let Err(e) = mail::send_change_email(&data.NewEmail, &token) {
+            error!("Error sending change-email email: {:#?}", e);
+        }
+    }
+
+    user.email_new = Some(data.NewEmail);
+    user.email_new_token = Some(token);
+    user.save(&conn)
 }

 #[derive(Deserialize)]
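The new change-email flow above is two requests: this endpoint stores the pending address plus a short numeric token and mails the token out, and the later `post_email` call only commits the change once that token matches. A stand-in for `crypto::generate_token(6)` — the real helper returns a `Result` and may be implemented differently; this sketch assumes the `rand` (0.8) crate:

use rand::Rng;

fn generate_token(token_size: usize) -> String {
    let mut rng = rand::thread_rng();
    // One random decimal digit per position, so leading zeros are kept.
    (0..token_size).map(|_| rng.gen_range(0..10).to_string()).collect()
}

fn main() {
    let token = generate_token(6);
    assert_eq!(token.len(), 6);
    println!("change-email token: {}", token);
}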
@@ -356,8 +396,7 @@ struct ChangeEmailData {

     Key: String,
     NewMasterPasswordHash: String,
-    #[serde(rename = "Token")]
-    _Token: NumberOrString,
+    Token: NumberOrString,
 }

 #[post("/accounts/email", data = "<data>")]
@@ -373,14 +412,142 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
         err!("Email already in use");
     }

+    match user.email_new {
+        Some(ref val) => {
+            if val != &data.NewEmail {
+                err!("Email change mismatch");
+            }
+        }
+        None => err!("No email change pending"),
+    }
+
+    if CONFIG.mail_enabled() {
+        // Only check the token if we sent out an email...
+        match user.email_new_token {
+            Some(ref val) => {
+                if *val != data.Token.into_string() {
+                    err!("Token mismatch");
+                }
+            }
+            None => err!("No email change pending"),
+        }
+        user.verified_at = Some(Utc::now().naive_utc());
+    } else {
+        user.verified_at = None;
+    }
+
     user.email = data.NewEmail;
+    user.email_new = None;
+    user.email_new_token = None;
+
     user.set_password(&data.NewMasterPasswordHash);
-    user.key = data.Key;
+    user.akey = data.Key;

     user.save(&conn)
 }

+#[post("/accounts/verify-email")]
+fn post_verify_email(headers: Headers, _conn: DbConn) -> EmptyResult {
+    let user = headers.user;
+
+    if !CONFIG.mail_enabled() {
+        err!("Cannot verify email address");
+    }
+
+    if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) {
+        error!("Error sending delete account email: {:#?}", e);
+    }
+
+    Ok(())
+}
+
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct VerifyEmailTokenData {
+    UserId: String,
+    Token: String,
+}
+
+#[post("/accounts/verify-email-token", data = "<data>")]
+fn post_verify_email_token(data: JsonUpcase<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult {
+    let data: VerifyEmailTokenData = data.into_inner().data;
+
+    let mut user = match User::find_by_uuid(&data.UserId, &conn) {
+        Some(user) => user,
+        None => err!("User doesn't exist"),
+    };
+
+    let claims = match decode_verify_email(&data.Token) {
+        Ok(claims) => claims,
+        Err(_) => err!("Invalid claim"),
+    };
+    if claims.sub != user.uuid {
+        err!("Invalid claim");
+    }
+    user.verified_at = Some(Utc::now().naive_utc());
+    user.last_verifying_at = None;
+    user.login_verify_count = 0;
+    if let Err(e) = user.save(&conn) {
+        error!("Error saving email verification: {:#?}", e);
+    }
+
+    Ok(())
+}
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct DeleteRecoverData {
+    Email: String,
+}
+
+#[post("/accounts/delete-recover", data = "<data>")]
+fn post_delete_recover(data: JsonUpcase<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
+    let data: DeleteRecoverData = data.into_inner().data;
+
+    let user = User::find_by_mail(&data.Email, &conn);
+
+    if CONFIG.mail_enabled() {
+        if let Some(user) = user {
+            if let Err(e) = mail::send_delete_account(&user.email, &user.uuid) {
+                error!("Error sending delete account email: {:#?}", e);
+            }
+        }
+        Ok(())
+    } else {
+        // We don't support sending emails, but we shouldn't allow anybody
+        // to delete accounts without at least logging in... And if the user
+        // cannot remember their password then they will need to contact
+        // the administrator to delete it...
+        err!("Please contact the administrator to delete your account");
+    }
+}
+
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct DeleteRecoverTokenData {
+    UserId: String,
+    Token: String,
+}
+
+#[post("/accounts/delete-recover-token", data = "<data>")]
+fn post_delete_recover_token(data: JsonUpcase<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
+    let data: DeleteRecoverTokenData = data.into_inner().data;
+
+    let user = match User::find_by_uuid(&data.UserId, &conn) {
+        Some(user) => user,
+        None => err!("User doesn't exist"),
+    };
+
+    let claims = match decode_delete(&data.Token) {
+        Ok(claims) => claims,
+        Err(_) => err!("Invalid claim"),
+    };
+    if claims.sub != user.uuid {
+        err!("Invalid claim");
+    }
+    user.delete(&conn)
+}
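A note on the Upcased field names (`UserId`, `Token`): Bitwarden clients send PascalCase JSON, which is what the `JsonUpcase` wrapper and the `#[allow(non_snake_case)]` structs accommodate. The same wire format can be expressed with a plain serde attribute; a runnable sketch assuming the `serde` and `serde_json` crates:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct DeleteRecoverTokenData {
    user_id: String, // matches "UserId" on the wire
    token: String,   // matches "Token"
}

fn main() {
    let body = r#"{ "UserId": "some-uuid", "Token": "some-jwt" }"#;
    let data: DeleteRecoverTokenData = serde_json::from_str(body).unwrap();
    println!("{:?}", data);
}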
 #[post("/accounts/delete", data = "<data>")]
 fn post_delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
     delete_account(data, headers, conn)
@@ -419,9 +586,9 @@ fn password_hint(data: JsonUpcase<PasswordHintData>, conn: DbConn) -> EmptyResul
         None => return Ok(()),
     };

-    if let Some(ref mail_config) = CONFIG.mail {
-        mail::send_password_hint(&data.Email, hint, mail_config)?;
-    } else if CONFIG.show_password_hint {
+    if CONFIG.mail_enabled() {
+        mail::send_password_hint(&data.Email, hint)?;
+    } else if CONFIG.show_password_hint() {
         if let Some(hint) = hint {
             err!(format!("Your password hint is: {}", &hint));
         } else {
@@ -74,10 +74,10 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
     let user_json = headers.user.to_json(&conn);

     let folders = Folder::find_by_user(&headers.user.uuid, &conn);
-    let folders_json: Vec<Value> = folders.iter().map(|c| c.to_json()).collect();
+    let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();

     let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
-    let collections_json: Vec<Value> = collections.iter().map(|c| c.to_json()).collect();
+    let collections_json: Vec<Value> = collections.iter().map(Collection::to_json).collect();

     let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
     let ciphers_json: Vec<Value> = ciphers
@@ -88,7 +88,7 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
     let domains_json = if data.exclude_domains {
         Value::Null
     } else {
-        api::core::get_eq_domains(headers).unwrap().into_inner()
+        api::core::_get_eq_domains(headers, true).unwrap().into_inner()
     };

     Ok(Json(json!({
@@ -221,6 +221,10 @@ pub fn update_cipher_from_data(
     nt: &Notify,
     ut: UpdateType,
 ) -> EmptyResult {
+    if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.OrganizationId {
+        err!("Organization mismatch. Please resync the client before updating the cipher")
+    }
+
     if let Some(org_id) = data.OrganizationId {
         match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
             None => err!("You don't have permission to add item to organization"),
@@ -263,7 +267,7 @@ pub fn update_cipher_from_data(
         err!("Attachment is not owned by the cipher")
     }

-    saved_att.key = Some(attachment.Key);
+    saved_att.akey = Some(attachment.Key);
     saved_att.file_name = attachment.FileName;

     saved_att.save(&conn)?;
@@ -300,10 +304,13 @@ pub fn update_cipher_from_data(
     cipher.password_history = data.PasswordHistory.map(|f| f.to_string());

     cipher.save(&conn)?;
+    cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn)?;

+    if ut != UpdateType::None {
         nt.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn));
+    }

-    cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn)
+    Ok(())
 }
 use super::folders::FolderData;

@@ -346,25 +353,18 @@ fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbC
     }

     // Read and create the ciphers
-    for (index, cipher_data) in data.Ciphers.into_iter().enumerate() {
+    for (index, mut cipher_data) in data.Ciphers.into_iter().enumerate() {
         let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone());
+        cipher_data.FolderId = folder_uuid;

         let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
-        update_cipher_from_data(
-            &mut cipher,
-            cipher_data,
-            &headers,
-            false,
-            &conn,
-            &nt,
-            UpdateType::CipherCreate,
-        )?;
-
-        cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn)?;
+        update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None)?;
     }

     let mut user = headers.user;
-    user.update_revision(&conn)
+    user.update_revision(&conn)?;
+    nt.send_user_update(UpdateType::Vault, &user);
+    Ok(())
 }

 #[put("/ciphers/<uuid>/admin", data = "<data>")]
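The import rewrite above leans on the `ut != UpdateType::None` guard added to `update_cipher_from_data`: each imported cipher is saved with `UpdateType::None`, so no per-item websocket push goes out, and a single `UpdateType::Vault` update follows once the import finishes. A reduced sketch of that pattern (the `Notify` stub and the trimmed-down enum are illustrative, not the real types):

#[derive(PartialEq, Clone, Copy, Debug)]
enum UpdateType {
    Vault,
    None,
}

struct Notify;

impl Notify {
    fn send_cipher_update(&self, ut: UpdateType) {
        println!("websocket push: cipher {:?}", ut);
    }
    fn send_user_update(&self, ut: UpdateType) {
        println!("websocket push: user {:?}", ut);
    }
}

fn save_cipher(nt: &Notify, ut: UpdateType) {
    // ...persist the cipher here...
    if ut != UpdateType::None {
        nt.send_cipher_update(ut); // per-item push, skipped during bulk import
    }
}

fn import(nt: &Notify, cipher_count: usize) {
    for _ in 0..cipher_count {
        save_cipher(nt, UpdateType::None); // no push per imported item
    }
    nt.send_user_update(UpdateType::Vault); // one push for the whole vault
}

fn main() {
    import(&Notify, 3); // prints exactly one push
}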
@@ -608,7 +608,7 @@ fn share_cipher_by_uuid(
             None => err!("Invalid collection ID provided"),
             Some(collection) => {
                 if collection.is_writable_by_user(&headers.user.uuid, &conn) {
-                    CollectionCipher::save(&cipher.uuid.clone(), &collection.uuid, &conn)?;
+                    CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?;
                     shared_to_collection = true;
                 } else {
                     err!("No rights to modify the collection")
@@ -632,7 +632,14 @@ fn share_cipher_by_uuid(
 }

 #[post("/ciphers/<uuid>/attachment", format = "multipart/form-data", data = "<data>")]
-fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
+fn post_attachment(
+    uuid: String,
+    data: Data,
+    content_type: &ContentType,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> JsonResult {
     let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
         Some(cipher) => cipher,
         None => err!("Cipher doesn't exist"),
@@ -646,13 +653,13 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
     let boundary_pair = params.next().expect("No boundary provided");
     let boundary = boundary_pair.1;

-    let base_path = Path::new(&CONFIG.attachments_folder).join(&cipher.uuid);
+    let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher.uuid);

     let mut attachment_key = None;

     Multipart::with_body(data.open(), boundary)
         .foreach_entry(|mut field| {
-            match field.headers.name.as_str() {
+            match &*field.headers.name {
                 "key" => {
                     use std::io::Read;
                     let mut key_buffer = String::new();
@@ -684,7 +691,7 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
                 };

                 let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
-                attachment.key = attachment_key.clone();
+                attachment.akey = attachment_key.clone();
                 attachment.save(&conn).expect("Error saving attachment");
             }
             _ => error!("Invalid multipart name"),
@@ -811,83 +818,115 @@ fn delete_cipher_selected_post(data: JsonUpcase<Value>, headers: Headers, conn:
     delete_cipher_selected(data, headers, conn, nt)
 }

-#[post("/ciphers/move", data = "<data>")]
-fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
-    let data = data.into_inner().data;
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct MoveCipherData {
+    FolderId: Option<String>,
+    Ids: Vec<String>,
+}

-    let folder_id = match data.get("FolderId") {
-        Some(folder_id) => match folder_id.as_str() {
-            Some(folder_id) => match Folder::find_by_uuid(folder_id, &conn) {
+#[post("/ciphers/move", data = "<data>")]
+fn move_cipher_selected(data: JsonUpcase<MoveCipherData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+    let data = data.into_inner().data;
+    let user_uuid = headers.user.uuid;
+
+    if let Some(ref folder_id) = data.FolderId {
+        match Folder::find_by_uuid(folder_id, &conn) {
             Some(folder) => {
-                if folder.user_uuid != headers.user.uuid {
+                if folder.user_uuid != user_uuid {
                     err!("Folder is not owned by user")
                 }
-                Some(folder.uuid)
             }
             None => err!("Folder doesn't exist"),
-            },
-            None => err!("Folder id provided in wrong format"),
-        },
-        None => None,
-    };
+        }
+    }

-    let uuids = match data.get("Ids") {
-        Some(ids) => match ids.as_array() {
-            Some(ids) => ids.iter().filter_map(Value::as_str),
-            None => err!("Posted ids field is not an array"),
-        },
-        None => err!("Request missing ids field"),
-    };
-
-    for uuid in uuids {
-        let mut cipher = match Cipher::find_by_uuid(uuid, &conn) {
+    for uuid in data.Ids {
+        let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
             Some(cipher) => cipher,
             None => err!("Cipher doesn't exist"),
         };

-        if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) {
+        if !cipher.is_accessible_to_user(&user_uuid, &conn) {
             err!("Cipher is not accessible by user")
         }

-        // Move cipher
-        cipher.move_to_folder(folder_id.clone(), &headers.user.uuid, &conn)?;
-        cipher.save(&conn)?;
+        cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &conn)?;

-        nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
+        nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &[user_uuid.clone()]);
     }

     Ok(())
 }

 #[put("/ciphers/move", data = "<data>")]
-fn move_cipher_selected_put(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn move_cipher_selected_put(
+    data: JsonUpcase<MoveCipherData>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     move_cipher_selected(data, headers, conn, nt)
 }
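The `move_cipher_selected` rewrite above replaces hand-rolled `serde_json::Value` inspection ("is it a string? is it an array?") with a typed payload, so malformed bodies are rejected by serde before the handler body runs. A runnable sketch of the difference — the `JsonUpcase` wrapper is omitted and only the `serde` and `serde_json` crates are assumed:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct MoveCipherData {
    FolderId: Option<String>,
    Ids: Vec<String>,
}

fn main() {
    let body = r#"{ "FolderId": null, "Ids": ["uuid-1", "uuid-2"] }"#;
    let data: MoveCipherData = serde_json::from_str(body).unwrap();
    assert!(data.FolderId.is_none());
    assert_eq!(data.Ids.len(), 2);

    // A missing or mistyped "Ids" field now fails up front:
    assert!(serde_json::from_str::<MoveCipherData>(r#"{ "FolderId": null }"#).is_err());
}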
-#[post("/ciphers/purge", data = "<data>")]
-fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+#[derive(FromForm)]
+struct OrganizationId {
+    #[form(field = "organizationId")]
+    org_id: String,
+}
+
+#[post("/ciphers/purge?<organization..>", data = "<data>")]
+fn delete_all(
+    organization: Option<Form<OrganizationId>>,
+    data: JsonUpcase<PasswordData>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     let data: PasswordData = data.into_inner().data;
     let password_hash = data.MasterPasswordHash;

-    let user = headers.user;
+    let mut user = headers.user;

     if !user.check_valid_password(&password_hash) {
         err!("Invalid password")
     }

+    match organization {
+        Some(org_data) => {
+            // Organization ID in query params, purging organization vault
+            match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &conn) {
+                None => err!("You don't have permission to purge the organization vault"),
+                Some(user_org) => {
+                    if user_org.atype == UserOrgType::Owner {
+                        Cipher::delete_all_by_organization(&org_data.org_id, &conn)?;
+                        Collection::delete_all_by_organization(&org_data.org_id, &conn)?;
+                        nt.send_user_update(UpdateType::Vault, &user);
+                        Ok(())
+                    } else {
+                        err!("You don't have permission to purge the organization vault");
+                    }
+                }
+            }
+        }
+        None => {
+            // No organization ID in query params, purging user vault
+            // Delete ciphers and their attachments
+            for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) {
+                cipher.delete(&conn)?;
+                nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
+            }
+
+            // Delete folders
+            for f in Folder::find_by_user(&user.uuid, &conn) {
+                f.delete(&conn)?;
+                nt.send_folder_update(UpdateType::FolderCreate, &f);
+            }
+
+            user.update_revision(&conn)?;
+            nt.send_user_update(UpdateType::Vault, &user);
+            Ok(())
+        }
+    }
+}

 fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
@@ -25,7 +25,7 @@ pub fn routes() -> Vec<Route> {
 fn get_folders(headers: Headers, conn: DbConn) -> JsonResult {
     let folders = Folder::find_by_user(&headers.user.uuid, &conn);

-    let folders_json: Vec<Value> = folders.iter().map(|c| c.to_json()).collect();
+    let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();

     Ok(Json(json!({
         "Data": folders_json,
@@ -59,7 +59,7 @@ pub struct FolderData {
 fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
     let data: FolderData = data.into_inner().data;

-    let mut folder = Folder::new(headers.user.uuid.clone(), data.Name);
+    let mut folder = Folder::new(headers.user.uuid, data.Name);

     folder.save(&conn)?;
     nt.send_folder_update(UpdateType::FolderCreate, &folder);
@@ -11,6 +11,7 @@ pub fn routes() -> Vec<Route> {
         get_eq_domains,
         post_eq_domains,
         put_eq_domains,
+        hibp_breach,
     ];

     let mut routes = Vec::new();
@@ -32,10 +33,10 @@ use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;

-use crate::db::DbConn;
-
 use crate::api::{EmptyResult, JsonResult, JsonUpcase};
 use crate::auth::Headers;
+use crate::db::DbConn;
+use crate::error::Error;

 #[put("/devices/identifier/<uuid>/clear-token")]
 fn clear_device_token(uuid: String) -> EmptyResult {
@@ -62,7 +63,7 @@ fn put_device_token(uuid: String, data: JsonUpcase<Value>, headers: Headers) ->
     Ok(Json(json!({
         "Id": headers.device.uuid,
         "Name": headers.device.name,
-        "Type": headers.device.type_,
+        "Type": headers.device.atype,
         "Identifier": headers.device.uuid,
         "CreationDate": crate::util::format_date(&headers.device.created_at),
     })))
@@ -80,6 +81,10 @@ const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");

 #[get("/settings/domains")]
 fn get_eq_domains(headers: Headers) -> JsonResult {
+    _get_eq_domains(headers, false)
+}
+
+fn _get_eq_domains(headers: Headers, no_excluded: bool) -> JsonResult {
     let user = headers.user;
     use serde_json::from_str;
@@ -92,6 +97,10 @@ fn get_eq_domains(headers: Headers) -> JsonResult {
         global.Excluded = excluded_globals.contains(&global.Type);
     }

+    if no_excluded {
+        globals.retain(|g| !g.Excluded);
+    }
+
     Ok(Json(json!({
         "EquivalentDomains": equivalent_domains,
         "GlobalEquivalentDomains": globals,
@@ -116,8 +125,8 @@ fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: Db
     let mut user = headers.user;
     use serde_json::to_string;

-    user.excluded_globals = to_string(&excluded_globals).unwrap_or("[]".to_string());
-    user.equivalent_domains = to_string(&equivalent_domains).unwrap_or("[]".to_string());
+    user.excluded_globals = to_string(&excluded_globals).unwrap_or_else(|_| "[]".to_string());
+    user.equivalent_domains = to_string(&equivalent_domains).unwrap_or_else(|_| "[]".to_string());

     user.save(&conn)?;
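A side note on the `unwrap_or` → `unwrap_or_else` change above: `unwrap_or(x)` always evaluates `x`, allocating the fallback `String` even when serialization succeeds, while `unwrap_or_else` only runs its closure on the error path (this is what clippy's `or_fun_call` lint flags). A minimal illustration:

fn fallback() -> String {
    println!("allocating fallback");
    "[]".to_string()
}

fn main() {
    let ok: Result<String, ()> = Ok("[\"a\"]".to_string());

    // Eager: fallback() runs (and prints) even though the Result is Ok.
    let _ = ok.clone().unwrap_or(fallback());

    // Lazy: the closure never runs on the Ok path.
    let _ = ok.unwrap_or_else(|_| fallback());
}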
@@ -128,3 +137,46 @@ fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: Db
 fn put_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: DbConn) -> JsonResult {
     post_eq_domains(data, headers, conn)
 }
+
+#[get("/hibp/breach?<username>")]
+fn hibp_breach(username: String) -> JsonResult {
+    let user_agent = "Bitwarden_RS";
+    let url = format!(
+        "https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false",
+        username
+    );
+
+    use reqwest::{header::USER_AGENT, Client};
+
+    if let Some(api_key) = crate::CONFIG.hibp_api_key() {
+        let hibp_client = Client::builder().use_sys_proxy().build()?;
+
+        let res = hibp_client
+            .get(&url)
+            .header(USER_AGENT, user_agent)
+            .header("hibp-api-key", api_key)
+            .send()?;
+
+        // If we get a 404, return a 404, it means no breached accounts
+        if res.status() == 404 {
+            return Err(Error::empty().with_code(404));
+        }
+
+        let value: Value = res.error_for_status()?.json()?;
+        Ok(Json(value))
+    } else {
+        Ok(Json(json!([{
+            "Name": "HaveIBeenPwned",
+            "Title": "Manual HIBP Check",
+            "Domain": "haveibeenpwned.com",
+            "BreachDate": "2019-08-18T00:00:00Z",
+            "AddedDate": "2019-08-18T00:00:00Z",
+            "Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{account}\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/account/{account}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noopener\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>", account=username),
+            "LogoPath": "/bwrs_static/hibp.png",
+            "PwnCount": 0,
+            "DataClasses": [
+                "Error - No API key set!"
+            ]
+        }])))
+    }
+}
@@ -1,19 +1,16 @@
+use rocket::request::Form;
+use rocket::Route;
 use rocket_contrib::json::Json;
 use serde_json::Value;

+use crate::api::{
+    EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType,
+};
+use crate::auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders};
+use crate::db::models::*;
+use crate::db::DbConn;
+use crate::CONFIG;

-use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
-use crate::auth::{decode_invite_jwt, AdminHeaders, Headers, InviteJWTClaims, OwnerHeaders};

 use crate::mail;

-use serde::{Deserialize, Deserializer};

-use rocket::Route;
-use crate::CONFIG;

 pub fn routes() -> Vec<Route> {
     routes![
@@ -26,6 +23,7 @@ pub fn routes() -> Vec<Route> {
         get_org_collections,
         get_org_collection_detail,
         get_collection_users,
+        put_collection_users,
         put_organization,
         post_organization,
         post_organization_collections,
@@ -78,13 +76,13 @@ struct NewCollectionData {
 fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn) -> JsonResult {
     let data: OrgData = data.into_inner().data;

-    let mut org = Organization::new(data.Name, data.BillingEmail);
-    let mut user_org = UserOrganization::new(headers.user.uuid.clone(), org.uuid.clone());
-    let mut collection = Collection::new(org.uuid.clone(), data.CollectionName);
+    let org = Organization::new(data.Name, data.BillingEmail);
+    let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone());
+    let collection = Collection::new(org.uuid.clone(), data.CollectionName);

-    user_org.key = data.Key;
+    user_org.akey = data.Key;
     user_org.access_all = true;
-    user_org.type_ = UserOrgType::Owner as i32;
+    user_org.atype = UserOrgType::Owner as i32;
     user_org.status = UserOrgStatus::Confirmed as i32;

     org.save(&conn)?;
@@ -129,7 +127,7 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe
     match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
         None => err!("User not part of organization"),
         Some(user_org) => {
-            if user_org.type_ == UserOrgType::Owner {
+            if user_org.atype == UserOrgType::Owner {
                 let num_owners =
                     UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
@@ -223,7 +221,7 @@ fn post_organization_collections(
         None => err!("Can't find organization details"),
     };

-    let mut collection = Collection::new(org.uuid.clone(), data.Name);
+    let collection = Collection::new(org.uuid, data.Name);
     collection.save(&conn)?;

     Ok(Json(collection.to_json()))
@@ -264,7 +262,7 @@ fn post_organization_collection_update(
         err!("Collection is not owned by organization");
     }

-    collection.name = data.Name.clone();
+    collection.name = data.Name;
     collection.save(&conn)?;

     Ok(Json(collection.to_json()))
@@ -371,15 +369,44 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders,
         .map(|col_user| {
             UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn)
                 .unwrap()
-                .to_json_collection_user_details(col_user.read_only, &conn)
+                .to_json_collection_user_details(col_user.read_only)
         })
         .collect();

-    Ok(Json(json!({
-        "Data": user_list,
-        "Object": "list",
-        "ContinuationToken": null,
-    })))
+    Ok(Json(json!(user_list)))
 }

+#[put("/organizations/<org_id>/collections/<coll_id>/users", data = "<data>")]
+fn put_collection_users(
+    org_id: String,
+    coll_id: String,
+    data: JsonUpcaseVec<CollectionData>,
+    _headers: AdminHeaders,
+    conn: DbConn,
+) -> EmptyResult {
+    // Get org and collection, check that collection is from org
+    if Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn).is_none() {
+        err!("Collection not found in Organization")
+    }
+
+    // Delete all the user-collections
+    CollectionUser::delete_all_by_collection(&coll_id, &conn)?;
+
+    // And then add all the received ones (except if the user has access_all)
+    for d in data.iter().map(|d| &d.data) {
+        let user = match UserOrganization::find_by_uuid(&d.Id, &conn) {
+            Some(u) => u,
+            None => err!("User is not part of organization"),
+        };
+
+        if user.access_all {
+            continue;
+        }
+
+        CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, &conn)?;
+    }
+
+    Ok(())
+}

 #[derive(FromForm)]
@@ -415,14 +442,6 @@ fn get_org_users(org_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonRe
     })))
 }

-fn deserialize_collections<'de, D>(deserializer: D) -> Result<Vec<CollectionData>, D::Error>
-where
-    D: Deserializer<'de>,
-{
-    // Deserialize null to empty Vec
-    Deserialize::deserialize(deserializer).or(Ok(vec![]))
-}
-
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct CollectionData {
@@ -435,8 +454,7 @@ struct CollectionData {
 struct InviteData {
     Emails: Vec<String>,
     Type: NumberOrString,
-    #[serde(deserialize_with = "deserialize_collections")]
-    Collections: Vec<CollectionData>,
+    Collections: Option<Vec<CollectionData>>,
     AccessAll: Option<bool>,
 }
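Dropping the custom `deserialize_collections` helper in favour of `Option<Vec<CollectionData>>` works because the `Option` already models "absent or null", and `.iter().flatten()` turns `None` into an empty iteration, keeping the call sites loop-shaped (see the `data.Collections.iter().flatten()` hunks below). A standalone sketch:

fn main() {
    let some: Option<Vec<&str>> = Some(vec!["col-1", "col-2"]);
    let none: Option<Vec<&str>> = None;

    // Iterates the inner Vec when it is present...
    for c in some.iter().flatten() {
        println!("assigning collection {}", c);
    }

    // ...and is simply a zero-length loop when the field was null or missing.
    assert_eq!(none.iter().flatten().count(), 0);
}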
@@ -454,18 +472,19 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
     }

     for email in data.Emails.iter() {
-        let mut user_org_status = match CONFIG.mail {
-            Some(_) => UserOrgStatus::Invited as i32,
-            None => UserOrgStatus::Accepted as i32, // Automatically mark user as accepted if no email invites
+        let mut user_org_status = if CONFIG.mail_enabled() {
+            UserOrgStatus::Invited as i32
+        } else {
+            UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites
         };
         let user = match User::find_by_mail(&email, &conn) {
             None => {
-                if !CONFIG.invitations_allowed {
+                if !CONFIG.invitations_allowed() {
                     err!(format!("User email does not exist: {}", email))
                 }

-                if CONFIG.mail.is_none() {
-                    let mut invitation = Invitation::new(email.clone());
+                if !CONFIG.mail_enabled() {
+                    let invitation = Invitation::new(email.clone());
                     invitation.save(&conn)?;
                 }
@@ -486,12 +505,12 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
         let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
         let access_all = data.AccessAll.unwrap_or(false);
         new_user.access_all = access_all;
-        new_user.type_ = new_type;
+        new_user.atype = new_type;
         new_user.status = user_org_status;

         // If no accessAll, add the collections received
         if !access_all {
-            for col in &data.Collections {
+            for col in data.Collections.iter().flatten() {
                 match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                     None => err!("Collection not found in Organization"),
                     Some(collection) => {
|
||||
|
||||
new_user.save(&conn)?;
|
||||
|
||||
if let Some(ref mail_config) = CONFIG.mail {
|
||||
if CONFIG.mail_enabled() {
|
||||
let org_name = match Organization::find_by_uuid(&org_id, &conn) {
|
||||
Some(org) => org.name,
|
||||
None => err!("Error looking up organization"),
|
||||
@@ -516,7 +535,6 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
                 Some(new_user.uuid),
                 &org_name,
                 Some(headers.user.email.clone()),
-                mail_config,
             )?;
         }
     }
@@ -526,11 +544,11 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade

 #[post("/organizations/<org_id>/users/<user_org>/reinvite")]
 fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
-    if !CONFIG.invitations_allowed {
+    if !CONFIG.invitations_allowed() {
         err!("Invitations are not allowed.")
     }

-    if CONFIG.mail.is_none() {
+    if !CONFIG.mail_enabled() {
         err!("SMTP is not configured.")
     }
@@ -553,7 +571,7 @@ fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn:
         None => err!("Error looking up organization."),
     };

-    if let Some(ref mail_config) = CONFIG.mail {
+    if CONFIG.mail_enabled() {
         mail::send_invite(
             &user.email,
             &user.uuid,
@@ -561,10 +579,9 @@ fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn:
             Some(user_org.uuid),
             &org_name,
             Some(headers.user.email),
-            mail_config,
         )?;
     } else {
-        let mut invitation = Invitation::new(user.email.clone());
+        let invitation = Invitation::new(user.email);
         invitation.save(&conn)?;
     }
@@ -582,7 +599,7 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
     // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead
     let data: AcceptData = data.into_inner().data;
     let token = &data.Token;
-    let claims: InviteJWTClaims = decode_invite_jwt(&token)?;
+    let claims = decode_invite(&token)?;

     match User::find_by_mail(&claims.email, &conn) {
         Some(_) => {
@@ -605,7 +622,7 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
         None => err!("Invited user not found"),
     }

-    if let Some(ref mail_config) = CONFIG.mail {
+    if CONFIG.mail_enabled() {
         let mut org_name = String::from("bitwarden_rs");
         if let Some(org_id) = &claims.org_id {
             org_name = match Organization::find_by_uuid(&org_id, &conn) {
@@ -615,10 +632,10 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
             };
             if let Some(invited_by_email) = &claims.invited_by_email {
                 // User was invited to an organization, so they must be confirmed manually after acceptance
-                mail::send_invite_accepted(&claims.email, invited_by_email, &org_name, mail_config)?;
+                mail::send_invite_accepted(&claims.email, invited_by_email, &org_name)?;
             } else {
                 // User was invited from /admin, so they are automatically confirmed
-                mail::send_invite_confirmed(&claims.email, &org_name, mail_config)?;
+                mail::send_invite_confirmed(&claims.email, &org_name)?;
             }
         }
@@ -640,7 +657,7 @@ fn confirm_invite(
         None => err!("The specified user isn't a member of the organization"),
     };

-    if user_to_confirm.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
+    if user_to_confirm.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
         err!("Only Owners can confirm Managers, Admins or Owners")
     }
@@ -649,12 +666,12 @@ fn confirm_invite(
     }

     user_to_confirm.status = UserOrgStatus::Confirmed as i32;
-    user_to_confirm.key = match data["Key"].as_str() {
+    user_to_confirm.akey = match data["Key"].as_str() {
         Some(key) => key.to_string(),
         None => err!("Invalid key provided"),
     };

-    if let Some(ref mail_config) = CONFIG.mail {
+    if CONFIG.mail_enabled() {
         let org_name = match Organization::find_by_uuid(&org_id, &conn) {
             Some(org) => org.name,
             None => err!("Error looking up organization."),
@@ -663,7 +680,7 @@ fn confirm_invite(
             Some(user) => user.email,
             None => err!("Error looking up user."),
         };
-        mail::send_invite_confirmed(&address, &org_name, mail_config)?;
+        mail::send_invite_confirmed(&address, &org_name)?;
     }

     user_to_confirm.save(&conn)
@@ -683,8 +700,7 @@ fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: D
 #[allow(non_snake_case)]
 struct EditUserData {
     Type: NumberOrString,
-    #[serde(deserialize_with = "deserialize_collections")]
-    Collections: Vec<CollectionData>,
+    Collections: Option<Vec<CollectionData>>,
     AccessAll: bool,
 }
@@ -719,18 +735,18 @@ fn edit_user(
         None => err!("The specified user isn't member of the organization"),
     };

-    if new_type != user_to_edit.type_
-        && (user_to_edit.type_ >= UserOrgType::Admin || new_type >= UserOrgType::Admin)
+    if new_type != user_to_edit.atype
+        && (user_to_edit.atype >= UserOrgType::Admin || new_type >= UserOrgType::Admin)
         && headers.org_user_type != UserOrgType::Owner
     {
         err!("Only Owners can grant and remove Admin or Owner privileges")
     }

-    if user_to_edit.type_ == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner {
+    if user_to_edit.atype == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner {
         err!("Only Owners can edit Owner users")
     }

-    if user_to_edit.type_ == UserOrgType::Owner && new_type != UserOrgType::Owner {
+    if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner {
        // Removing owner permmission, check that there are at least another owner
        let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
@@ -740,7 +756,7 @@ fn edit_user(
     }

     user_to_edit.access_all = data.AccessAll;
-    user_to_edit.type_ = new_type as i32;
+    user_to_edit.atype = new_type as i32;

     // Delete all the odd collections
     for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &user_to_edit.user_uuid, &conn) {
@@ -749,7 +765,7 @@ fn edit_user(

     // If no accessAll, add the collections received
     if !data.AccessAll {
-        for col in &data.Collections {
+        for col in data.Collections.iter().flatten() {
             match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                 None => err!("Collection not found in Organization"),
                 Some(collection) => {
@@ -769,11 +785,11 @@ fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn:
         None => err!("User to delete isn't member of the organization"),
     };

-    if user_to_delete.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
+    if user_to_delete.atype != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
         err!("Only Owners can delete Admins or Owners")
     }

-    if user_to_delete.type_ == UserOrgType::Owner {
+    if user_to_delete.atype == UserOrgType::Owner {
         // Removing owner, check that there are at least another owner
         let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
@@ -826,7 +842,7 @@ fn post_org_import(
         None => err!("User is not part of the organization"),
     };

-    if org_user.type_ < UserOrgType::Admin {
+    if org_user.atype < UserOrgType::Admin {
         err!("Only admins or owners can import into an organization")
     }
@@ -835,7 +851,7 @@ fn post_org_import(
         .Collections
         .into_iter()
         .map(|coll| {
-            let mut collection = Collection::new(org_id.clone(), coll.Name);
+            let collection = Collection::new(org_id.clone(), coll.Name);
             if collection.save(&conn).is_err() {
                 err!("Failed to create Collection");
             }
Some files were not shown because too many files have changed in this diff.