Compare commits
523 Commits
.env.template
@@ -1,13 +1,14 @@
+# shellcheck disable=SC2034,SC2148
 ## Vaultwarden Configuration File
 ## Uncomment any of the following lines to change the defaults
 ##
 ## Be aware that most of these settings will be overridden if they were changed
 ## in the admin interface. Those overrides are stored within DATA_FOLDER/config.json .
 ##
-## By default, vaultwarden expects for this file to be named ".env" and located
+## By default, Vaultwarden expects for this file to be named ".env" and located
 ## in the current working directory. If this is not the case, the environment
 ## variable ENV_FILE can be set to the location of this file prior to starting
-## vaultwarden.
+## Vaultwarden.
 
 ## Main data folder
 # DATA_FOLDER=data
@@ -29,6 +30,10 @@
 ## Define the size of the connection pool used for connecting to the database.
 # DATABASE_MAX_CONNS=10
 
+## Database timeout
+## Timeout when acquiring database connection
+# DATABASE_TIMEOUT=30
+
 ## Database connection initialization
 ## Allows SQL statements to be run whenever a new database connection is created.
 ## This is mainly useful for connection-scoped pragmas.
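As a minimal sketch of what such a connection-scoped pragma string can look like for SQLite (the variable name `SQLITE_CONN_INIT` and the pragma values are assumptions, not taken from this diff):

```bash
# Hypothetical .env entry: SQL run on every new SQLite connection.
# SQLITE_CONN_INIT is assumed here; check the full .env.template for the real key.
SQLITE_CONN_INIT="PRAGMA busy_timeout = 5000; PRAGMA synchronous = NORMAL;"
```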
@@ -71,6 +76,13 @@
 # WEBSOCKET_ADDRESS=0.0.0.0
 # WEBSOCKET_PORT=3012
 
+## Enables push notifications (requires key and id from https://bitwarden.com/host)
+# PUSH_ENABLED=true
+# PUSH_INSTALLATION_ID=CHANGEME
+# PUSH_INSTALLATION_KEY=CHANGEME
+## Don't change this unless you know what you're doing.
+# PUSH_RELAY_URI=https://push.bitwarden.com
+
 ## Controls whether users are allowed to create Bitwarden Sends.
 ## This setting applies globally to all users.
 ## To control this on a per-org basis instead, use the "Disable Send" org policy.
@@ -80,11 +92,34 @@
 ## This setting applies globally to all users.
 # EMERGENCY_ACCESS_ALLOWED=true
 
+## Controls whether event logging is enabled for organizations
+## This setting applies to organizations.
+## Disabled by default. Also check the EVENT_CLEANUP_SCHEDULE and EVENTS_DAYS_RETAIN settings.
+# ORG_EVENTS_ENABLED=false
+
+## Number of days to retain events stored in the database.
+## If unset (the default), events are kept indefinitely and the scheduled job is disabled!
+# EVENTS_DAYS_RETAIN=
+
+## BETA FEATURE: Groups
+## Controls whether group support is enabled for organizations
+## This setting applies to organizations.
+## Disabled by default because this is a beta feature; it contains known issues!
+## KNOW WHAT YOU ARE DOING!
+# ORG_GROUPS_ENABLED=false
+
 ## Job scheduler settings
 ##
 ## Job schedules use a cron-like syntax (as parsed by https://crates.io/crates/cron),
 ## and are always in terms of UTC time (regardless of your local time zone settings).
 ##
+## The schedule format is a bit different from crontab as crontab does not contain seconds.
+## You can test the format here: https://crontab.guru, but remove the first digit!
+## SEC MIN HOUR DAY OF MONTH MONTH DAY OF WEEK
+## "0 30 9,12,15 1,15 May-Aug Mon,Wed,Fri"
+## "0 30 * * * * "
+## "0 30 1 * * * "
+##
 ## How often (in ms) the job scheduler thread checks for jobs that need running.
 ## Set to 0 to globally disable scheduled jobs.
 # JOB_POLL_INTERVAL_MS=30000
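To make the new schedule-format comment concrete, the six-field example from the hunk above decodes as follows:

```bash
# "0 30 9,12,15 1,15 May-Aug Mon,Wed,Fri" read field by field (all times UTC):
#  0            second 0
#  30           minute 30
#  9,12,15      at 09:30, 12:30 and 15:30
#  1,15         on the 1st and 15th of the month
#  May-Aug      May through August only
#  Mon,Wed,Fri  and only on Mondays, Wednesdays and Fridays
```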
@@ -102,12 +137,16 @@
 # INCOMPLETE_2FA_SCHEDULE="30 * * * * *"
 ##
 ## Cron schedule of the job that sends expiration reminders to emergency access grantors.
-## Defaults to hourly (5 minutes after the hour). Set blank to disable this job.
-# EMERGENCY_NOTIFICATION_REMINDER_SCHEDULE="0 5 * * * *"
+## Defaults to hourly (3 minutes after the hour). Set blank to disable this job.
+# EMERGENCY_NOTIFICATION_REMINDER_SCHEDULE="0 3 * * * *"
 ##
 ## Cron schedule of the job that grants emergency access requests that have met the required wait time.
-## Defaults to hourly (5 minutes after the hour). Set blank to disable this job.
-# EMERGENCY_REQUEST_TIMEOUT_SCHEDULE="0 5 * * * *"
+## Defaults to hourly (7 minutes after the hour). Set blank to disable this job.
+# EMERGENCY_REQUEST_TIMEOUT_SCHEDULE="0 7 * * * *"
+##
+## Cron schedule of the job that cleans old events from the event table.
+## Defaults to daily. Set blank to disable this job. Also without EVENTS_DAYS_RETAIN set, this job will not start.
+# EVENT_CLEANUP_SCHEDULE="0 10 0 * * *"
 
 ## Enable extended logging, which shows timestamps and targets in the logs
 # EXTENDED_LOGGING=true
@@ -133,7 +172,7 @@
 ## Enable WAL for the DB
 ## Set to false to avoid enabling WAL during startup.
 ## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
-## this setting only prevents vaultwarden from automatically enabling it on start.
+## this setting only prevents Vaultwarden from automatically enabling it on start.
 ## Please read project wiki page about this setting first before changing the value as it can
 ## cause performance degradation or might render the service unable to start.
 # ENABLE_DB_WAL=true
@@ -231,9 +270,15 @@
 ## A comma-separated list means only those users can create orgs:
 # ORG_CREATION_USERS=admin1@example.com,admin2@example.com
 
-## Token for the admin interface, preferably use a long random string
-## One option is to use 'openssl rand -base64 48'
+## Token for the admin interface, preferably an Argon2 PHC string
+## Vaultwarden has a built-in generator by calling `vaultwarden hash`
+## For details see: https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page#secure-the-admin_token
 ## If not set, the admin panel is disabled
+## New Argon2 PHC string
+## Note that for some environments, like docker-compose, you need to escape all the dollar signs `$` with an extra dollar sign like `$$`
+## Also, use single quotes (') instead of double quotes (") to enclose the string when needed
+# ADMIN_TOKEN='$argon2id$v=19$m=65540,t=3,p=4$MmeKRnGK5RW5mJS7h3TOL89GrpLPXJPAtTK8FTqj9HM$DqsstvoSAETl9YhnsXbf43WeaUwJC6JhViIvuPoig78'
+## Old plain text string (Will generate warnings in favor of Argon2)
 # ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp
 
 ## Enable this to bypass the admin panel security. This option is only
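The `vaultwarden hash` generator referenced above prints an Argon2 PHC string ready for ADMIN_TOKEN; a short sketch of the workflow (the example output is abbreviated, not a real hash):

```bash
# Generate the PHC string interactively (it asks for the password to hash):
vaultwarden hash
# -> ADMIN_TOKEN='$argon2id$v=19$m=65540,t=3,p=4$...'

# In .env, keep the single quotes so the `$` separators stay literal.
# In docker-compose.yml, additionally double every dollar sign:
#   ADMIN_TOKEN: '$$argon2id$$v=19$$m=65540,t=3,p=4$$...'
```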
@@ -270,9 +315,9 @@
 ## This setting applies globally to all users.
 # INCOMPLETE_2FA_TIME_LIMIT=3
 
-## Controls the PBKDF password iterations to apply on the server
-## The change only applies when the password is changed
-# PASSWORD_ITERATIONS=100000
+## Number of server-side password hashing iterations for the password hash.
+## The default for new users. If changed, it will be updated during login for existing users.
+# PASSWORD_ITERATIONS=350000
 
 ## Controls whether users can set password hints. This setting applies globally to all users.
 # PASSWORD_HINTS_ALLOWED=true
@@ -302,11 +347,14 @@
 ## Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2.
 # LOGIN_RATELIMIT_MAX_BURST=10
 
-## Number of seconds, on average, between admin requests from the same IP address before rate limiting kicks in.
+## Number of seconds, on average, between admin login requests from the same IP address before rate limiting kicks in.
 # ADMIN_RATELIMIT_SECONDS=300
 ## Allow a burst of requests of up to this size, while maintaining the average indicated by `ADMIN_RATELIMIT_SECONDS`.
 # ADMIN_RATELIMIT_MAX_BURST=3
 
+## Set the lifetime of admin sessions to this value (in minutes).
+# ADMIN_SESSION_LIFETIME=20
+
 ## Yubico (Yubikey) Settings
 ## Set your Client ID and Secret Key for Yubikey OTP
 ## You can generate it here: https://upgrade.yubico.com/getapikey/
@@ -345,18 +393,23 @@
 # ROCKET_WORKERS=10
 # ROCKET_TLS={certs="/path/to/certs.pem",key="/path/to/key.pem"}
 
-## Mail specific settings, set SMTP_HOST and SMTP_FROM to enable the mail service.
+## Mail specific settings, set SMTP_FROM and either SMTP_HOST or USE_SENDMAIL to enable the mail service.
 ## To make sure the email links are pointing to the correct host, set the DOMAIN variable.
 ## Note: if SMTP_USERNAME is specified, SMTP_PASSWORD is mandatory
 # SMTP_HOST=smtp.domain.tld
 # SMTP_FROM=vaultwarden@domain.tld
 # SMTP_FROM_NAME=Vaultwarden
 # SMTP_SECURITY=starttls # ("starttls", "force_tls", "off") Enable a secure connection. Default is "starttls" (Explicit - ports 587 or 25), "force_tls" (Implicit - port 465) or "off", no encryption (port 25)
-# SMTP_PORT=587 # Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 is outdated and used with Implicit TLS.
+# SMTP_PORT=587 # Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 (submissions) is used for encrypted submission (Implicit TLS).
 # SMTP_USERNAME=username
 # SMTP_PASSWORD=password
 # SMTP_TIMEOUT=15
 
+# Whether to send mail via the `sendmail` command
+# USE_SENDMAIL=false
+# Which sendmail command to use. The one found in the $PATH is used if not specified.
+# SENDMAIL_COMMAND="/path/to/sendmail"
+
 ## Defaults for SSL is "Plain" and "Login" and nothing for Non-SSL connections.
 ## Possible values: ["Plain", "Login", "Xoauth2"].
 ## Multiple options need to be separated by a comma ','.
@@ -367,6 +420,9 @@
 ## but might need to be changed in case it trips some anti-spam filters
 # HELO_NAME=
 
+## Embed images as email attachments
+# SMTP_EMBED_IMAGES=false
+
 ## SMTP debugging
 ## When set to true this will output very detailed SMTP messages.
 ## WARNING: This could contain sensitive information like passwords and usernames! Only enable this during troubleshooting!
.github/FUNDING.yml: 1 change
@@ -1,2 +1,3 @@
 github: dani-garcia
+liberapay: dani-garcia
 custom: ["https://paypal.me/DaniGG"]
.github/workflows/build.yml: 103 changes
@@ -9,6 +9,8 @@ on:
       - "Cargo.*"
       - "build.rs"
       - "rust-toolchain"
+      - "rustfmt.toml"
+      - "diesel.toml"
   pull_request:
     paths:
       - ".github/workflows/build.yml"
@@ -17,61 +19,82 @@ on:
       - "Cargo.*"
       - "build.rs"
       - "rust-toolchain"
+      - "rustfmt.toml"
+      - "diesel.toml"
 
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
+    timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
     env:
       RUSTFLAGS: "-D warnings"
+      CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
     strategy:
       fail-fast: false
       matrix:
         channel:
           - "rust-toolchain" # The version defined in rust-toolchain
           - "msrv" # The supported MSRV
-        include:
-          - channel: "msrv"
-            version: "1.60.0"
 
     name: Build and Test ${{ matrix.channel }}
 
     steps:
       # Checkout the repo
       - name: "Checkout"
-        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
       # End Checkout the repo
 
 
       # Install dependencies
       - name: "Install dependencies Ubuntu"
-        run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl sqlite build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
+        run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
       # End Install dependencies
 
 
-      # Uses the rust-toolchain file to determine version
+      # Determine rust-toolchain version
+      - name: Init Variables
+        id: toolchain
+        shell: bash
+        run: |
+          if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then
+            RUST_TOOLCHAIN="$(cat rust-toolchain)"
+          elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then
+            RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
+          else
+            RUST_TOOLCHAIN="${{ matrix.channel }}"
+          fi
+          echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
+      # End Determine rust-toolchain version
+
+
+      # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f # v1.0.6
+        uses: dtolnay/rust-toolchain@b44cb146d03e8d870c57ab64b80f04586349ca5d # master @ 2023-03-28 - 06:32 GMT+2
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
-          profile: minimal
+          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
           components: clippy, rustfmt
       # End Uses the rust-toolchain file to determine version
 
 
-      # Install the MSRV channel to be used
+      # Install any other channel to be used, for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f # v1.0.6
+        uses: dtolnay/rust-toolchain@b44cb146d03e8d870c57ab64b80f04586349ca5d # master @ 2023-03-28 - 06:32 GMT+2
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
-          profile: minimal
-          override: true
-          toolchain: ${{ matrix.version }}
+          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
      # End Install the MSRV channel to be used
 
 
       # Enable Rust Caching
-      - uses: Swatinem/rust-cache@6720f05bc48b77f96918929a9019fb2203ff71f8 # v2.0.0
+      - uses: Swatinem/rust-cache@dd05243424bd5c0e585e4b55eb2d7615cdd32f1f # v2.5.1
+        with:
+          # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
+          # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
+          # Only update when really needed! Use a <year>.<month>[.<inc>] format.
+          prefix-key: "v2023.07-rust"
       # End Enable Rust Caching
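The msrv branch of the new Init Variables step derives the toolchain from Cargo.toml instead of a hard-coded matrix entry; run locally against the Cargo.toml shown later in this diff, the grep resolves like this:

```bash
# \K drops everything up to and including the opening quote;
# the (?=") lookahead stops the lazy match before the closing quote.
$ grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml
1.69.0
```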
@@ -87,65 +110,51 @@ jobs:
       # First test all features together, afterwards test them separately.
       - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
         id: test_sqlite_mysql_postgresql_mimalloc
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: $${{ always() }}
-        with:
-          command: test
-          args: --release --features sqlite,mysql,postgresql,enable_mimalloc
+        run: |
+          cargo test --release --features sqlite,mysql,postgresql,enable_mimalloc
 
       - name: "test features: sqlite,mysql,postgresql"
         id: test_sqlite_mysql_postgresql
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: $${{ always() }}
-        with:
-          command: test
-          args: --release --features sqlite,mysql,postgresql
+        run: |
+          cargo test --release --features sqlite,mysql,postgresql
 
       - name: "test features: sqlite"
         id: test_sqlite
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: $${{ always() }}
-        with:
-          command: test
-          args: --release --features sqlite
+        run: |
+          cargo test --release --features sqlite
 
       - name: "test features: mysql"
         id: test_mysql
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: $${{ always() }}
-        with:
-          command: test
-          args: --release --features mysql
+        run: |
+          cargo test --release --features mysql
 
       - name: "test features: postgresql"
         id: test_postgresql
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: $${{ always() }}
-        with:
-          command: test
-          args: --release --features postgresql
+        run: |
+          cargo test --release --features postgresql
       # End Run cargo tests
 
 
       # Run cargo clippy, and fail on warnings (In release mode to speed up future builds)
       - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
         id: clippy
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: ${{ always() && matrix.channel == 'rust-toolchain' }}
-        with:
-          command: clippy
-          args: --release --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
+        run: |
+          cargo clippy --release --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
       # End Run cargo clippy
 
 
       # Run cargo fmt (Only run on rust-toolchain defined version)
       - name: "check formatting"
         id: formatting
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: ${{ always() && matrix.channel == 'rust-toolchain' }}
-        with:
-          command: fmt
-          args: --all -- --check
+        run: |
+          cargo fmt --all -- --check
       # End Run cargo fmt
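Because the archived actions-rs/cargo wrapper is replaced with plain run: steps, the CI commands can now be copied verbatim as a local pre-push check:

```bash
# Same commands the workflow runs, in the same order:
cargo test --release --features sqlite,mysql,postgresql,enable_mimalloc
cargo clippy --release --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
cargo fmt --all -- --check
```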
@@ -182,17 +191,15 @@ jobs:
 
       # Build the binary to upload to the artifacts
       - name: "build features: sqlite,mysql,postgresql"
-        uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3
         if: ${{ matrix.channel == 'rust-toolchain' }}
-        with:
-          command: build
-          args: --release --features sqlite,mysql,postgresql
+        run: |
+          cargo build --release --features sqlite,mysql,postgresql
       # End Build the binary
 
 
       # Upload artifact to Github Actions
       - name: "Upload artifact"
-        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
+        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           name: vaultwarden
.github/workflows/hadolint.yml: 7 changes
@@ -8,11 +8,12 @@ on: [
 jobs:
   hadolint:
     name: Validate Dockerfile syntax
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
+    timeout-minutes: 30
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
       # End Checkout the repo
 
 
@@ -23,7 +24,7 @@ jobs:
           sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
           sudo chmod +x /usr/local/bin/hadolint
         env:
-          HADOLINT_VERSION: 2.10.0
+          HADOLINT_VERSION: 2.12.0
       # End Download hadolint
 
       # Test Dockerfiles
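To reproduce the lint step locally, the same pinned release can be fetched and pointed at a Dockerfile (the Dockerfile path below is an assumption about the repo layout):

```bash
HADOLINT_VERSION=2.12.0
sudo curl -L "https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m)" \
  -o /usr/local/bin/hadolint && sudo chmod +x /usr/local/bin/hadolint
# hadolint picks up .hadolint.yaml from the working directory automatically.
hadolint docker/amd64/Dockerfile  # hypothetical path
```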
.github/workflows/release.yml: 168 changes
@@ -24,21 +24,22 @@ jobs:
   # Some checks to determine if we need to continue with building a new docker.
   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
   skip_check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
     steps:
       - name: Skip Duplicates Actions
         id: skip_check
-        uses: fkirc/skip-duplicate-actions@9d116fa7e55f295019cfab7e3ab72b478bcf7fdd # v4.0.0
+        uses: fkirc/skip-duplicate-actions@12aca0a884f6137d619d6a8a09fcc3406ced5281 # v5.3.0
         with:
           cancel_others: 'true'
         # Only run this when not creating a tag
         if: ${{ startsWith(github.ref, 'refs/heads/') }}
 
   docker-build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
+    timeout-minutes: 120
     needs: skip_check
     # Start a local docker registry to be used to generate multi-arch images.
     services:
@@ -47,11 +48,23 @@ jobs:
       ports:
         - 5000:5000
     env:
-      DOCKER_BUILDKIT: 1 # Disabled for now, but we should look at this because it will speedup building!
-      # DOCKER_REPO/secrets.DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
-      DOCKER_REPO: ${{ secrets.DOCKERHUB_REPO }}
+      # Use BuildKit (https://docs.docker.com/build/buildkit/) for better
+      # build performance and the ability to copy extended file attributes
+      # (e.g., for executable capabilities) across build phases.
+      DOCKER_BUILDKIT: 1
       SOURCE_COMMIT: ${{ github.sha }}
       SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
+      # The *_REPO variables need to be configured as repository variables
+      # Append `/settings/variables/actions` to your repo url
+      # DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
+      # Check for Docker hub credentials in secrets
+      HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
+      # GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
+      # Check for Github credentials in secrets
+      HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
+      # QUAY_REPO needs to be 'quay.io/<user>/<repo>'
+      # Check for Quay.io credentials in secrets
+      HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
     if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     strategy:
       matrix:
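The comments added to the env block distinguish repository variables (the *_REPO values) from secrets (the credentials); one hedged way to set both is the GitHub CLI, assuming a gh version that ships the `gh variable` command (2.31+):

```bash
# Registry targets are plain repository variables (example values):
gh variable set DOCKERHUB_REPO --body "index.docker.io/example/vaultwarden"
gh variable set GHCR_REPO --body "ghcr.io/example/vaultwarden"
gh variable set QUAY_REPO --body "quay.io/example/vaultwarden"

# Credentials stay in encrypted secrets (each command prompts for the value):
gh secret set DOCKERHUB_USERNAME
gh secret set DOCKERHUB_TOKEN
```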
@@ -60,17 +73,10 @@ jobs:
     steps:
       # Checkout the repo
       - name: Checkout
-        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
+        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3
         with:
           fetch-depth: 0
 
-      # Login to Docker Hub
-      - name: Login to Docker Hub
-        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # v2.0.0
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
       # Determine Docker Tag
       - name: Init Variables
         id: vars
@@ -78,42 +84,152 @@ jobs:
         run: |
           # Check which main tag we are going to build determined by github.ref
           if [[ "${{ github.ref }}" == refs/tags/* ]]; then
-            echo "set-output name=DOCKER_TAG::${GITHUB_REF#refs/*/}"
-            echo "::set-output name=DOCKER_TAG::${GITHUB_REF#refs/*/}"
+            echo "DOCKER_TAG=${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_OUTPUT}"
           elif [[ "${{ github.ref }}" == refs/heads/* ]]; then
-            echo "set-output name=DOCKER_TAG::testing"
-            echo "::set-output name=DOCKER_TAG::testing"
+            echo "DOCKER_TAG=testing" | tee -a "${GITHUB_OUTPUT}"
           fi
       # End Determine Docker Tag
 
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
+
+      # Login to GitHub Container Registry
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+        if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
+
+      # Login to Quay.io
+      - name: Login to Quay.io
+        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # v2.2.0
+        with:
+          registry: quay.io
+          username: ${{ secrets.QUAY_USERNAME }}
+          password: ${{ secrets.QUAY_TOKEN }}
+        if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
+
+      # Debian
+
+      # Docker Hub
-      - name: Build Debian based images
+      - name: Build Debian based images (docker.io)
         shell: bash
         env:
+          DOCKER_REPO: "${{ vars.DOCKERHUB_REPO }}"
           DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
         run: |
           ./hooks/build
-        if: ${{ matrix.base_image == 'debian' }}
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_DOCKERHUB_LOGIN == 'true' }}
 
-      - name: Push Debian based images
+      - name: Push Debian based images (docker.io)
         shell: bash
         env:
+          DOCKER_REPO: "${{ vars.DOCKERHUB_REPO }}"
           DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
         run: |
           ./hooks/push
-        if: ${{ matrix.base_image == 'debian' }}
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_DOCKERHUB_LOGIN == 'true' }}
 
+      # GitHub Container Registry
+      - name: Build Debian based images (ghcr.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.GHCR_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
+        run: |
+          ./hooks/build
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_GHCR_LOGIN == 'true' }}
+
+      - name: Push Debian based images (ghcr.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.GHCR_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
+        run: |
+          ./hooks/push
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_GHCR_LOGIN == 'true' }}
+
+      # Quay.io
+      - name: Build Debian based images (quay.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.QUAY_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
+        run: |
+          ./hooks/build
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_QUAY_LOGIN == 'true' }}
+
+      - name: Push Debian based images (quay.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.QUAY_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}"
+        run: |
+          ./hooks/push
+        if: ${{ matrix.base_image == 'debian' && env.HAVE_QUAY_LOGIN == 'true' }}
+
+      # Alpine
+
+      # Docker Hub
-      - name: Build Alpine based images
+      - name: Build Alpine based images (docker.io)
         shell: bash
         env:
+          DOCKER_REPO: "${{ vars.DOCKERHUB_REPO }}"
           DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
         run: |
           ./hooks/build
-        if: ${{ matrix.base_image == 'alpine' }}
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_DOCKERHUB_LOGIN == 'true' }}
 
-      - name: Push Alpine based images
+      - name: Push Alpine based images (docker.io)
         shell: bash
         env:
+          DOCKER_REPO: "${{ vars.DOCKERHUB_REPO }}"
           DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
         run: |
           ./hooks/push
-        if: ${{ matrix.base_image == 'alpine' }}
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_DOCKERHUB_LOGIN == 'true' }}
+
+      # GitHub Container Registry
+      - name: Build Alpine based images (ghcr.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.GHCR_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
+        run: |
+          ./hooks/build
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_GHCR_LOGIN == 'true' }}
+
+      - name: Push Alpine based images (ghcr.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.GHCR_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
+        run: |
+          ./hooks/push
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_GHCR_LOGIN == 'true' }}
+
+      # Quay.io
+      - name: Build Alpine based images (quay.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.QUAY_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
+        run: |
+          ./hooks/build
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_QUAY_LOGIN == 'true' }}
+
+      - name: Push Alpine based images (quay.io)
+        shell: bash
+        env:
+          DOCKER_REPO: "${{ vars.QUAY_REPO }}"
+          DOCKER_TAG: "${{steps.vars.outputs.DOCKER_TAG}}-alpine"
+        run: |
+          ./hooks/push
+        if: ${{ matrix.base_image == 'alpine' && env.HAVE_QUAY_LOGIN == 'true' }}
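The Init Variables step in the hunk above also migrates off the deprecated ::set-output workflow command; side by side, the old and new output mechanisms are:

```bash
# Deprecated form removed by this diff:
echo "::set-output name=DOCKER_TAG::testing"

# Replacement: append key=value to the step output file; `tee -a` also
# echoes the value into the job log for easier debugging.
echo "DOCKER_TAG=testing" | tee -a "${GITHUB_OUTPUT}"
# Downstream steps keep reading it as ${{ steps.vars.outputs.DOCKER_TAG }}.
```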
.hadolint.yaml
@@ -3,5 +3,9 @@ ignored:
   - DL3008
   # disable explicit version for apk install
   - DL3018
+  # disable check for consecutive `RUN` instructions
+  - DL3059
 trustedRegistries:
   - docker.io
+  - ghcr.io
+  - quay.io
.pre-commit-config.yaml
@@ -1,16 +1,20 @@
 ---
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
     hooks:
       - id: check-yaml
       - id: check-json
       - id: check-toml
+      - id: mixed-line-ending
+        args: ["--fix=no"]
       - id: end-of-file-fixer
         exclude: "(.*js$|.*css$)"
       - id: check-case-conflict
       - id: check-merge-conflict
       - id: detect-private-key
+      - id: check-symlinks
+      - id: forbid-submodules
   - repo: local
     hooks:
       - id: fmt
@@ -27,7 +31,7 @@ repos:
         language: system
         args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
         types_or: [rust, file]
-        files: (Cargo.toml|Cargo.lock|.*\.rs$)
+        files: (Cargo.toml|Cargo.lock|rust-toolchain|.*\.rs$)
         pass_filenames: false
       - id: cargo-clippy
         name: cargo clippy
@@ -36,5 +40,5 @@ repos:
         language: system
         args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
         types_or: [rust, file]
-        files: (Cargo.toml|Cargo.lock|.*\.rs$)
+        files: (Cargo.toml|Cargo.lock|rust-toolchain|clippy.toml|.*\.rs$)
         pass_filenames: false
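With the extra hooks in place, the usual way to exercise the updated configuration locally (assuming pre-commit is installed):

```bash
pre-commit install           # run the hooks automatically on every git commit
pre-commit run --all-files   # one-off run of all hooks, including cargo fmt and clippy
```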
Cargo.lock: 2206 changes (generated file, diff not shown)

Cargo.toml: 141 changes
@@ -3,12 +3,12 @@ name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.60.0"
+rust-version = "1.69.0"
 resolver = "2"
 
 repository = "https://github.com/dani-garcia/vaultwarden"
 readme = "README.md"
-license = "GPL-3.0-only"
+license = "AGPL-3.0-only"
 publish = false
 build = "build.rs"
@@ -24,6 +24,11 @@ vendored_openssl = ["openssl/vendored"]
 # Enable MiMalloc memory allocator to replace the default malloc
 # This can improve performance for Alpine builds
 enable_mimalloc = ["mimalloc"]
+# This is a development dependency, and should only be used during development!
+# It enables the usage of the diesel_logger crate, which is able to output the generated queries.
+# You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
+# if you want to turn off the logging for a specific run.
+query_logger = ["diesel_logger"]
 
 # Enable unstable features, requires nightly
 # Currently only used to enable rusts official ip support
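The new query_logger feature is gated twice, at compile time and at run time, exactly as the added comment describes; a sketch of using it during development:

```bash
# Compile the development-only feature in alongside a database backend:
cargo build --features sqlite,query_logger
# Opt in per run; unset QUERY_LOGGER to silence the output without recompiling.
QUERY_LOGGER=1 cargo run --features sqlite,query_logger
```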
@@ -31,69 +36,72 @@ unstable = []
 
 [target."cfg(not(windows))".dependencies]
 # Logging
-syslog = "6.0.1" # Needs to be v4 until fern is updated
+syslog = "6.1.0"
 
 [dependencies]
 # Logging
-log = "0.4.17"
+log = "0.4.19"
-fern = { version = "0.6.1", features = ["syslog-6"] }
+fern = { version = "0.6.2", features = ["syslog-6"] }
 tracing = { version = "0.1.37", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
 
-backtrace = "0.3.66" # Logging panics to logfile instead stderr only
-
 # A `dotenv` implementation for Rust
-dotenvy = { version = "0.15.5", default-features = false }
+dotenvy = { version = "0.15.7", default-features = false }
 
 # Lazy initialization
-once_cell = "1.15.0"
+once_cell = "1.18.0"
 
 # Numerical libraries
-num-traits = "0.2.15"
+num-traits = "0.2.16"
-num-derive = "0.3.3"
+num-derive = "0.4.0"
 
 # Web framework
-rocket = { version = "0.5.0-rc.2", features = ["tls", "json"], default-features = false }
+rocket = { version = "0.5.0-rc.3", features = ["tls", "json"], default-features = false }
+# rocket_ws = { version ="0.1.0-rc.3" }
+rocket_ws = { git = 'https://github.com/SergioBenitez/Rocket', rev = "ce441b5f46fdf5cd99cb32b8b8638835e4c2a5fa" } # v0.5 branch
 
 # WebSockets libraries
-tokio-tungstenite = "0.17.2"
+tokio-tungstenite = "0.19.0"
-rmpv = "1.0.0" # MessagePack library
+rmpv = "1.0.1" # MessagePack library
-dashmap = "5.4.0"
+
+# Concurrent HashMap used for WebSocket messaging and favicons
+dashmap = "5.5.0"
 
 # Async futures
-futures = "0.3.24"
+futures = "0.3.28"
-tokio = { version = "1.21.2", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time"] }
+tokio = { version = "1.30.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal"] }
 
 # A generic serialization/deserialization framework
-serde = { version = "1.0.145", features = ["derive"] }
+serde = { version = "1.0.183", features = ["derive"] }
-serde_json = "1.0.86"
+serde_json = "1.0.104"
 
 # A safe, extensible ORM and Query builder
-diesel = { version = "1.4.8", features = ["chrono", "r2d2"] }
+diesel = { version = "2.1.0", features = ["chrono", "r2d2"] }
-diesel_migrations = "1.4.0"
+diesel_migrations = "2.1.0"
+diesel_logger = { version = "0.3.0", optional = true }
 
-# Bundled SQLite
+# Bundled/Static SQLite
-libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true }
+libsqlite3-sys = { version = "0.26.0", features = ["bundled"], optional = true }
 
 # Crypto-related libraries
 rand = { version = "0.8.5", features = ["small_rng"] }
 ring = "0.16.20"
 
 # UUID generation
-uuid = { version = "1.2.1", features = ["v4"] }
+uuid = { version = "1.4.1", features = ["v4"] }
 
 # Date and time libraries
-chrono = { version = "0.4.22", features = ["clock", "serde"], default-features = false }
+chrono = { version = "0.4.26", features = ["clock", "serde"], default-features = false }
-chrono-tz = "0.6.3"
+chrono-tz = "0.8.3"
-time = "0.3.15"
+time = "0.3.25"
 
 # Job scheduler
-job_scheduler_ng = "2.0.2"
+job_scheduler_ng = "2.0.4"
 
 # Data encoding library Hex/Base32/Base64
-data-encoding = "2.3.2"
+data-encoding = "2.4.0"
 
 # JWT library
-jsonwebtoken = "8.1.1"
+jsonwebtoken = "8.3.0"
 
 # TOTP library
 totp-lite = "2.0.0"
@@ -104,55 +112,72 @@ yubico = { version = "0.11.0", features = ["online-tokio"], default-features = f
|
|||||||
# WebAuthn libraries
|
# WebAuthn libraries
|
||||||
webauthn-rs = "0.3.2"
|
webauthn-rs = "0.3.2"
|
||||||
|
|
||||||
# Handling of URL's for WebAuthn
|
# Handling of URL's for WebAuthn and favicons
|
||||||
url = "2.3.1"
|
url = "2.4.0"
|
||||||
|
|
||||||
# Email librariese-Base, Update crates and small change.
|
# Email libraries
|
||||||
lettre = { version = "0.10.1", features = ["smtp-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
lettre = { version = "0.10.4", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
|
||||||
percent-encoding = "2.2.0" # URL encoding library used for URL's in the emails
|
percent-encoding = "2.3.0" # URL encoding library used for URL's in the emails
|
||||||
|
email_address = "0.2.4"
|
||||||
|
|
||||||
# Template library
|
# HTML Template library
|
||||||
handlebars = { version = "4.3.5", features = ["dir_source"] }
|
handlebars = { version = "4.3.7", features = ["dir_source"] }
|
||||||
|
|
||||||
# HTTP client
|
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
||||||
reqwest = { version = "0.11.12", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] }
|
reqwest = { version = "0.11.18", features = ["stream", "json", "deflate", "gzip", "brotli", "socks", "cookies", "trust-dns", "native-tls-alpn"] }
|
||||||
|
|
||||||
# For favicon extraction from main website
|
# Favicon extraction libraries
|
||||||
html5gum = "0.5.2"
|
html5gum = "0.5.7"
|
||||||
regex = { version = "1.6.0", features = ["std", "perf", "unicode-perl"], default-features = false }
|
regex = { version = "1.9.3", features = ["std", "perf", "unicode-perl"], default-features = false }
|
||||||
data-url = "0.2.0"
|
data-url = "0.3.0"
|
||||||
bytes = "1.2.1"
|
bytes = "1.4.0"
|
||||||
cached = "0.39.0"
|
|
||||||
|
# Cache function results (Used for version check and favicon fetching)
|
||||||
|
cached = "0.44.0"
|
||||||
|
|
||||||
# Used for custom short lived cookie jar during favicon extraction
|
# Used for custom short lived cookie jar during favicon extraction
|
||||||
cookie = "0.16.1"
|
cookie = "0.16.2"
|
||||||
cookie_store = "0.17.0"
|
cookie_store = "0.19.1"
|
||||||
|
|
||||||
# Used by U2F, JWT and Postgres
|
# Used by U2F, JWT and PostgreSQL
|
||||||
openssl = "0.10.42"
|
openssl = "0.10.56"
|
||||||
|
|
||||||
# CLI argument parsing
|
# CLI argument parsing
|
||||||
pico-args = "0.5.0"
|
pico-args = "0.5.0"
|
||||||
|
|
||||||
# Macro ident concatenation
|
# Macro ident concatenation
|
||||||
paste = "1.0.9"
|
paste = "1.0.14"
|
||||||
governor = "0.5.0"
|
governor = "0.6.0"
|
||||||
|
|
||||||
# Capture CTRL+C
|
# Check client versions for specific features.
|
||||||
ctrlc = { version = "3.2.3", features = ["termination"] }
|
semver = "1.0.18"
|
||||||
|
|
||||||
# Allow overriding the default memory allocator
|
# Allow overriding the default memory allocator
|
||||||
# Mainly used for the musl builds, since the default musl malloc is very slow
|
# Mainly used for the musl builds, since the default musl malloc is very slow
|
||||||
mimalloc = { version = "0.1.30", features = ["secure"], default-features = false, optional = true }
|
mimalloc = { version = "0.1.37", features = ["secure"], default-features = false, optional = true }
|
||||||
|
which = "4.4.0"
|
||||||
|
|
||||||
|
# Argon2 library with support for the PHC format
|
||||||
|
argon2 = "0.5.1"
|
||||||
|
|
||||||
|
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
|
||||||
|
rpassword = "7.2.0"
|
||||||
|
|
||||||
[patch.crates-io]
|
[patch.crates-io]
|
||||||
# Using a patched version of multer-rs (Used by Rocket) to fix attachment/send file uploads
|
rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'ce441b5f46fdf5cd99cb32b8b8638835e4c2a5fa' } # v0.5 branch
|
||||||
# Issue: https://github.com/dani-garcia/vaultwarden/issues/2644
|
# rocket_ws = { git = 'https://github.com/SergioBenitez/Rocket', rev = 'ce441b5f46fdf5cd99cb32b8b8638835e4c2a5fa' } # v0.5 branch
|
||||||
# Patch: https://github.com/BlackDex/multer-rs/commit/477d16b7fa0f361b5c2a5ba18a5b28bec6d26a8a
|
|
||||||
multer = { git = "https://github.com/BlackDex/multer-rs", rev = "477d16b7fa0f361b5c2a5ba18a5b28bec6d26a8a" }
|
|
||||||
|
|
||||||
# Strip debuginfo from the release builds
|
# Strip debuginfo from the release builds
|
||||||
# Also enable thin LTO for some optimizations
|
# Also enable thin LTO for some optimizations
|
||||||
[profile.release]
|
[profile.release]
|
||||||
strip = "debuginfo"
|
strip = "debuginfo"
|
||||||
lto = "thin"
|
lto = "thin"
|
||||||
|
|
||||||
|
# Always build argon2 using opt-level 3
|
||||||
|
# This is a huge speed improvement during testing
|
||||||
|
[profile.dev.package.argon2]
|
||||||
|
opt-level = 3
|
||||||
|
|
||||||
|
# A little bit of a speedup
|
||||||
|
[profile.dev]
|
||||||
|
split-debuginfo = "unpacked"
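The removal of the `ctrlc` crate above pairs with the new `signal` feature on tokio: CTRL+C handling can be done by tokio itself. A minimal sketch of that pattern, assuming the feature set shown in the diff (which omits tokio's `macros` feature, so the runtime is built by hand rather than with `#[tokio::main]`); this is illustrative, not the project's actual shutdown code:

```rust
use tokio::runtime::Runtime;

fn main() {
    // Build the runtime explicitly ("rt-multi-thread" feature); the feature
    // list above does not enable "macros", so no #[tokio::main] attribute.
    let rt = Runtime::new().expect("failed to build tokio runtime");
    rt.block_on(async {
        // ... spawn the web server and background jobs here ...

        // Resolves once SIGINT / CTRL+C arrives; needs the "signal" feature.
        tokio::signal::ctrl_c().await.expect("failed to listen for CTRL+C");
        println!("CTRL+C received, shutting down");
    });
}
```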
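The `semver` crate that replaces `ctrlc` in this block backs the new "Check client versions for specific features." comment. A hedged sketch of that kind of version gate; the requirement string and function name here are invented for illustration:

```rust
use semver::{Version, VersionReq};

/// Hypothetical gate: does the connecting client support some newer feature?
fn client_supports_feature(client_version: &str) -> bool {
    // Invented cut-off, for illustration only.
    let req = VersionReq::parse(">=2023.5.0").expect("static requirement parses");
    match Version::parse(client_version) {
        Ok(v) => req.matches(&v),
        Err(_) => false, // unparsable version string: assume unsupported
    }
}
```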
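The new `argon2` and `rpassword` entries arrive together, per their comments, to prompt for a password on the CLI and emit an Argon2id PHC string for use as the ADMIN_TOKEN. A minimal sketch of how the two crates combine, assuming the `argon2` crate's default parameters; not necessarily the project's exact implementation:

```rust
use argon2::{
    password_hash::{rand_core::OsRng, PasswordHasher, SaltString},
    Argon2,
};

fn main() -> std::io::Result<()> {
    // Prompt on the CLI without echoing the password.
    let password = rpassword::prompt_password("Password: ")?;

    // Argon2id with the crate's defaults and a freshly generated random salt.
    let salt = SaltString::generate(&mut OsRng);
    let phc = Argon2::default()
        .hash_password(password.as_bytes(), &salt)
        .expect("password hashing failed")
        .to_string();

    // The PHC string (e.g. "$argon2id$v=19$...") is what ADMIN_TOKEN stores.
    println!("{phc}");
    Ok(())
}
```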
LICENSE.txt (143 changed lines)

@@ -1,5 +1,5 @@
-GNU GENERAL PUBLIC LICENSE
+GNU AFFERO GENERAL PUBLIC LICENSE
-Version 3, 29 June 2007
+Version 3, 19 November 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies

@@ -7,17 +7,15 @@

 Preamble

-The GNU General Public License is a free, copyleft license for
+The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works.
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.

 The licenses for most software and other practical works are designed
 to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
+our General Public Licenses are intended to guarantee your freedom to
 share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
+software for all its users.
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.

 When we speak of free software, we are referring to freedom, not
 price. Our General Public Licenses are designed to make sure that you

@@ -26,44 +24,34 @@ them if you wish), that you receive source code or can get it if you
 want it, that you can change the software or use pieces of it in new
 free programs, and that you know you can do these things.

-To protect your rights, we need to prevent others from denying you
+Developers that use our General Public Licenses protect your rights
-these rights or asking you to surrender the rights. Therefore, you have
+with two steps: (1) assert copyright on the software, and (2) offer
-certain responsibilities if you distribute copies of the software, or if
+you this License which gives you legal permission to copy, distribute
-you modify it: responsibilities to respect the freedom of others.
+and/or modify the software.

-For example, if you distribute copies of such a program, whether
+A secondary benefit of defending all users' freedom is that
-gratis or for a fee, you must pass on to the recipients the same
+improvements made in alternate versions of the program, if they
-freedoms that you received. You must make sure that they, too, receive
+receive widespread use, become available for other developers to
-or can get the source code. And you must show them these terms so they
+incorporate. Many developers of free software are heartened and
-know their rights.
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.

-Developers that use the GNU GPL protect your rights with two steps:
+The GNU Affero General Public License is designed specifically to
-(1) assert copyright on the software, and (2) offer you this License
+ensure that, in such cases, the modified source code becomes available
-giving you legal permission to copy, distribute and/or modify it.
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.

-For the developers' and authors' protection, the GPL clearly explains
+An older license, called the Affero General Public License and
-that there is no warranty for this free software. For both users' and
+published by Affero, was designed to accomplish similar goals. This is
-authors' sake, the GPL requires that modified versions be marked as
+a different license, not a version of the Affero GPL, but Affero has
-changed, so that their problems will not be attributed erroneously to
+released a new version of the Affero GPL which permits relicensing under
-authors of previous versions.
+this license.

-Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
-Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.

 The precise terms and conditions for copying, distribution and
 modification follow.

@@ -72,7 +60,7 @@ modification follow.

 0. Definitions.

-"This License" refers to version 3 of the GNU General Public License.
+"This License" refers to version 3 of the GNU Affero General Public License.

 "Copyright" also means copyright-like laws that apply to other kinds of
 works, such as semiconductor masks.

@@ -549,35 +537,45 @@ to collect a royalty for further conveying from those to whom you convey
 the Program, the only way you could satisfy both those terms and this
 License would be to refrain entirely from conveying the Program.

-13. Use with the GNU Affero General Public License.
+13. Remote Network Interaction; Use with the GNU General Public License.

+Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.

 Notwithstanding any other provision of this License, you have
 permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
+under version 3 of the GNU General Public License into a single
 combined work, and to convey the resulting work. The terms of this
 License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
+but the work with which it is combined will remain governed by version
-section 13, concerning interaction through a network will apply to the
+3 of the GNU General Public License.
-combination as such.

 14. Revised Versions of this License.

 The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
+the GNU Affero General Public License from time to time. Such new versions
-be similar in spirit to the present version, but may differ in detail to
+will be similar in spirit to the present version, but may differ in detail to
 address new problems or concerns.

 Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
+Program specifies that a certain numbered version of the GNU Affero General
 Public License "or any later version" applies to it, you have the
 option of following the terms and conditions either of that numbered
 version or of any later version published by the Free Software
 Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
+GNU Affero General Public License, you may choose any version ever published
 by the Free Software Foundation.

 If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
+versions of the GNU Affero General Public License can be used, that proxy's
 public statement of acceptance of a version permanently authorizes you
 to choose that version for the Program.

@@ -635,40 +633,29 @@ the "copyright" line and a pointer to where the full notice is found.
 Copyright (C) <year> <name of author>

 This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
+it under the terms of the GNU Affero General Public License as published
-the Free Software Foundation, either version 3 of the License, or
+by the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.

 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GNU Affero General Public License for more details.

-You should have received a copy of the GNU General Public License
+You should have received a copy of the GNU Affero General Public License
 along with this program. If not, see <https://www.gnu.org/licenses/>.

 Also add information on how to contact you by electronic and paper mail.

-If the program does terminal interaction, make it output a short
+If your software can interact with users remotely through a computer
-notice like this when it starts in an interactive mode:
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
-<program> Copyright (C) <year> <name of author>
+interface could display a "Source" link that leads users to an archive
-This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+of the code. There are many ways you could offer source, and different
-This is free software, and you are welcome to redistribute it
+solutions will be better for different programs; see section 13 for the
-under certain conditions; type `show c' for details.
+specific requirements.

-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".

 You should also get your employer (if you work as a programmer) or school,
 if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
+For more information on this, and how to apply and follow the GNU AGPL, see
 <https://www.gnu.org/licenses/>.

-The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/licenses/why-not-lgpl.html>.
README.md (41 changed lines)

@@ -3,11 +3,13 @@
 📢 Note: This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues. Please see [#1642](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.

 ---
+[](https://github.com/dani-garcia/vaultwarden/actions/workflows/build.yml)
+[](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden)
 [](https://hub.docker.com/r/vaultwarden/server)
+[](https://quay.io/repository/vaultwarden/server)
 [](https://deps.rs/repo/github/dani-garcia/vaultwarden)
 [](https://github.com/dani-garcia/vaultwarden/releases/latest)
 [](https://github.com/dani-garcia/vaultwarden/blob/main/LICENSE.txt)
 [](https://matrix.to/#/#vaultwarden:matrix.org)

 Image is based on [Rust implementation of Bitwarden API](https://github.com/dani-garcia/vaultwarden).

@@ -23,23 +25,24 @@ Image is based on [Rust implementation of Bitwarden API](https://github.com/dani
 Basically full implementation of Bitwarden API is provided including:

 * Organizations support
-* Attachments
+* Attachments and Send
 * Vault API support
 * Serving the static files for Vault interface
 * Website icons API
 * Authenticator and U2F support
 * YubiKey and Duo support
+* Emergency Access

 ## Installation
 Pull the docker image and mount a volume from the host for persistent storage:

 ```sh
 docker pull vaultwarden/server:latest
-docker run -d --name vaultwarden -v /vw-data/:/data/ -p 80:80 vaultwarden/server:latest
+docker run -d --name vaultwarden -v /vw-data/:/data/ --restart unless-stopped -p 80:80 vaultwarden/server:latest
 ```
 This will preserve any persistent data under /vw-data/, you can adapt the path to whatever suits you.

-**IMPORTANT**: Some web browsers, like Chrome, disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault from HTTPS.
+**IMPORTANT**: Most modern web browsers, disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.

 This can be configured in [vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).

@@ -49,42 +52,44 @@ If you have an available domain name, you can get HTTPS certificates with [Let's
 See the [vaultwarden wiki](https://github.com/dani-garcia/vaultwarden/wiki) for more information on how to configure and run the vaultwarden server.

 ## Get in touch
-To ask a question, offer suggestions or new features or to get help configuring or installing the software, please [use the forum](https://vaultwarden.discourse.group/).
+To ask a question, offer suggestions or new features or to get help configuring or installing the software, please use [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions) or [the forum](https://vaultwarden.discourse.group/).

-If you spot any bugs or crashes with vaultwarden itself, please [create an issue](https://github.com/dani-garcia/vaultwarden/issues/). Make sure there aren't any similar issues open, though!
+If you spot any bugs or crashes with vaultwarden itself, please [create an issue](https://github.com/dani-garcia/vaultwarden/issues/). Make sure you are on the latest version and there aren't any similar issues open, though!

 If you prefer to chat, we're usually hanging around at [#vaultwarden:matrix.org](https://matrix.to/#/#vaultwarden:matrix.org) room on Matrix. Feel free to join us!

 ### Sponsors
 Thanks for your contribution to the project!

+<!--
 <table>
 <tr>
 <td align="center">
-<a href="https://github.com/netdadaltd">
+<a href="https://github.com/username">
-<img src="https://avatars.githubusercontent.com/u/77323954?s=75&v=4" width="75px;" alt="netdadaltd"/>
+<img src="https://avatars.githubusercontent.com/u/725423?s=75&v=4" width="75px;" alt="username"/>
 <br />
-<sub><b>netDada Ltd.</b></sub>
+<sub><b>username</b></sub>
 </a>
 </td>
 </tr>
 </table>

 <br/>
+-->

 <table>
 <tr>
 <td align="center">
-<a href="https://github.com/Gyarbij" style="width: 75px">
+<a href="https://github.com/themightychris" style="width: 75px">
-<sub><b>Chono N</b></sub>
-</a>
-</td>
-</tr>
-<tr>
-<td align="center">
-<a href="https://github.com/themightychris">
 <sub><b>Chris Alfano</b></sub>
 </a>
 </td>
 </tr>
+<tr>
+<td align="center">
+<a href="https://github.com/numberly" style="width: 75px">
+<sub><b>Numberly</b></sub>
+</a>
+</td>
+</tr>
 </table>
build.rs (23 changed lines)

@@ -9,20 +9,25 @@ fn main() {
     println!("cargo:rustc-cfg=mysql");
     #[cfg(feature = "postgresql")]
     println!("cargo:rustc-cfg=postgresql");
+    #[cfg(feature = "query_logger")]
+    println!("cargo:rustc-cfg=query_logger");

     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
     compile_error!(
         "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
     );

+    #[cfg(all(not(debug_assertions), feature = "query_logger"))]
+    compile_error!("Query Logging is only allowed during development, it is not intented for production usage!");

     // Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION.
     // If neither exist, read from git.
     let maybe_vaultwarden_version =
         env::var("VW_VERSION").or_else(|_| env::var("BWRS_VERSION")).or_else(|_| version_from_git_info());

     if let Ok(version) = maybe_vaultwarden_version {
-        println!("cargo:rustc-env=VW_VERSION={}", version);
+        println!("cargo:rustc-env=VW_VERSION={version}");
-        println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
+        println!("cargo:rustc-env=CARGO_PKG_VERSION={version}");
     }
 }

@@ -47,29 +52,29 @@ fn version_from_git_info() -> Result<String, std::io::Error> {
     // the current commit doesn't have an associated tag
     let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok();
     if let Some(ref exact) = exact_tag {
-        println!("cargo:rustc-env=GIT_EXACT_TAG={}", exact);
+        println!("cargo:rustc-env=GIT_EXACT_TAG={exact}");
     }

     // The last available tag, equal to exact_tag when
     // the current commit is tagged
     let last_tag = run(&["git", "describe", "--abbrev=0", "--tags"])?;
-    println!("cargo:rustc-env=GIT_LAST_TAG={}", last_tag);
+    println!("cargo:rustc-env=GIT_LAST_TAG={last_tag}");

     // The current branch name
     let branch = run(&["git", "rev-parse", "--abbrev-ref", "HEAD"])?;
-    println!("cargo:rustc-env=GIT_BRANCH={}", branch);
+    println!("cargo:rustc-env=GIT_BRANCH={branch}");

     // The current git commit hash
     let rev = run(&["git", "rev-parse", "HEAD"])?;
     let rev_short = rev.get(..8).unwrap_or_default();
-    println!("cargo:rustc-env=GIT_REV={}", rev_short);
+    println!("cargo:rustc-env=GIT_REV={rev_short}");

     // Combined version
     if let Some(exact) = exact_tag {
         Ok(exact)
-    } else if &branch != "main" && &branch != "master" {
+    } else if &branch != "main" && &branch != "master" && &branch != "HEAD" {
-        Ok(format!("{}-{} ({})", last_tag, rev_short, branch))
+        Ok(format!("{last_tag}-{rev_short} ({branch})"))
     } else {
-        Ok(format!("{}-{}", last_tag, rev_short))
+        Ok(format!("{last_tag}-{rev_short}"))
     }
 }
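The `run` helper called throughout `version_from_git_info()` is defined elsewhere in build.rs and is not shown in these hunks. A hypothetical stand-in whose signature is inferred from the call sites above; the real implementation may differ:

```rust
use std::io::{Error, ErrorKind};
use std::process::Command;

// Hypothetical stand-in for the `run` helper used above; signature inferred
// from the call sites: run(&[...]) -> Result<String, std::io::Error>.
fn run(args: &[&str]) -> Result<String, Error> {
    let output = Command::new(args[0]).args(&args[1..]).output()?;
    if !output.status.success() {
        return Err(Error::new(ErrorKind::Other, "command returned non-zero"));
    }
    // Trim the trailing newline that git appends to its output.
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
```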
Dockerfile.j2

@@ -2,40 +2,42 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
+{% set rust_version = "1.71.1" %}
-{% set build_stage_base_image = "rust:1.64-bullseye" %}
+{% set debian_version = "bookworm" %}
+{% set alpine_version = "3.17" %}
+{% set build_stage_base_image = "docker.io/library/rust:%s-%s" % (rust_version, debian_version) %}
 {% if "alpine" in target_file %}
 {% if "amd64" in target_file %}
-{% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-stable-1.64.0" %}
+{% set build_stage_base_image = "docker.io/blackdex/rust-musl:x86_64-musl-stable-%s-openssl3" % rust_version %}
-{% set runtime_stage_base_image = "alpine:3.16" %}
+{% set runtime_stage_base_image = "docker.io/library/alpine:%s" % alpine_version %}
 {% set package_arch_target = "x86_64-unknown-linux-musl" %}
 {% elif "armv7" in target_file %}
-{% set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-stable-1.64.0" %}
+{% set build_stage_base_image = "docker.io/blackdex/rust-musl:armv7-musleabihf-stable-%s-openssl3" % rust_version %}
-{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.16" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/armv7hf-alpine:%s" % alpine_version %}
 {% set package_arch_target = "armv7-unknown-linux-musleabihf" %}
 {% elif "armv6" in target_file %}
-{% set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-stable-1.64.0" %}
+{% set build_stage_base_image = "docker.io/blackdex/rust-musl:arm-musleabi-stable-%s-openssl3" % rust_version %}
-{% set runtime_stage_base_image = "balenalib/rpi-alpine:3.16" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/rpi-alpine:%s" % alpine_version %}
 {% set package_arch_target = "arm-unknown-linux-musleabi" %}
 {% elif "arm64" in target_file %}
-{% set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-stable-1.64.0" %}
+{% set build_stage_base_image = "docker.io/blackdex/rust-musl:aarch64-musl-stable-%s-openssl3" % rust_version %}
-{% set runtime_stage_base_image = "balenalib/aarch64-alpine:3.16" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/aarch64-alpine:%s" % alpine_version %}
 {% set package_arch_target = "aarch64-unknown-linux-musl" %}
 {% endif %}
 {% elif "amd64" in target_file %}
-{% set runtime_stage_base_image = "debian:bullseye-slim" %}
+{% set runtime_stage_base_image = "docker.io/library/debian:%s-slim" % debian_version %}
 {% elif "arm64" in target_file %}
-{% set runtime_stage_base_image = "balenalib/aarch64-debian:bullseye" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/aarch64-debian:%s" % debian_version %}
 {% set package_arch_name = "arm64" %}
 {% set package_arch_target = "aarch64-unknown-linux-gnu" %}
 {% set package_cross_compiler = "aarch64-linux-gnu" %}
 {% elif "armv6" in target_file %}
-{% set runtime_stage_base_image = "balenalib/rpi-debian:bullseye" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/rpi-debian:%s" % debian_version %}
 {% set package_arch_name = "armel" %}
 {% set package_arch_target = "arm-unknown-linux-gnueabi" %}
 {% set package_cross_compiler = "arm-linux-gnueabi" %}
 {% elif "armv7" in target_file %}
-{% set runtime_stage_base_image = "balenalib/armv7hf-debian:bullseye" %}
+{% set runtime_stage_base_image = "docker.io/balenalib/armv7hf-debian:%s" % debian_version %}
 {% set package_arch_name = "armhf" %}
 {% set package_arch_target = "armv7-unknown-linux-gnueabihf" %}
 {% set package_cross_compiler = "arm-linux-gnueabihf" %}

@@ -50,7 +52,7 @@
 {% else %}
 {% set package_arch_target_param = "" %}
 {% endif %}
-{% if "buildx" in target_file %}
+{% if "buildkit" in target_file %}
 {% set mount_rust_cache = "--mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry " %}
 {% else %}
 {% set mount_rust_cache = "" %}

@@ -59,8 +61,8 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-{% set vault_version = "v2022.10.0" %}
+{% set vault_version = "v2023.7.1" %}
-{% set vault_image_digest = "sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80" %}
+{% set vault_image_digest = "sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f" %}
 # The web-vault digest specifies a particular web-vault build on Docker Hub.
 # Using the digest instead of the tag name provides better security,
 # as the digest of an image is immutable, whereas a tag name can later

@@ -70,55 +72,54 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:{{ vault_version }}
+# $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version }}
-# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" vaultwarden/web-vault:{{ vault_version }}
+# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version }}
-# [vaultwarden/web-vault@{{ vault_image_digest }}]
+# [docker.io/vaultwarden/web-vault@{{ vault_image_digest }}]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{ '{{' }}.RepoTags}}" vaultwarden/web-vault@{{ vault_image_digest }}
+# $ docker image inspect --format "{{ '{{' }}.RepoTags}}" docker.io/vaultwarden/web-vault@{{ vault_image_digest }}
-# [vaultwarden/web-vault:{{ vault_version }}]
+# [docker.io/vaultwarden/web-vault:{{ vault_version }}]
 #
-FROM vaultwarden/web-vault@{{ vault_image_digest }} as vault
+FROM docker.io/vaultwarden/web-vault@{{ vault_image_digest }} as vault

 ########################## BUILD IMAGE ##########################
 FROM {{ build_stage_base_image }} as build

 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
     LANG=C.UTF-8 \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"

 # Create CARGO_HOME folder and don't download rust docs
 RUN {{ mount_rust_cache -}} mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

 {% if "alpine" in target_file %}
+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
 {% if "armv6" in target_file %}
-# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
+# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
-ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/{{ package_arch_target }}/lib/libatomic.a'
+ENV RUSTFLAGS='-Clink-arg=-latomic'
 {% endif %}
 {% elif "arm" in target_file %}
-#
+# Install build dependencies for the {{ package_arch_name }} architecture
-# Install required build libs for {{ package_arch_name }} architecture.
+RUN {{ mount_rust_cache -}} dpkg --add-architecture {{ package_arch_name }} \
-# hadolint ignore=DL3059
-RUN dpkg --add-architecture {{ package_arch_name }} \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
-        libssl-dev{{ package_arch_prefix }} \
+        gcc-{{ package_cross_compiler }} \
         libc6-dev{{ package_arch_prefix }} \
-        libpq5{{ package_arch_prefix }} \
-        libpq-dev{{ package_arch_prefix }} \
-        libmariadb3{{ package_arch_prefix }} \
         libmariadb-dev{{ package_arch_prefix }} \
         libmariadb-dev-compat{{ package_arch_prefix }} \
-        gcc-{{ package_cross_compiler }} \
+        libmariadb3{{ package_arch_prefix }} \
+        libpq-dev{{ package_arch_prefix }} \
+        libpq5{{ package_arch_prefix }} \
+        libssl-dev{{ package_arch_prefix }} \
     #
     # Make sure cargo has the right target config
     && echo '[target.{{ package_arch_target }}]' >> "${CARGO_HOME}/config" \

@@ -130,16 +131,13 @@ ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_
     CROSS_COMPILE="1" \
     OPENSSL_INCLUDE_DIR="/usr/include/{{ package_cross_compiler }}" \
     OPENSSL_LIB_DIR="/usr/lib/{{ package_cross_compiler }}"

 {% elif "amd64" in target_file %}
-# Install DB packages
+# Install build dependencies
 RUN apt-get update \
     && apt-get install -y \
         --no-install-recommends \
-        libmariadb-dev{{ package_arch_prefix }} \
+        libmariadb-dev \
-        libpq-dev{{ package_arch_prefix }} \
+        libpq-dev
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
 {% endif %}

 # Creates a dummy project used to grab dependencies

@@ -178,7 +176,6 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN {{ mount_rust_cache -}} cargo build --features ${DB} --release{{ package_arch_target_param }}

 ######################## RUNTIME IMAGE ########################

@@ -195,7 +192,6 @@ ENV ROCKET_PROFILE="release" \

 {% if "amd64" not in target_file %}
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 {% endif %}

@@ -203,32 +199,23 @@ RUN [ "cross-build-start" ]
 RUN mkdir /data \
 {% if "alpine" in runtime_stage_base_image %}
     && apk add --no-cache \
-        openssl \
+        ca-certificates \
-        tzdata \
         curl \
-        ca-certificates
+        openssl \
+        tzdata
 {% else %}
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 {% endif %}

-{% if "armv6" in target_file and "alpine" not in target_file %}
-# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
-# This symlink was there in the buster images, and for some reason this is needed.
-# hadolint ignore=DL3059
-RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3
-
-{% endif -%}

 {% if "amd64" not in target_file %}
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 {% endif %}
Makefile

@@ -8,8 +8,8 @@ all: $(OBJECTS)
 %/Dockerfile.alpine: Dockerfile.j2 render_template
 	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"

-%/Dockerfile.buildx: Dockerfile.j2 render_template
+%/Dockerfile.buildkit: Dockerfile.j2 render_template
 	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"

-%/Dockerfile.buildx.alpine: Dockerfile.j2 render_template
+%/Dockerfile.buildkit.alpine: Dockerfile.j2 render_template
 	./render_template "$<" "{\"target_file\":\"$@\"}" > "$@"
amd64/Dockerfile

@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/

@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull vaultwarden/web-vault:v2022.10.0
+# $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
-# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
+# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
-# [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+# [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
+# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
-# [vaultwarden/web-vault:v2022.10.0]
+# [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build

 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \

@@ -37,21 +34,19 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"

 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

-# Install DB packages
+# Install build dependencies
 RUN apt-get update \
     && apt-get install -y \
         --no-install-recommends \
         libmariadb-dev \
-        libpq-dev \
+        libpq-dev
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app

@@ -81,13 +76,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM debian:bullseye-slim
+FROM docker.io/library/debian:bookworm-slim

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \

@@ -98,11 +92,11 @@ ENV ROCKET_PROFILE="release" \
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:x86_64-musl-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.71.1-openssl3 as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.16
+FROM docker.io/library/alpine:3.17

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -93,10 +92,10 @@ ENV ROCKET_PROFILE="release" \
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata


 VOLUME /data
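The `REGISTRIES_CRATES_IO_PROTOCOL=sparse` line added across these files opts cargo into the sparse index protocol for crates.io, which fetches only the index entries a build needs instead of cloning the whole git index inside the container. Cargo's documented environment form is `CARGO_REGISTRIES_CRATES_IO_PROTOCOL`, so the shorter name here presumably relies on how the build images consume it; for reference, the equivalent config-file form (standard cargo configuration, not part of this diff) is:

    # in $CARGO_HOME/config.toml
    [registries.crates-io]
    protocol = "sparse"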
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,21 +34,19 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

-# Install DB packages
+# Install build dependencies
 RUN apt-get update \
     && apt-get install -y \
         --no-install-recommends \
         libmariadb-dev \
-        libpq-dev \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
+        libpq-dev

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -81,13 +76,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM debian:bullseye-slim
+FROM docker.io/library/debian:bookworm-slim

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -98,11 +92,11 @@ ENV ROCKET_PROFILE="release" \
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

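This variant leans on BuildKit cache mounts: the repeated `--mount=type=cache,target=/root/.cargo/git` and `--mount=type=cache,target=/root/.cargo/registry` flags keep cargo's download caches on persistent build-cache volumes, so dependency fetches survive even when an image layer is invalidated. A hedged usage sketch (the tag and the `DB` value are illustrative; `DB` itself is the build argument the `${DB}` references above imply):

    # BuildKit must be enabled for RUN --mount to be accepted
    $ DOCKER_BUILDKIT=1 docker build --build-arg DB=postgresql -t vaultwarden:local .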
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:x86_64-musl-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:x86_64-musl-stable-1.71.1-openssl3 as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.16
+FROM docker.io/library/alpine:3.17

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -93,10 +92,10 @@ ENV ROCKET_PROFILE="release" \
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata


 VOLUME /data
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

-#
-# Install required build libs for arm64 architecture.
-# hadolint ignore=DL3059
+# Install build dependencies for the arm64 architecture
 RUN dpkg --add-architecture arm64 \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
-        libssl-dev:arm64 \
+        gcc-aarch64-linux-gnu \
         libc6-dev:arm64 \
-        libpq5:arm64 \
-        libpq-dev:arm64 \
-        libmariadb3:arm64 \
         libmariadb-dev:arm64 \
         libmariadb-dev-compat:arm64 \
-        gcc-aarch64-linux-gnu \
+        libmariadb3:arm64 \
+        libpq-dev:arm64 \
+        libpq5:arm64 \
+        libssl-dev:arm64 \
     #
     # Make sure cargo has the right target config
     && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \
     OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"


 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,34 +95,31 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/aarch64-debian:bullseye
+FROM docker.io/balenalib/aarch64-debian:bookworm

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80

-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
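The `echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config"` step above seeds cargo's cross-compilation config. The lines appended after that section header fall outside the hunks shown here, but given the `CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"` environment visible in the next hunk header, the resulting file plausibly ends up as (an assumption, not quoted from the diff):

    [target.aarch64-unknown-linux-gnu]
    linker = "aarch64-linux-gnu-gcc"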
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:aarch64-musl-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.71.1-openssl3 as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/aarch64-alpine:3.16
+FROM docker.io/balenalib/aarch64-alpine:3.17

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -89,18 +88,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs


-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata

-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

-#
-# Install required build libs for arm64 architecture.
-# hadolint ignore=DL3059
-RUN dpkg --add-architecture arm64 \
+# Install build dependencies for the arm64 architecture
+RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture arm64 \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
-        libssl-dev:arm64 \
+        gcc-aarch64-linux-gnu \
         libc6-dev:arm64 \
-        libpq5:arm64 \
-        libpq-dev:arm64 \
-        libmariadb3:arm64 \
         libmariadb-dev:arm64 \
         libmariadb-dev-compat:arm64 \
-        gcc-aarch64-linux-gnu \
+        libmariadb3:arm64 \
+        libpq-dev:arm64 \
+        libpq5:arm64 \
+        libssl-dev:arm64 \
     #
     # Make sure cargo has the right target config
     && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \
     OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"


 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,34 +95,31 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/aarch64-debian:bullseye
+FROM docker.io/balenalib/aarch64-debian:bookworm

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80

-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:aarch64-musl-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:aarch64-musl-stable-1.71.1-openssl3 as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"

 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/aarch64-alpine:3.16
+FROM docker.io/balenalib/aarch64-alpine:3.17

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -89,18 +88,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs


-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata

-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -2,7 +2,6 @@

 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

 # Using multistage build:
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault

 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build



 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"


 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal

-#
-# Install required build libs for armel architecture.
-# hadolint ignore=DL3059
+# Install build dependencies for the armel architecture
 RUN dpkg --add-architecture armel \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
-        libssl-dev:armel \
+        gcc-arm-linux-gnueabi \
         libc6-dev:armel \
-        libpq5:armel \
-        libpq-dev:armel \
-        libmariadb3:armel \
         libmariadb-dev:armel \
         libmariadb-dev-compat:armel \
-        gcc-arm-linux-gnueabi \
+        libmariadb3:armel \
+        libpq-dev:armel \
+        libpq5:armel \
+        libssl-dev:armel \
     #
     # Make sure cargo has the right target config
     && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \
     OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"


 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,39 +95,31 @@ RUN touch src/main.rs

 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/rpi-debian:bullseye
+FROM docker.io/balenalib/rpi-debian:bookworm

 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80

-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]

 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
         --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*

-# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
-# This symlink was there in the buster images, and for some reason this is needed.
-# hadolint ignore=DL3059
-RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3

-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]

 VOLUME /data
@@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
# This file was generated using a Jinja2 template.
|
# This file was generated using a Jinja2 template.
|
||||||
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
|
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
|
||||||
|
|
||||||
# Using multistage build:
|
# Using multistage build:
|
||||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||||
@@ -16,20 +15,18 @@
|
|||||||
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
|
||||||
# click the tag name to view the digest of the image it currently points to.
|
# click the tag name to view the digest of the image it currently points to.
|
||||||
# - From the command line:
|
# - From the command line:
|
||||||
# $ docker pull vaultwarden/web-vault:v2022.10.0
|
# $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
|
||||||
# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
|
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
|
||||||
# [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
|
# [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
|
||||||
#
|
#
|
||||||
# - Conversely, to get the tag name from the digest:
|
# - Conversely, to get the tag name from the digest:
|
||||||
# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
|
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
|
||||||
# [vaultwarden/web-vault:v2022.10.0]
|
# [docker.io/vaultwarden/web-vault:v2023.7.1]
|
||||||
#
|
#
|
||||||
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:arm-musleabi-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.71.1-openssl3 as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,15 +34,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
-ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a'
+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
+
+# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
+ENV RUSTFLAGS='-Clink-arg=-latomic'
 
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -77,13 +77,12 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/rpi-alpine:3.16
+FROM docker.io/balenalib/rpi-alpine:3.17
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -91,18 +90,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs
 
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
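Note on the RUSTFLAGS change in the armv6 MUSL build above: instead of hard-coding the path to libatomic.a inside the musl toolchain tree, `-Clink-arg=-latomic` lets the linker resolve libatomic from its own search path, so the flag survives toolchain layout changes. A minimal way to exercise the same flag outside the image (a sketch; it assumes the arm-unknown-linux-musleabi toolchain is installed and uses sqlite as a stand-in for the ${DB} build arg):

    # Ask the linker to pull in libatomic by name rather than by absolute path
    export RUSTFLAGS='-Clink-arg=-latomic'
    cargo build --features sqlite --release --target=arm-unknown-linux-musleabi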
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-#
-# Install required build libs for armel architecture.
-# hadolint ignore=DL3059
-RUN dpkg --add-architecture armel \
+# Install build dependencies for the armel architecture
+RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture armel \
     && apt-get update \
     && apt-get install -y \
        --no-install-recommends \
-        libssl-dev:armel \
+        gcc-arm-linux-gnueabi \
         libc6-dev:armel \
-        libpq5:armel \
-        libpq-dev:armel \
-        libmariadb3:armel \
         libmariadb-dev:armel \
         libmariadb-dev-compat:armel \
-        gcc-arm-linux-gnueabi \
+        libmariadb3:armel \
+        libpq-dev:armel \
+        libpq5:armel \
+        libssl-dev:armel \
     #
     # Make sure cargo has the right target config
     && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \
     OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
 
 
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,39 +95,31 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/rpi-debian:bullseye
+FROM docker.io/balenalib/rpi-debian:bookworm
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
        --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
-# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink.
-# This symlink was there in the buster images, and for some reason this is needed.
-# hadolint ignore=DL3059
-RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
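Note: the RUN --mount=type=cache lines in this variant are BuildKit syntax; the classic builder rejects them. A sketch of invoking such a build directly (tag and paths are illustrative; the .buildkit file name follows the suffix logic in hooks/build further down, and the DB build arg value is an assumption about the template's defaults):

    DOCKER_BUILDKIT=1 docker build \
        --build-arg DB=sqlite \
        -t vaultwarden:armv6-local \
        -f docker/armv6/Dockerfile.buildkit .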
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:arm-musleabi-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:arm-musleabi-stable-1.71.1-openssl3 as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,15 +34,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location
-ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a'
+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
+
+# To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic
+ENV RUSTFLAGS='-Clink-arg=-latomic'
 
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -77,13 +77,12 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/rpi-alpine:3.16
+FROM docker.io/balenalib/rpi-alpine:3.17
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -91,18 +90,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs
 
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-#
-# Install required build libs for armhf architecture.
-# hadolint ignore=DL3059
+# Install build dependencies for the armhf architecture
 RUN dpkg --add-architecture armhf \
     && apt-get update \
     && apt-get install -y \
        --no-install-recommends \
-        libssl-dev:armhf \
+        gcc-arm-linux-gnueabihf \
         libc6-dev:armhf \
-        libpq5:armhf \
-        libpq-dev:armhf \
-        libmariadb3:armhf \
         libmariadb-dev:armhf \
         libmariadb-dev-compat:armhf \
-        gcc-arm-linux-gnueabihf \
+        libmariadb3:armhf \
+        libpq-dev:armhf \
+        libpq5:armhf \
+        libssl-dev:armhf \
     #
     # Make sure cargo has the right target config
     && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \
     OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
 
 
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,34 +95,31 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/armv7hf-debian:bullseye
+FROM docker.io/balenalib/armv7hf-debian:bookworm
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
        --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
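Note: the "dummy project" steps referenced above ("Creates a dummy project used to grab dependencies", then "RUN touch src/main.rs" and a second build) are the usual Docker layer-caching trick for Rust: the first cargo build compiles only the dependencies declared in Cargo.toml into their own image layer, so later source-only changes rebuild just the final layer. The same sequence outside Docker, as a sketch (the target matches this file; the crate name "app" follows the Dockerfile):

    cargo new --bin app && cd app   # empty crate; the real Cargo.toml is copied over it in the image
    cargo build --release --target=armv7-unknown-linux-gnueabihf   # compiles and caches dependencies only
    touch src/main.rs               # invalidates only the application build
    cargo build --release --target=armv7-unknown-linux-gnueabihf   # compiles the real sources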
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:armv7-musleabihf-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.71.1-openssl3 as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
+
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/armv7hf-alpine:3.16
+FROM docker.io/balenalib/armv7hf-alpine:3.17
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -89,18 +88,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs
 
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.64-bullseye as build
+FROM docker.io/library/rust:1.71.1-bookworm as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,28 +34,26 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-#
-# Install required build libs for armhf architecture.
-# hadolint ignore=DL3059
-RUN dpkg --add-architecture armhf \
+# Install build dependencies for the armhf architecture
+RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry dpkg --add-architecture armhf \
     && apt-get update \
     && apt-get install -y \
        --no-install-recommends \
-        libssl-dev:armhf \
+        gcc-arm-linux-gnueabihf \
         libc6-dev:armhf \
-        libpq5:armhf \
-        libpq-dev:armhf \
-        libmariadb3:armhf \
         libmariadb-dev:armhf \
         libmariadb-dev-compat:armhf \
-        gcc-arm-linux-gnueabihf \
+        libmariadb3:armhf \
+        libpq-dev:armhf \
+        libpq5:armhf \
+        libssl-dev:armhf \
     #
     # Make sure cargo has the right target config
     && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \
@@ -71,7 +66,6 @@ ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \
     OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \
     OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
 
 
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
 WORKDIR /app
@@ -101,34 +95,31 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/armv7hf-debian:bullseye
+FROM docker.io/balenalib/armv7hf-debian:bookworm
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
     ROCKET_PORT=80
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apt-get update && apt-get install -y \
        --no-install-recommends \
-        openssl \
         ca-certificates \
         curl \
         libmariadb-dev-compat \
         libpq5 \
+        openssl \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
@@ -2,7 +2,6 @@
 
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
 # Using multistage build:
 #   https://docs.docker.com/develop/develop-images/multistage-build/
 #   https://whitfin.io/speeding-up-rust-docker-builds/
@@ -16,20 +15,18 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2022.10.0
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.10.0
-#     [vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80]
+#     $ docker pull docker.io/vaultwarden/web-vault:v2023.7.1
+#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2023.7.1
+#     [docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80
-#     [vaultwarden/web-vault:v2022.10.0]
+#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f
+#     [docker.io/vaultwarden/web-vault:v2023.7.1]
 #
-FROM vaultwarden/web-vault@sha256:8e8405d252bb6ecc7d59d90e9ba9dde09f35c1b6858371274c67c3e0a6f14a80 as vault
+FROM docker.io/vaultwarden/web-vault@sha256:b306f38fe0d54fa3d79059a737f8e1803da44ddc5f273c2aecdd6a4886211b0f as vault
 
 ########################## BUILD IMAGE ##########################
-FROM blackdex/rust-musl:armv7-musleabihf-stable-1.64.0 as build
+FROM docker.io/blackdex/rust-musl:armv7-musleabihf-stable-1.71.1-openssl3 as build
 
 
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -37,13 +34,16 @@ ENV DEBIAN_FRONTEND=noninteractive \
     TZ=UTC \
     TERM=xterm-256color \
     CARGO_HOME="/root/.cargo" \
+    REGISTRIES_CRATES_IO_PROTOCOL=sparse \
     USER="root"
 
 
 # Create CARGO_HOME folder and don't download rust docs
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
+# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
+# Debian Bookworm already contains libpq v15
+ENV PQ_LIB_DIR="/usr/local/musl/pq15/lib"
+
 # Creates a dummy project used to grab dependencies
 RUN USER=root cargo new --bin /app
@@ -75,13 +75,12 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
-# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM balenalib/armv7hf-alpine:3.16
+FROM docker.io/balenalib/armv7hf-alpine:3.17
 
 ENV ROCKET_PROFILE="release" \
     ROCKET_ADDRESS=0.0.0.0 \
@@ -89,18 +88,16 @@ ENV ROCKET_PROFILE="release" \
     SSL_CERT_DIR=/etc/ssl/certs
 
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
 
 # Create data folder and Install needed libraries
 RUN mkdir /data \
     && apk add --no-cache \
-        openssl \
-        tzdata \
+        ca-certificates \
         curl \
-        ca-certificates
+        openssl \
+        tzdata
 
-# hadolint ignore=DL3059
 RUN [ "cross-build-end" ]
 
 VOLUME /data
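Note: PQ_LIB_DIR, added to every MUSL build stage above, is read by diesel's pq-sys build script to decide which libpq to link against; pointing it at the pq15 tree in the blackdex/rust-musl images is what selects PostgreSQL v15. Roughly (a sketch; the postgresql feature name is an assumption standing in for the ${DB} build arg):

    PQ_LIB_DIR=/usr/local/musl/pq15/lib \
        cargo build --features postgresql --release --target=armv7-unknown-linux-musleabihf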
@@ -45,9 +45,13 @@ if [ -r "${CONFIG_FILE}" ]; then
     fi
 fi
 
+addr="${ROCKET_ADDRESS}"
+if [ -z "${addr}" ] || [ "${addr}" = '0.0.0.0' ] || [ "${addr}" = '::' ]; then
+    addr='localhost'
+fi
 base_path="$(get_base_path "${DOMAIN}")"
 if [ -n "${ROCKET_TLS}" ]; then
     s='s'
 fi
 curl --insecure --fail --silent --show-error \
-     "http${s}://localhost:${ROCKET_PORT}${base_path}/alive" || exit 1
+     "http${s}://${addr}:${ROCKET_PORT}${base_path}/alive" || exit 1
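With this change the healthcheck probes the address Rocket actually binds to instead of always assuming localhost, while wildcard binds still fall back to loopback. How the probe URL resolves (values illustrative; http becomes https when ROCKET_TLS is set):

    # ROCKET_ADDRESS unset     -> http://localhost:${ROCKET_PORT}${base_path}/alive
    # ROCKET_ADDRESS=0.0.0.0   -> http://localhost:${ROCKET_PORT}${base_path}/alive
    # ROCKET_ADDRESS=::        -> http://localhost:${ROCKET_PORT}${base_path}/alive
    # ROCKET_ADDRESS=10.0.0.5  -> http://10.0.0.5:${ROCKET_PORT}${base_path}/alive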
@@ -1,3 +1,5 @@
+#!/usr/bin/env bash
+
 # The default Debian-based images support these arches for all database backends.
 arches=(
     amd64
@@ -5,7 +7,9 @@ arches=(
     armv7
     arm64
 )
+export arches
 
 if [[ "${DOCKER_TAG}" == *alpine ]]; then
     distro_suffix=.alpine
 fi
+export distro_suffix
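Note: hooks/arches.sh is sourced by the other hook scripts rather than executed, so its variables were already visible to them; the new shebang and explicit exports mainly make the file self-describing and keep shellcheck quiet about seemingly unused variables (that rationale is an inference, the diff itself does not state it). Consumption stays the same:

    source ./hooks/arches.sh
    echo "building for: ${arches[*]}${distro_suffix:+ (suffix ${distro_suffix})}"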
hooks/build
@@ -1,7 +1,8 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 echo ">>> Building images..."
 
+# shellcheck source=arches.sh
 source ./hooks/arches.sh
 
 if [[ -z "${SOURCE_COMMIT}" ]]; then
@@ -23,10 +24,10 @@ LABELS=(
     # https://github.com/opencontainers/image-spec/blob/master/annotations.md
     org.opencontainers.image.created="$(date --utc --iso-8601=seconds)"
     org.opencontainers.image.documentation="https://github.com/dani-garcia/vaultwarden/wiki"
-    org.opencontainers.image.licenses="GPL-3.0-only"
+    org.opencontainers.image.licenses="AGPL-3.0-only"
     org.opencontainers.image.revision="${SOURCE_COMMIT}"
     org.opencontainers.image.source="${SOURCE_REPOSITORY_URL}"
-    org.opencontainers.image.url="https://hub.docker.com/r/${DOCKER_REPO#*/}"
+    org.opencontainers.image.url="https://github.com/dani-garcia/vaultwarden"
     org.opencontainers.image.version="${SOURCE_VERSION}"
 )
 LABEL_ARGS=()
@@ -34,9 +35,9 @@ for label in "${LABELS[@]}"; do
     LABEL_ARGS+=(--label "${label}")
 done
 
-# Check if DOCKER_BUILDKIT is set, if so, use the Dockerfile.buildx as template
+# Check if DOCKER_BUILDKIT is set, if so, use the Dockerfile.buildkit as template
 if [[ -n "${DOCKER_BUILDKIT}" ]]; then
-    buildx_suffix=.buildx
+    buildkit_suffix=.buildkit
 fi
 
 set -ex
@@ -45,6 +46,6 @@ for arch in "${arches[@]}"; do
     docker build \
        "${LABEL_ARGS[@]}" \
        -t "${DOCKER_REPO}:${DOCKER_TAG}-${arch}" \
-        -f docker/${arch}/Dockerfile${buildx_suffix}${distro_suffix} \
+        -f "docker/${arch}/Dockerfile${buildkit_suffix}${distro_suffix}" \
        .
 done
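Combined with hooks/arches.sh, the template picked per build is Dockerfile[.buildkit][.alpine]. A sketch of how the two suffixes resolve (values illustrative):

    # DOCKER_BUILDKIT=1 and DOCKER_TAG=testing-alpine, arch=amd64:
    #   buildkit_suffix=.buildkit, distro_suffix=.alpine
    #   -> -f docker/amd64/Dockerfile.buildkit.alpine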
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 set -ex
 
hooks/push
@@ -1,5 +1,6 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
+# shellcheck source=arches.sh
 source ./hooks/arches.sh
 
 export DOCKER_CLI_EXPERIMENTAL=enabled
@@ -41,7 +42,7 @@ LOCAL_REPO="${LOCAL_REGISTRY}/${REPO}"
 
 echo ">>> Pushing images to local registry..."
 
-for arch in ${arches[@]}; do
+for arch in "${arches[@]}"; do
     docker_image="${DOCKER_REPO}:${DOCKER_TAG}-${arch}"
     local_image="${LOCAL_REPO}:${DOCKER_TAG}-${arch}"
     docker tag "${docker_image}" "${local_image}"
@@ -71,9 +72,9 @@ tags=("${DOCKER_REPO}:${DOCKER_TAG}")
 # to make it easier for users to track the latest release.
 if [[ "${DOCKER_TAG}" =~ ^[0-9]+\.[0-9]+\.[0-9]+ ]]; then
     if [[ "${DOCKER_TAG}" == *alpine ]]; then
-        tags+=(${DOCKER_REPO}:alpine)
+        tags+=("${DOCKER_REPO}:alpine")
     else
-        tags+=(${DOCKER_REPO}:latest)
+        tags+=("${DOCKER_REPO}:latest")
     fi
 fi
 
@@ -91,10 +92,10 @@ declare -A arch_to_platform=(
     [arm64]="linux/arm64"
 )
 platforms=()
-for arch in ${arches[@]}; do
+for arch in "${arches[@]}"; do
     platforms+=("${arch_to_platform[$arch]}")
 done
-platforms="$(join "," "${platforms[@]}")"
+platform="$(join "," "${platforms[@]}")"
 
 # Run the build, pushing the resulting images and multi-arch manifest list to
 # Docker Hub. The Dockerfile is read from stdin to avoid sending any build
@@ -104,46 +105,7 @@ docker buildx build \
     --network host \
     --build-arg LOCAL_REPO="${LOCAL_REPO}" \
     --build-arg DOCKER_TAG="${DOCKER_TAG}" \
-    --platform "${platforms}" \
+    --platform "${platform}" \
     "${tag_args[@]}" \
     --push \
     - < ./docker/Dockerfile.buildx
 
-# Add an extra arch-specific tag for `arm32v6`; Docker can't seem to properly
-# auto-select that image on ARMv6 platforms like Raspberry Pi 1 and Zero
-# (https://github.com/moby/moby/issues/41017).
-#
-# Note that we use `arm32v6` instead of `armv6` to be consistent with the
-# existing vaultwarden tags, which adhere to the naming conventions of the
-# Docker per-architecture repos (e.g., https://hub.docker.com/u/arm32v6).
-# Unfortunately, these per-arch repo names aren't always consistent with the
-# corresponding platform (OS/arch/variant) IDs, particularly in the case of
-# 32-bit ARM arches (e.g., `linux/arm/v6` is used, not `linux/arm32/v6`).
-#
-# TODO: It looks like this issue should be fixed starting in Docker 20.10.0,
-# so this step can be removed once fixed versions are in wider distribution.
-#
-# Tags:
-#
-# testing => testing-arm32v6
-# testing-alpine => <ignored>
-# x.y.z => x.y.z-arm32v6, latest-arm32v6
-# x.y.z-alpine => <ignored>
-#
-if [[ "${DOCKER_TAG}" != *alpine ]]; then
-    image="${DOCKER_REPO}":"${DOCKER_TAG}"
-
-    # Fetch the multi-arch manifest list and find the digest of the armv6 image.
-    filter='.manifests|.[]|select(.platform.architecture=="arm" and .platform.variant=="v6")|.digest'
-    digest="$(docker manifest inspect "${image}" | jq -r "${filter}")"
-
-    # Pull the armv6 image by digest, retag it, and repush it.
-    docker pull "${DOCKER_REPO}"@"${digest}"
-    docker tag "${DOCKER_REPO}"@"${digest}" "${image}"-arm32v6
-    docker push "${image}"-arm32v6
-
-    if [[ "${DOCKER_TAG}" =~ ^[0-9]+\.[0-9]+\.[0-9]+ ]]; then
-        docker tag "${image}"-arm32v6 "${DOCKER_REPO}:latest"-arm32v6
-        docker push "${DOCKER_REPO}:latest"-arm32v6
-    fi
-fi
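Two things happen in the hooks/push change: array expansions are now quoted ("${arches[@]}"), which keeps elements intact under word splitting and globbing, and the comma-joined platform list now lives in a singular `platform` variable to match what it holds. The `join` helper is defined elsewhere in the script; a common shape for it looks like this (an assumption, not necessarily this repo's exact definition):

    join() {
        local IFS="$1"   # the first character of IFS separates "$*"
        shift
        echo "$*"
    }
    join "," linux/amd64 linux/arm/v7   # prints: linux/amd64,linux/arm/v7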
@@ -0,0 +1,3 @@
+DROP TABLE `groups`;
+DROP TABLE groups_users;
+DROP TABLE collections_groups;

migrations/mysql/2022-07-27-110000_add_group_support/up.sql
@@ -0,0 +1,23 @@
+CREATE TABLE `groups` (
+  uuid                CHAR(36) NOT NULL PRIMARY KEY,
+  organizations_uuid  VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
+  name                VARCHAR(100) NOT NULL,
+  access_all          BOOLEAN NOT NULL,
+  external_id         VARCHAR(300) NULL,
+  creation_date       DATETIME NOT NULL,
+  revision_date       DATETIME NOT NULL
+);
+
+CREATE TABLE groups_users (
+  groups_uuid               CHAR(36) NOT NULL REFERENCES `groups` (uuid),
+  users_organizations_uuid  VARCHAR(36) NOT NULL REFERENCES users_organizations (uuid),
+  UNIQUE (groups_uuid, users_organizations_uuid)
+);
+
+CREATE TABLE collections_groups (
+  collections_uuid  VARCHAR(40) NOT NULL REFERENCES collections (uuid),
+  groups_uuid       CHAR(36) NOT NULL REFERENCES `groups` (uuid),
+  read_only         BOOLEAN NOT NULL,
+  hide_passwords    BOOLEAN NOT NULL,
+  UNIQUE (collections_uuid, groups_uuid)
+);

migrations/mysql/2022-10-18-170602_add_events/down.sql
@@ -0,0 +1 @@
+DROP TABLE event;

migrations/mysql/2022-10-18-170602_add_events/up.sql
@@ -0,0 +1,19 @@
+CREATE TABLE event (
+  uuid                CHAR(36) NOT NULL PRIMARY KEY,
+  event_type          INTEGER NOT NULL,
+  user_uuid           CHAR(36),
+  org_uuid            CHAR(36),
+  cipher_uuid         CHAR(36),
+  collection_uuid     CHAR(36),
+  group_uuid          CHAR(36),
+  org_user_uuid       CHAR(36),
+  act_user_uuid       CHAR(36),
+  device_type         INTEGER,
+  ip_address          TEXT,
+  event_date          DATETIME NOT NULL,
+  policy_uuid         CHAR(36),
+  provider_uuid       CHAR(36),
+  provider_user_uuid  CHAR(36),
+  provider_org_uuid   CHAR(36),
+  UNIQUE (uuid)
+);

@@ -0,0 +1,2 @@
+ALTER TABLE users_organizations
+ADD COLUMN reset_password_key TEXT;

@@ -0,0 +1,2 @@
+ALTER TABLE users
+ADD COLUMN avatar_color VARCHAR(7);

migrations/mysql/2023-01-31-222222_add_argon2/up.sql
@@ -0,0 +1,7 @@
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_memory INTEGER DEFAULT NULL;
+
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_parallelism INTEGER DEFAULT NULL;

@@ -0,0 +1 @@
+ALTER TABLE devices ADD COLUMN push_uuid TEXT;

@@ -0,0 +1,10 @@
+CREATE TABLE organization_api_key (
+  uuid           CHAR(36) NOT NULL,
+  org_uuid       CHAR(36) NOT NULL REFERENCES organizations(uuid),
+  atype          INTEGER NOT NULL,
+  api_key        VARCHAR(255) NOT NULL,
+  revision_date  DATETIME NOT NULL,
+  PRIMARY KEY(uuid, org_uuid)
+);
+
+ALTER TABLE users ADD COLUMN external_id TEXT;

@@ -0,0 +1,19 @@
+CREATE TABLE auth_requests (
+  uuid                       CHAR(36) NOT NULL PRIMARY KEY,
+  user_uuid                  CHAR(36) NOT NULL,
+  organization_uuid          CHAR(36),
+  request_device_identifier  CHAR(36) NOT NULL,
+  device_type                INTEGER NOT NULL,
+  request_ip                 TEXT NOT NULL,
+  response_device_id         CHAR(36),
+  access_code                TEXT NOT NULL,
+  public_key                 TEXT NOT NULL,
+  enc_key                    TEXT NOT NULL,
+  master_password_hash       TEXT NOT NULL,
+  approved                   BOOLEAN,
+  creation_date              DATETIME NOT NULL,
+  response_date              DATETIME,
+  authentication_date        DATETIME,
+  FOREIGN KEY(user_uuid) REFERENCES users(uuid),
+  FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
+);

@@ -0,0 +1 @@
+ALTER TABLE collections ADD COLUMN external_id TEXT;
@@ -0,0 +1,3 @@
|
|||||||
|
DROP TABLE groups;
|
||||||
|
DROP TABLE groups_users;
|
||||||
|
DROP TABLE collections_groups;
|
@@ -0,0 +1,23 @@
|
|||||||
|
CREATE TABLE groups (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
organizations_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
|
||||||
|
name VARCHAR(100) NOT NULL,
|
||||||
|
access_all BOOLEAN NOT NULL,
|
||||||
|
external_id VARCHAR(300) NULL,
|
||||||
|
creation_date TIMESTAMP NOT NULL,
|
||||||
|
revision_date TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE groups_users (
|
||||||
|
groups_uuid CHAR(36) NOT NULL REFERENCES groups (uuid),
|
||||||
|
users_organizations_uuid VARCHAR(36) NOT NULL REFERENCES users_organizations (uuid),
|
||||||
|
PRIMARY KEY (groups_uuid, users_organizations_uuid)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE collections_groups (
|
||||||
|
collections_uuid VARCHAR(40) NOT NULL REFERENCES collections (uuid),
|
||||||
|
groups_uuid CHAR(36) NOT NULL REFERENCES groups (uuid),
|
||||||
|
read_only BOOLEAN NOT NULL,
|
||||||
|
hide_passwords BOOLEAN NOT NULL,
|
||||||
|
PRIMARY KEY (collections_uuid, groups_uuid)
|
||||||
|
);
|
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE event;
|
19
migrations/postgresql/2022-10-18-170602_add_events/up.sql
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
CREATE TABLE event (
|
||||||
|
uuid CHAR(36) NOT NULL PRIMARY KEY,
|
||||||
|
event_type INTEGER NOT NULL,
|
||||||
|
user_uuid CHAR(36),
|
||||||
|
org_uuid CHAR(36),
|
||||||
|
cipher_uuid CHAR(36),
|
||||||
|
collection_uuid CHAR(36),
|
||||||
|
group_uuid CHAR(36),
|
||||||
|
org_user_uuid CHAR(36),
|
||||||
|
act_user_uuid CHAR(36),
|
||||||
|
device_type INTEGER,
|
||||||
|
ip_address TEXT,
|
||||||
|
event_date TIMESTAMP NOT NULL,
|
||||||
|
policy_uuid CHAR(36),
|
||||||
|
provider_uuid CHAR(36),
|
||||||
|
provider_user_uuid CHAR(36),
|
||||||
|
provider_org_uuid CHAR(36),
|
||||||
|
UNIQUE (uuid)
|
||||||
|
);
|
@@ -0,0 +1,2 @@
|
|||||||
|
ALTER TABLE users_organizations
|
||||||
|
ADD COLUMN reset_password_key TEXT;
|
@@ -0,0 +1,2 @@
|
|||||||
|
ALTER TABLE users
|
||||||
|
ADD COLUMN avatar_color TEXT;
|
@@ -0,0 +1,7 @@
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_memory INTEGER DEFAULT NULL;
+
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_parallelism INTEGER DEFAULT NULL;

@@ -0,0 +1 @@
+ALTER TABLE devices ADD COLUMN push_uuid TEXT;
@@ -0,0 +1,10 @@
+CREATE TABLE organization_api_key (
+    uuid          CHAR(36) NOT NULL,
+    org_uuid      CHAR(36) NOT NULL REFERENCES organizations(uuid),
+    atype         INTEGER NOT NULL,
+    api_key       VARCHAR(255),
+    revision_date TIMESTAMP NOT NULL,
+    PRIMARY KEY(uuid, org_uuid)
+);
+
+ALTER TABLE users ADD COLUMN external_id TEXT;
@@ -0,0 +1,19 @@
+CREATE TABLE auth_requests (
+    uuid                      CHAR(36) NOT NULL PRIMARY KEY,
+    user_uuid                 CHAR(36) NOT NULL,
+    organization_uuid         CHAR(36),
+    request_device_identifier CHAR(36) NOT NULL,
+    device_type               INTEGER NOT NULL,
+    request_ip                TEXT NOT NULL,
+    response_device_id        CHAR(36),
+    access_code               TEXT NOT NULL,
+    public_key                TEXT NOT NULL,
+    enc_key                   TEXT NOT NULL,
+    master_password_hash      TEXT NOT NULL,
+    approved                  BOOLEAN,
+    creation_date             TIMESTAMP NOT NULL,
+    response_date             TIMESTAMP,
+    authentication_date       TIMESTAMP,
+    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
+    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
+);

@@ -0,0 +1 @@
+ALTER TABLE collections ADD COLUMN external_id TEXT;
@@ -0,0 +1,3 @@
+DROP TABLE groups;
+DROP TABLE groups_users;
+DROP TABLE collections_groups;
23  migrations/sqlite/2022-07-27-110000_add_group_support/up.sql  Normal file
@@ -0,0 +1,23 @@
+CREATE TABLE groups (
+    uuid               TEXT NOT NULL PRIMARY KEY,
+    organizations_uuid TEXT NOT NULL REFERENCES organizations (uuid),
+    name               TEXT NOT NULL,
+    access_all         BOOLEAN NOT NULL,
+    external_id        TEXT NULL,
+    creation_date      TIMESTAMP NOT NULL,
+    revision_date      TIMESTAMP NOT NULL
+);
+
+CREATE TABLE groups_users (
+    groups_uuid              TEXT NOT NULL REFERENCES groups (uuid),
+    users_organizations_uuid TEXT NOT NULL REFERENCES users_organizations (uuid),
+    UNIQUE (groups_uuid, users_organizations_uuid)
+);
+
+CREATE TABLE collections_groups (
+    collections_uuid TEXT NOT NULL REFERENCES collections (uuid),
+    groups_uuid      TEXT NOT NULL REFERENCES groups (uuid),
+    read_only        BOOLEAN NOT NULL,
+    hide_passwords   BOOLEAN NOT NULL,
+    UNIQUE (collections_uuid, groups_uuid)
+);
1  migrations/sqlite/2022-10-18-170602_add_events/down.sql  Normal file
@@ -0,0 +1 @@
+DROP TABLE event;
19  migrations/sqlite/2022-10-18-170602_add_events/up.sql  Normal file
@@ -0,0 +1,19 @@
+CREATE TABLE event (
+    uuid               TEXT NOT NULL PRIMARY KEY,
+    event_type         INTEGER NOT NULL,
+    user_uuid          TEXT,
+    org_uuid           TEXT,
+    cipher_uuid        TEXT,
+    collection_uuid    TEXT,
+    group_uuid         TEXT,
+    org_user_uuid      TEXT,
+    act_user_uuid      TEXT,
+    device_type        INTEGER,
+    ip_address         TEXT,
+    event_date         DATETIME NOT NULL,
+    policy_uuid        TEXT,
+    provider_uuid      TEXT,
+    provider_user_uuid TEXT,
+    provider_org_uuid  TEXT,
+    UNIQUE (uuid)
+);
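Editor's note: the event_type integers stored in this table follow the event-type codes of the upstream Bitwarden API. A few representative values, sketched as a Rust enum (illustrative subset only; vaultwarden's own EventType in src/db/models/event.rs is the authoritative list, and these exact numbers are an assumption):

    // Illustrative subset; 1000-range = user events, 1500-range = organization-user events.
    #[derive(Debug, Clone, Copy)]
    pub enum EventType {
        UserLoggedIn = 1000,
        UserChangedPassword = 1001,
        OrganizationUserInvited = 1500,
        OrganizationUserConfirmed = 1501,
        OrganizationUserUpdated = 1502,
        OrganizationUserRemoved = 1503,
    }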
@@ -0,0 +1,2 @@
+ALTER TABLE users_organizations
+ADD COLUMN reset_password_key TEXT;

@@ -0,0 +1,2 @@
+ALTER TABLE users
+ADD COLUMN avatar_color TEXT;
7  migrations/sqlite/2023-01-31-222222_add_argon2/up.sql  Normal file
@@ -0,0 +1,7 @@
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_memory INTEGER DEFAULT NULL;
+
+ALTER TABLE users
+    ADD COLUMN
+    client_kdf_parallelism INTEGER DEFAULT NULL;
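Editor's note: these two nullable columns carry the Argon2id parameters that clients configure next to the existing client_kdf_type and client_kdf_iter columns; they stay NULL for PBKDF2 users. A hedged sketch of how the fields fit together (names beyond the migrations themselves are assumptions):

    // Hypothetical grouping of the KDF-related user columns.
    pub struct KdfSettings {
        pub client_kdf_type: i32,                // 0 = PBKDF2-SHA256, 1 = Argon2id (upstream convention)
        pub client_kdf_iter: i32,                // iteration count for either KDF
        pub client_kdf_memory: Option<i32>,      // Argon2id memory in MiB, NULL for PBKDF2
        pub client_kdf_parallelism: Option<i32>, // Argon2id lanes, NULL for PBKDF2
    }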
@@ -0,0 +1 @@
+ALTER TABLE devices ADD COLUMN push_uuid TEXT;
@@ -0,0 +1,11 @@
+CREATE TABLE organization_api_key (
+    uuid          TEXT NOT NULL,
+    org_uuid      TEXT NOT NULL,
+    atype         INTEGER NOT NULL,
+    api_key       TEXT NOT NULL,
+    revision_date DATETIME NOT NULL,
+    PRIMARY KEY(uuid, org_uuid),
+    FOREIGN KEY(org_uuid) REFERENCES organizations(uuid)
+);
+
+ALTER TABLE users ADD COLUMN external_id TEXT;
@@ -0,0 +1,19 @@
+CREATE TABLE auth_requests (
+    uuid                      TEXT NOT NULL PRIMARY KEY,
+    user_uuid                 TEXT NOT NULL,
+    organization_uuid         TEXT,
+    request_device_identifier TEXT NOT NULL,
+    device_type               INTEGER NOT NULL,
+    request_ip                TEXT NOT NULL,
+    response_device_id        TEXT,
+    access_code               TEXT NOT NULL,
+    public_key                TEXT NOT NULL,
+    enc_key                   TEXT NOT NULL,
+    master_password_hash      TEXT NOT NULL,
+    approved                  BOOLEAN,
+    creation_date             DATETIME NOT NULL,
+    response_date             DATETIME,
+    authentication_date       DATETIME,
+    FOREIGN KEY(user_uuid) REFERENCES users(uuid),
+    FOREIGN KEY(organization_uuid) REFERENCES organizations(uuid)
+);

@@ -0,0 +1 @@
+ALTER TABLE collections ADD COLUMN external_id TEXT;
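Editor's note: the same logical migrations are duplicated across the mysql, postgresql, and sqlite folders because vaultwarden applies them with diesel_migrations at startup. A minimal sketch of the embedding pattern, assuming the sqlite backend (the real code selects the folder via cargo features and its own connection wrapper):

    // Minimal sketch: embed a migrations folder and run anything pending.
    use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

    pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");

    fn run_migrations(conn: &mut diesel::sqlite::SqliteConnection) {
        conn.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
    }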
93  resources/404.svg  Normal file
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+   width="500"
+   height="222"
+   viewBox="0 0 500 222"
+   version="1.1"
+   id="svg5"
+   xml:space="preserve"
+   inkscape:version="1.2.1 (9c6d41e410, 2022-07-14, custom)"
+   sodipodi:docname="404.svg"
+   inkscape:export-filename="404.png"
+   inkscape:export-xdpi="96"
+   inkscape:export-ydpi="96"
+   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+   xmlns:xlink="http://www.w3.org/1999/xlink"
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:svg="http://www.w3.org/2000/svg"><sodipodi:namedview
+     id="namedview7"
+     pagecolor="#ffffff"
+     bordercolor="#666666"
+     borderopacity="1.0"
+     inkscape:showpageshadow="2"
+     inkscape:pageopacity="0.0"
+     inkscape:pagecheckerboard="0"
+     inkscape:deskcolor="#d1d1d1"
+     inkscape:document-units="px"
+     showgrid="false"
+     inkscape:zoom="1.3791767"
+     inkscape:cx="284.59007"
+     inkscape:cy="214.25826"
+     inkscape:window-width="1916"
+     inkscape:window-height="1038"
+     inkscape:window-x="0"
+     inkscape:window-y="18"
+     inkscape:window-maximized="1"
+     inkscape:current-layer="layer1"
+     showguides="false" /><defs
+     id="defs2"><mask
+       id="holes"><rect
+         x="-60"
+         y="-60"
+         width="120"
+         height="120"
+         fill="#ffffff"
+         id="rect3296" /><circle
+         id="hole"
+         cy="-40"
+         r="3"
+         cx="0" /><use
+         transform="rotate(72)"
+         xlink:href="#hole"
+         id="use3299" /><use
+         transform="rotate(144)"
+         xlink:href="#hole"
+         id="use3301" /><use
+         transform="rotate(-144)"
+         xlink:href="#hole"
+         id="use3303" /><use
+         transform="rotate(-72)"
+         xlink:href="#hole"
+         id="use3305" /></mask></defs><g
+     inkscape:label="Ebene 1"
+     inkscape:groupmode="layer"
+     id="layer1"><rect
+       style="fill:none;fill-opacity:0.5;stroke:none;stroke-width:0.74;stroke-opacity:1"
+       id="rect681"
+       width="666"
+       height="222"
+       x="0"
+       y="0" /><text
+       xml:space="preserve"
+       style="font-size:128px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans';text-align:center;text-anchor:middle;fill:#000000;fill-opacity:0.7;stroke-width:1"
+       x="249.9375"
+       y="134.8125"
+       id="text3425"><tspan
+         id="tspan3423"
+         x="249.9375"
+         y="134.8125"
+         style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:128px;font-family:'Open Sans';-inkscape-font-specification:'Open Sans';text-align:center;text-anchor:middle;fill:#000000;fill-opacity:0.7;stroke-width:1"
+         sodipodi:role="line">404</tspan></text><text
+       xml:space="preserve"
+       style="font-size:26.6667px;line-height:1.25;font-family:'Open Sans';-inkscape-font-specification:'Open Sans';text-align:center;text-anchor:middle"
+       x="249.04297"
+       y="194.68582"
+       id="text4067"><tspan
+         sodipodi:role="line"
+         id="tspan4065"
+         x="249.04295"
+         y="194.68582"
+         style="font-size:26.6667px;text-align:center;text-anchor:middle;fill:#000000;fill-opacity:0.7">Return to the web vault?</tspan></text></g></svg>
@@ -1 +1 @@
-1.64.0
+1.71.1
@@ -1,7 +1,4 @@
-# version = "Two"
 edition = "2021"
 max_width = 120
 newline_style = "Unix"
 use_small_heuristics = "Off"
-# struct_lit_single_line = false
-# overflow_delimited_expr = true
475  src/api/admin.rs
@@ -6,14 +6,14 @@ use std::env;
 use rocket::serde::json::Json;
 use rocket::{
     form::Form,
-    http::{Cookie, CookieJar, SameSite, Status},
-    request::{self, FromRequest, Outcome, Request},
+    http::{Cookie, CookieJar, MediaType, SameSite, Status},
+    request::{FromRequest, Outcome, Request},
     response::{content::RawHtml as Html, Redirect},
-    Route,
+    Catcher, Route,
 };
 
 use crate::{
-    api::{ApiResult, EmptyResult, JsonResult, NumberOrString},
+    api::{core::log_event, unregister_push_device, ApiResult, EmptyResult, JsonResult, Notify, NumberOrString},
     auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp},
     config::ConfigBuilder,
     db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
@@ -25,19 +25,18 @@ use crate::{
     CONFIG, VERSION,
 };
 
-use futures::{stream, stream::StreamExt};
-
 pub fn routes() -> Vec<Route> {
     if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() {
         return routes![admin_disabled];
     }
 
     routes![
-        admin_login,
         get_users_json,
         get_user_json,
+        get_user_by_mail_json,
         post_admin_login,
         admin_page,
+        admin_page_login,
         invite_user,
         logout,
         delete_user,
@@ -55,10 +54,19 @@ pub fn routes() -> Vec<Route> {
         organizations_overview,
         delete_organization,
         diagnostics,
-        get_diagnostics_config
+        get_diagnostics_config,
+        resend_user_invite,
     ]
 }
 
+pub fn catchers() -> Vec<Catcher> {
+    if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() {
+        catchers![]
+    } else {
+        catchers![admin_login]
+    }
+}
+
 static DB_TYPE: Lazy<&str> = Lazy::new(|| {
     DbConnType::from_url(&CONFIG.database_url())
         .map(|t| match t {
@@ -83,21 +91,12 @@ const DT_FMT: &str = "%Y-%m-%d %H:%M:%S %Z";
 
 const BASE_TEMPLATE: &str = "admin/base";
 
+const ACTING_ADMIN_USER: &str = "vaultwarden-admin-00000-000000000000";
+
 fn admin_path() -> String {
     format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
 }
 
-struct Referer(Option<String>);
-
-#[rocket::async_trait]
-impl<'r> FromRequest<'r> for Referer {
-    type Error = ();
-
-    async fn from_request(request: &'r Request<'_>) -> request::Outcome<Self, Self::Error> {
-        Outcome::Success(Referer(request.headers().get_one("Referer").map(str::to_string)))
-    }
-}
-
 #[derive(Debug)]
 struct IpHeader(Option<String>);
 
@@ -120,25 +119,8 @@ impl<'r> FromRequest<'r> for IpHeader {
     }
 }
 
-/// Used for `Location` response headers, which must specify an absolute URI
-/// (see https://tools.ietf.org/html/rfc2616#section-14.30).
-fn admin_url(referer: Referer) -> String {
-    // If we get a referer use that to make it work when, DOMAIN is not set
-    if let Some(mut referer) = referer.0 {
-        if let Some(start_index) = referer.find(ADMIN_PATH) {
-            referer.truncate(start_index + ADMIN_PATH.len());
-            return referer;
-        }
-    }
-
-    if CONFIG.domain_set() {
-        // Don't use CONFIG.domain() directly, since the user may want to keep a
-        // trailing slash there, particularly when running under a subpath.
-        format!("{}{}{}", CONFIG.domain_origin(), CONFIG.domain_path(), ADMIN_PATH)
-    } else {
-        // Last case, when no referer or domain set, technically invalid but better than nothing
-        ADMIN_PATH.to_string()
-    }
+fn admin_url() -> String {
+    format!("{}{}", CONFIG.domain_origin(), admin_path())
 }
 
 #[derive(Responder)]
@@ -151,18 +133,22 @@ enum AdminResponse {
     TooManyRequests(ApiResult<Html<String>>),
 }
 
-#[get("/", rank = 2)]
-fn admin_login() -> ApiResult<Html<String>> {
-    render_admin_login(None)
+#[catch(401)]
+fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
+    if request.format() == Some(&MediaType::JSON) {
+        err_code!("Authorization failed.", Status::Unauthorized.code);
+    }
+    let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
+    render_admin_login(None, Some(redirect))
 }
 
-fn render_admin_login(msg: Option<&str>) -> ApiResult<Html<String>> {
+fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> {
     // If there is an error, show it
     let msg = msg.map(|msg| format!("Error: {msg}"));
     let json = json!({
         "page_content": "admin/login",
-        "version": VERSION,
         "error": msg,
+        "redirect": redirect,
         "urlpath": CONFIG.domain_path()
     });
 
@@ -174,20 +160,25 @@ fn render_admin_login(msg: Option<&str>) -> ApiResult<Html<String>> {
 #[derive(FromForm)]
 struct LoginForm {
     token: String,
+    redirect: Option<String>,
 }
 
 #[post("/", data = "<data>")]
-fn post_admin_login(data: Form<LoginForm>, cookies: &CookieJar<'_>, ip: ClientIp) -> AdminResponse {
+fn post_admin_login(data: Form<LoginForm>, cookies: &CookieJar<'_>, ip: ClientIp) -> Result<Redirect, AdminResponse> {
     let data = data.into_inner();
+    let redirect = data.redirect;
 
     if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
-        return AdminResponse::TooManyRequests(render_admin_login(Some("Too many requests, try again later.")));
+        return Err(AdminResponse::TooManyRequests(render_admin_login(
+            Some("Too many requests, try again later."),
+            redirect,
+        )));
     }
 
     // If the token is invalid, redirect to login page
     if !_validate_token(&data.token) {
         error!("Invalid admin token. IP: {}", ip.ip);
-        AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again.")))
+        Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect)))
     } else {
         // If the token received is valid, generate JWT and save it as a cookie
         let claims = generate_admin_claims();
@@ -195,19 +186,36 @@ fn post_admin_login(data: Form<LoginForm>, cookies: &CookieJar<'_>, ip: ClientIp
 
         let cookie = Cookie::build(COOKIE_NAME, jwt)
             .path(admin_path())
-            .max_age(rocket::time::Duration::minutes(20))
+            .max_age(rocket::time::Duration::minutes(CONFIG.admin_session_lifetime()))
             .same_site(SameSite::Strict)
             .http_only(true)
             .finish();
 
         cookies.add(cookie);
-        AdminResponse::Ok(render_admin_page())
+        if let Some(redirect) = redirect {
+            Ok(Redirect::to(format!("{}{}", admin_path(), redirect)))
+        } else {
+            Err(AdminResponse::Ok(render_admin_page()))
+        }
     }
 }
 
 fn _validate_token(token: &str) -> bool {
     match CONFIG.admin_token().as_ref() {
         None => false,
+        Some(t) if t.starts_with("$argon2") => {
+            use argon2::password_hash::PasswordVerifier;
+            match argon2::password_hash::PasswordHash::new(t) {
+                Ok(h) => {
+                    // NOTE: hash params from `ADMIN_TOKEN` are used instead of what is configured in the `Argon2` instance.
+                    argon2::Argon2::default().verify_password(token.trim().as_ref(), &h).is_ok()
+                }
+                Err(e) => {
+                    error!("The configured Argon2 PHC in `ADMIN_TOKEN` is invalid: {e}");
+                    false
+                }
+            }
+        }
         Some(t) => crate::crypto::ct_eq(t.trim(), token.trim()),
     }
 }
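Editor's note: with this change ADMIN_TOKEN may hold an Argon2 PHC string (anything starting with $argon2) instead of the plain token, so the admin secret no longer has to sit in the environment in clear text. A minimal sketch of producing such a string with the same argon2 crate used above (parameters left at crate defaults here; pick explicit ones in practice):

    // Sketch: generate a "$argon2id$..." PHC string suitable for ADMIN_TOKEN.
    use argon2::{
        password_hash::{rand_core::OsRng, PasswordHasher, SaltString},
        Argon2,
    };

    fn main() {
        let salt = SaltString::generate(&mut OsRng);
        let phc = Argon2::default()
            .hash_password(b"my-admin-token", &salt) // the plain token you would have used
            .expect("hashing failed")
            .to_string();
        println!("{phc}"); // store this value in ADMIN_TOKEN
    }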
@@ -215,34 +223,16 @@ fn _validate_token(token: &str) -> bool {
 #[derive(Serialize)]
 struct AdminTemplateData {
     page_content: String,
-    version: Option<&'static str>,
     page_data: Option<Value>,
-    config: Value,
-    can_backup: bool,
     logged_in: bool,
     urlpath: String,
 }
 
 impl AdminTemplateData {
-    fn new() -> Self {
-        Self {
-            page_content: String::from("admin/settings"),
-            version: VERSION,
-            config: CONFIG.prepare_json(),
-            can_backup: *CAN_BACKUP,
-            logged_in: true,
-            urlpath: CONFIG.domain_path(),
-            page_data: None,
-        }
-    }
-
-    fn with_data(page_content: &str, page_data: Value) -> Self {
+    fn new(page_content: &str, page_data: Value) -> Self {
         Self {
             page_content: String::from(page_content),
-            version: VERSION,
             page_data: Some(page_data),
-            config: CONFIG.prepare_json(),
-            can_backup: *CAN_BACKUP,
             logged_in: true,
             urlpath: CONFIG.domain_path(),
         }
@@ -254,22 +244,31 @@ impl AdminTemplateData {
 }
 
 fn render_admin_page() -> ApiResult<Html<String>> {
-    let text = AdminTemplateData::new().render()?;
+    let settings_json = json!({
+        "config": CONFIG.prepare_json(),
+        "can_backup": *CAN_BACKUP,
+    });
+    let text = AdminTemplateData::new("admin/settings", settings_json).render()?;
     Ok(Html(text))
 }
 
-#[get("/", rank = 1)]
+#[get("/")]
 fn admin_page(_token: AdminToken) -> ApiResult<Html<String>> {
     render_admin_page()
 }
 
+#[get("/", rank = 2)]
+fn admin_page_login() -> ApiResult<Html<String>> {
+    render_admin_login(None, None)
+}
+
 #[derive(Deserialize, Debug)]
 #[allow(non_snake_case)]
 struct InviteData {
     email: String,
 }
 
-async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> {
+async fn get_user_or_404(uuid: &str, conn: &mut DbConn) -> ApiResult<User> {
     if let Some(user) = User::find_by_uuid(uuid, conn).await {
         Ok(user)
     } else {
@@ -278,28 +277,28 @@ async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> {
 }
 
 #[post("/invite", data = "<data>")]
-async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
+async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbConn) -> JsonResult {
     let data: InviteData = data.into_inner();
     let email = data.email.clone();
-    if User::find_by_mail(&data.email, &conn).await.is_some() {
+    if User::find_by_mail(&data.email, &mut conn).await.is_some() {
         err_code!("User already exists", Status::Conflict.code)
     }
 
     let mut user = User::new(email);
 
-    async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
+    async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
         if CONFIG.mail_enabled() {
             mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
         } else {
-            let invitation = Invitation::new(user.email.clone());
+            let invitation = Invitation::new(&user.email);
             invitation.save(conn).await
         }
     }
 
-    _generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
-    user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
+    _generate_invite(&user, &mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
+    user.save(&mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
 
-    Ok(Json(user.to_json(&conn).await))
+    Ok(Json(user.to_json(&mut conn).await))
 }
 
 #[post("/test/smtp", data = "<data>")]
@@ -314,99 +313,159 @@ async fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyResult {
 }
 
 #[get("/logout")]
-fn logout(cookies: &CookieJar<'_>, referer: Referer) -> Redirect {
+fn logout(cookies: &CookieJar<'_>) -> Redirect {
     cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish());
-    Redirect::temporary(admin_url(referer))
+    Redirect::to(admin_path())
 }
 
 #[get("/users")]
-async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
-    let users_json = stream::iter(User::get_all(&conn).await)
-        .then(|u| async {
-            let u = u; // Move out this single variable
-            let mut usr = u.to_json(&conn).await;
-            usr["UserEnabled"] = json!(u.enabled);
-            usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
-            usr
-        })
-        .collect::<Vec<Value>>()
-        .await;
+async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
+    let users = User::get_all(&mut conn).await;
+    let mut users_json = Vec::with_capacity(users.len());
+    for u in users {
+        let mut usr = u.to_json(&mut conn).await;
+        usr["UserEnabled"] = json!(u.enabled);
+        usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
+        users_json.push(usr);
+    }
 
     Json(Value::Array(users_json))
 }
 
 #[get("/users/overview")]
-async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
-    let users_json = stream::iter(User::get_all(&conn).await)
-        .then(|u| async {
-            let u = u; // Move out this single variable
-            let mut usr = u.to_json(&conn).await;
-            usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await);
-            usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await);
-            usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await as i32));
-            usr["user_enabled"] = json!(u.enabled);
-            usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
-            usr["last_active"] = match u.last_active(&conn).await {
-                Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
-                None => json!("Never"),
-            };
-            usr
-        })
-        .collect::<Vec<Value>>()
-        .await;
+async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
+    let users = User::get_all(&mut conn).await;
+    let mut users_json = Vec::with_capacity(users.len());
+    for u in users {
+        let mut usr = u.to_json(&mut conn).await;
+        usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await);
+        usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await);
+        usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &mut conn).await as i32));
+        usr["user_enabled"] = json!(u.enabled);
+        usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
+        usr["last_active"] = match u.last_active(&mut conn).await {
+            Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
+            None => json!("Never"),
+        };
+        users_json.push(usr);
+    }
 
-    let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?;
+    let text = AdminTemplateData::new("admin/users", json!(users_json)).render()?;
     Ok(Html(text))
 }
 
+#[get("/users/by-mail/<mail>")]
+async fn get_user_by_mail_json(mail: &str, _token: AdminToken, mut conn: DbConn) -> JsonResult {
+    if let Some(u) = User::find_by_mail(mail, &mut conn).await {
+        let mut usr = u.to_json(&mut conn).await;
+        usr["UserEnabled"] = json!(u.enabled);
+        usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
+        Ok(Json(usr))
+    } else {
+        err_code!("User doesn't exist", Status::NotFound.code);
+    }
+}
+
 #[get("/users/<uuid>")]
-async fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult {
-    let u = get_user_or_404(&uuid, &conn).await?;
-    let mut usr = u.to_json(&conn).await;
+async fn get_user_json(uuid: &str, _token: AdminToken, mut conn: DbConn) -> JsonResult {
+    let u = get_user_or_404(uuid, &mut conn).await?;
+    let mut usr = u.to_json(&mut conn).await;
     usr["UserEnabled"] = json!(u.enabled);
     usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
     Ok(Json(usr))
 }
 
 #[post("/users/<uuid>/delete")]
-async fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let user = get_user_or_404(&uuid, &conn).await?;
-    user.delete(&conn).await
+async fn delete_user(uuid: &str, token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    let user = get_user_or_404(uuid, &mut conn).await?;
+
+    // Get the user_org records before deleting the actual user
+    let user_orgs = UserOrganization::find_any_state_by_user(uuid, &mut conn).await;
+    let res = user.delete(&mut conn).await;
+
+    for user_org in user_orgs {
+        log_event(
+            EventType::OrganizationUserRemoved as i32,
+            &user_org.uuid,
+            &user_org.org_uuid,
+            String::from(ACTING_ADMIN_USER),
+            14, // Use UnknownBrowser type
+            &token.ip.ip,
+            &mut conn,
+        )
+        .await;
+    }
+
+    res
 }
 
 #[post("/users/<uuid>/deauth")]
-async fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = get_user_or_404(&uuid, &conn).await?;
-    Device::delete_all_by_user(&user.uuid, &conn).await?;
+async fn deauth_user(uuid: &str, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+    let mut user = get_user_or_404(uuid, &mut conn).await?;
+
+    nt.send_logout(&user, None).await;
+
+    if CONFIG.push_enabled() {
+        for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
+            match unregister_push_device(device.uuid).await {
+                Ok(r) => r,
+                Err(e) => error!("Unable to unregister devices from Bitwarden server: {}", e),
+            };
+        }
+    }
+
+    Device::delete_all_by_user(&user.uuid, &mut conn).await?;
     user.reset_security_stamp();
 
-    user.save(&conn).await
+    user.save(&mut conn).await
 }
 
 #[post("/users/<uuid>/disable")]
-async fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = get_user_or_404(&uuid, &conn).await?;
-    Device::delete_all_by_user(&user.uuid, &conn).await?;
+async fn disable_user(uuid: &str, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
+    let mut user = get_user_or_404(uuid, &mut conn).await?;
+    Device::delete_all_by_user(&user.uuid, &mut conn).await?;
     user.reset_security_stamp();
     user.enabled = false;
 
-    user.save(&conn).await
+    let save_result = user.save(&mut conn).await;
+
+    nt.send_logout(&user, None).await;
+
+    save_result
 }
 
 #[post("/users/<uuid>/enable")]
-async fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = get_user_or_404(&uuid, &conn).await?;
+async fn enable_user(uuid: &str, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    let mut user = get_user_or_404(uuid, &mut conn).await?;
     user.enabled = true;
 
-    user.save(&conn).await
+    user.save(&mut conn).await
 }
 
 #[post("/users/<uuid>/remove-2fa")]
-async fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let mut user = get_user_or_404(&uuid, &conn).await?;
-    TwoFactor::delete_all_by_user(&user.uuid, &conn).await?;
+async fn remove_2fa(uuid: &str, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    let mut user = get_user_or_404(uuid, &mut conn).await?;
+    TwoFactor::delete_all_by_user(&user.uuid, &mut conn).await?;
     user.totp_recover = None;
-    user.save(&conn).await
+    user.save(&mut conn).await
+}
+
+#[post("/users/<uuid>/invite/resend")]
+async fn resend_user_invite(uuid: &str, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    if let Some(user) = User::find_by_uuid(uuid, &mut conn).await {
+        //TODO: replace this with user.status check when it will be available (PR#3397)
+        if !user.password_hash.is_empty() {
+            err_code!("User already accepted invitation", Status::BadRequest.code);
+        }
+
+        if CONFIG.mail_enabled() {
+            mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
+        } else {
+            Ok(())
+        }
+    } else {
+        err_code!("User doesn't exist", Status::NotFound.code);
+    }
 }
 
 #[derive(Deserialize, Debug)]
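Editor's note: every stream::iter(...).then(...) pipeline in this hunk becomes a plain for loop. That is not only style: once the connection is taken as mut conn: DbConn, a closure handed to a stream adapter would have to capture the &mut borrow for the whole pipeline, which the borrow checker rejects; a sequential loop re-borrows it one iteration at a time. A reduced illustration with a stand-in type (not project code):

    // Stand-in type to show the borrow pattern.
    struct Conn;
    impl Conn {
        async fn fetch(&mut self, id: u32) -> u32 {
            id * 2
        }
    }

    async fn collect_all(conn: &mut Conn, ids: Vec<u32>) -> Vec<u32> {
        let mut out = Vec::with_capacity(ids.len());
        for id in ids {
            out.push(conn.fetch(id).await); // one short &mut borrow per iteration
        }
        out
        // A stream::iter(ids).then(|id| conn.fetch(id)) version would move the
        // &mut borrow into a closure that can be invoked repeatedly, which
        // fails to compile.
    }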
@@ -417,10 +476,11 @@ struct UserOrgTypeData {
 }
 
 #[post("/users/org_type", data = "<data>")]
-async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: DbConn) -> EmptyResult {
+async fn update_user_org_type(data: Json<UserOrgTypeData>, token: AdminToken, mut conn: DbConn) -> EmptyResult {
     let data: UserOrgTypeData = data.into_inner();
 
-    let mut user_to_edit = match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await {
+    let mut user_to_edit =
+        match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await {
             Some(user) => user,
             None => err!("The specified user isn't member of the organization"),
         };
@@ -431,8 +491,8 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
     };
 
     if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner {
-        // Removing owner permmission, check that there is at least one other confirmed owner
-        if UserOrganization::count_confirmed_by_org_and_type(&data.org_uuid, UserOrgType::Owner, &conn).await <= 1 {
+        // Removing owner permission, check that there is at least one other confirmed owner
+        if UserOrganization::count_confirmed_by_org_and_type(&data.org_uuid, UserOrgType::Owner, &mut conn).await <= 1 {
             err!("Can't change the type of the last owner")
         }
     }
@@ -440,7 +500,7 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
     // This check is also done at api::organizations::{accept_invite(), _confirm_invite, _activate_user(), edit_user()}, update_user_org_type
     // It returns different error messages per function.
     if new_type < UserOrgType::Admin {
-        match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, &user_to_edit.org_uuid, true, &conn).await {
+        match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, &user_to_edit.org_uuid, true, &mut conn).await {
             Ok(_) => {}
             Err(OrgPolicyErr::TwoFactorMissing) => {
                 err!("You cannot modify this user to this type because it has no two-step login method activated");
@@ -451,38 +511,50 @@ async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, c
         }
     }
 
+    log_event(
+        EventType::OrganizationUserUpdated as i32,
+        &user_to_edit.uuid,
+        &data.org_uuid,
+        String::from(ACTING_ADMIN_USER),
+        14, // Use UnknownBrowser type
+        &token.ip.ip,
+        &mut conn,
+    )
+    .await;
+
     user_to_edit.atype = new_type;
-    user_to_edit.save(&conn).await
+    user_to_edit.save(&mut conn).await
 }
 
 #[post("/users/update_revision")]
-async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
-    User::update_all_revisions(&conn).await
+async fn update_revision_users(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    User::update_all_revisions(&mut conn).await
 }
 
 #[get("/organizations/overview")]
-async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
-    let organizations_json = stream::iter(Organization::get_all(&conn).await)
-        .then(|o| async {
-            let o = o; //Move out this single variable
-            let mut org = o.to_json();
-            org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn).await);
-            org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await);
-            org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await);
-            org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await as i32));
-            org
-        })
-        .collect::<Vec<Value>>()
-        .await;
+async fn organizations_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
+    let organizations = Organization::get_all(&mut conn).await;
+    let mut organizations_json = Vec::with_capacity(organizations.len());
+    for o in organizations {
+        let mut org = o.to_json();
+        org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &mut conn).await);
+        org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &mut conn).await);
+        org["collection_count"] = json!(Collection::count_by_org(&o.uuid, &mut conn).await);
+        org["group_count"] = json!(Group::count_by_org(&o.uuid, &mut conn).await);
+        org["event_count"] = json!(Event::count_by_org(&o.uuid, &mut conn).await);
+        org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &mut conn).await);
+        org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &mut conn).await as i32));
+        organizations_json.push(org);
+    }
 
-    let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?;
+    let text = AdminTemplateData::new("admin/organizations", json!(organizations_json)).render()?;
     Ok(Html(text))
 }
 
 #[post("/organizations/<uuid>/delete")]
-async fn delete_organization(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
-    let org = Organization::find_by_uuid(&uuid, &conn).await.map_res("Organization doesn't exist")?;
-    org.delete(&conn).await
+async fn delete_organization(uuid: &str, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
+    let org = Organization::find_by_uuid(uuid, &mut conn).await.map_res("Organization doesn't exist")?;
+    org.delete(&mut conn).await
 }
 
 #[derive(Deserialize)]
@@ -500,10 +572,20 @@ struct GitCommit {
     sha: String,
 }
 
-async fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
-    let github_api = get_reqwest_client();
+#[derive(Deserialize)]
+struct TimeApi {
+    year: u16,
+    month: u8,
+    day: u8,
+    hour: u8,
+    minute: u8,
+    seconds: u8,
+}
 
-    Ok(github_api.get(url).send().await?.error_for_status()?.json::<T>().await?)
+async fn get_json_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
+    let json_api = get_reqwest_client();
+
+    Ok(json_api.get(url).send().await?.error_for_status()?.json::<T>().await?)
 }
 
 async fn has_http_access() -> bool {
@@ -523,14 +605,13 @@ async fn get_release_info(has_http_access: bool, running_within_docker: bool) ->
     // If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway.
     if has_http_access {
         (
-            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest")
+            match get_json_api::<GitRelease>("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest")
                 .await
             {
                 Ok(r) => r.tag_name,
                 _ => "-".to_string(),
             },
-            match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/vaultwarden/commits/main").await
-            {
+            match get_json_api::<GitCommit>("https://api.github.com/repos/dani-garcia/vaultwarden/commits/main").await {
                 Ok(mut c) => {
                     c.sha.truncate(8);
                     c.sha
@@ -542,7 +623,7 @@ async fn get_release_info(has_http_access: bool, running_within_docker: bool) ->
             if running_within_docker {
                 "-".to_string()
             } else {
-                match get_github_api::<GitRelease>(
+                match get_json_api::<GitRelease>(
                     "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
                 )
                 .await
@@ -557,16 +638,34 @@ async fn get_release_info(has_http_access: bool, running_within_docker: bool) ->
     }
 }
 
+async fn get_ntp_time(has_http_access: bool) -> String {
+    if has_http_access {
+        if let Ok(ntp_time) = get_json_api::<TimeApi>("https://www.timeapi.io/api/Time/current/zone?timeZone=UTC").await
+        {
+            return format!(
+                "{year}-{month:02}-{day:02} {hour:02}:{minute:02}:{seconds:02} UTC",
+                year = ntp_time.year,
+                month = ntp_time.month,
+                day = ntp_time.day,
+                hour = ntp_time.hour,
+                minute = ntp_time.minute,
+                seconds = ntp_time.seconds
+            );
+        }
+    }
+    String::from("Unable to fetch NTP time.")
+}
+
 #[get("/diagnostics")]
-async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
+async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) -> ApiResult<Html<String>> {
     use chrono::prelude::*;
     use std::net::ToSocketAddrs;
 
     // Get current running versions
     let web_vault_version: WebVaultVersion =
-        match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
+        match std::fs::read_to_string(format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
             Ok(s) => serde_json::from_str(&s)?,
-            _ => match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
+            _ => match std::fs::read_to_string(format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
                 Ok(s) => serde_json::from_str(&s)?,
                 _ => WebVaultVersion {
                     version: String::from("Version file missing"),
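Editor's note: TimeApi above deserializes the JSON served by timeapi.io; serde ignores any fields that are not declared. A quick sketch with a mirrored struct and an assumed sample payload (the extra keys are guesses at the real response shape):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct TimeApi {
        year: u16,
        month: u8,
        day: u8,
        hour: u8,
        minute: u8,
        seconds: u8,
    }

    fn main() {
        // Assumed sample response; undeclared keys like "dateTime" are simply skipped.
        let body = r#"{"year":2023,"month":7,"day":14,"hour":12,"minute":30,"seconds":5,"dateTime":"2023-07-14T12:30:05"}"#;
        let t: TimeApi = serde_json::from_str(body).unwrap();
        println!("{}-{:02}-{:02} {:02}:{:02}:{:02} UTC", t.year, t.month, t.day, t.hour, t.minute, t.seconds);
    }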
@@ -585,7 +684,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
     // Check if we are able to resolve DNS entries
     let dns_resolved = match ("github.com", 0).to_socket_addrs().map(|mut i| i.next()) {
         Ok(Some(a)) => a.ip().to_string(),
-        _ => "Could not resolve domain name.".to_string(),
+        _ => "Unable to resolve domain name.".to_string(),
     };
 
     let (latest_release, latest_commit, latest_web_build) =
@@ -598,13 +697,14 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
 
     let diagnostics_json = json!({
         "dns_resolved": dns_resolved,
+        "current_release": VERSION,
         "latest_release": latest_release,
         "latest_commit": latest_commit,
         "web_vault_enabled": &CONFIG.web_vault_enabled(),
-        "web_vault_version": web_vault_version.version,
+        "web_vault_version": web_vault_version.version.trim_start_matches('v'),
         "latest_web_build": latest_web_build,
         "running_within_docker": running_within_docker,
-        "docker_base_image": docker_base_image(),
+        "docker_base_image": if running_within_docker { docker_base_image() } else { "Not applicable" },
         "has_http_access": has_http_access,
         "ip_header_exists": &ip_header.0.is_some(),
         "ip_header_match": ip_header_name == CONFIG.ip_header(),
@@ -612,14 +712,17 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
         "ip_header_config": &CONFIG.ip_header(),
         "uses_proxy": uses_proxy,
         "db_type": *DB_TYPE,
-        "db_version": get_sql_server_version(&conn).await,
-        "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
+        "db_version": get_sql_server_version(&mut conn).await,
+        "admin_url": format!("{}/diagnostics", admin_url()),
         "overrides": &CONFIG.get_overrides().join(", "),
+        "host_arch": std::env::consts::ARCH,
+        "host_os": std::env::consts::OS,
         "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
-        "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
+        "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the server date/time check as late as possible to minimize the time difference
+        "ntp_time": get_ntp_time(has_http_access).await, // Run the ntp check as late as possible to minimize the time difference
     });
 
-    let text = AdminTemplateData::with_data("admin/diagnostics", diagnostics_json).render()?;
+    let text = AdminTemplateData::new("admin/diagnostics", diagnostics_json).render()?;
     Ok(Html(text))
 }
@@ -641,44 +744,60 @@ fn delete_config(_token: AdminToken) -> EmptyResult {
 }
 
 #[post("/config/backup_db")]
-async fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult {
+async fn backup_db(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
     if *CAN_BACKUP {
-        backup_database(&conn).await
+        backup_database(&mut conn).await
     } else {
         err!("Can't back up current DB (Only SQLite supports this feature)");
     }
 }
 
-pub struct AdminToken {}
+pub struct AdminToken {
+    ip: ClientIp,
+}
 
 #[rocket::async_trait]
 impl<'r> FromRequest<'r> for AdminToken {
     type Error = &'static str;
 
-    async fn from_request(request: &'r Request<'_>) -> request::Outcome<Self, Self::Error> {
+    async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> {
+        let ip = match ClientIp::from_request(request).await {
+            Outcome::Success(ip) => ip,
+            _ => err_handler!("Error getting Client IP"),
+        };
+
         if CONFIG.disable_admin_token() {
-            Outcome::Success(AdminToken {})
+            Outcome::Success(Self {
+                ip,
+            })
         } else {
             let cookies = request.cookies();
 
             let access_token = match cookies.get(COOKIE_NAME) {
                 Some(cookie) => cookie.value(),
-                None => return Outcome::Forward(()), // If there is no cookie, redirect to login
-            };
-
-            let ip = match ClientIp::from_request(request).await {
-                Outcome::Success(ip) => ip.ip,
-                _ => err_handler!("Error getting Client IP"),
+                None => {
+                    let requested_page =
+                        request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
+                    // When the requested page is empty, it is `/admin`, in that case, Forward, so it will render the login page
+                    // Else, return a 401 failure, which will be caught
+                    if requested_page.is_empty() {
+                        return Outcome::Forward(Status::Unauthorized);
+                    } else {
+                        return Outcome::Failure((Status::Unauthorized, "Unauthorized"));
+                    }
+                }
             };
 
             if decode_admin(access_token).is_err() {
                 // Remove admin cookie
                 cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish());
-                error!("Invalid or expired admin JWT. IP: {}.", ip);
-                return Outcome::Forward(());
+                error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
+                return Outcome::Failure((Status::Unauthorized, "Session expired"));
             }
 
-            Outcome::Success(AdminToken {})
+            Outcome::Success(Self {
+                ip,
+            })
         }
     }
 }
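Editor's note: the #[catch(401)] admin_login above only runs if the catcher is registered for the admin mount point, pairing with the guard's new Outcome::Forward/Failure returns: a bare /admin request forwards to the rank-2 login page, while anything deeper 401s into the catcher, which re-renders the login form with a redirect back to the requested page. A minimal Rocket 0.5 sketch of the wiring (mount point and module path illustrative, not vaultwarden's exact launch code):

    // Sketch: routes and catchers must both be attached for the flow to work.
    #[rocket::launch]
    fn rocket() -> _ {
        rocket::build()
            .mount("/admin", admin::routes())
            .register("/admin", admin::catchers())
    }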
@@ -1,8 +1,6 @@
 use chrono::{Duration, Utc};
-use rocket::serde::json::Json;
-use rocket::Route;
+use rocket::{serde::json::Json, Route};
 use serde_json::Value;
-use std::borrow::Borrow;
 
 use crate::{
     api::{
@@ -14,8 +12,6 @@ use crate::{
     mail, CONFIG,
 };
 
-use futures::{stream, stream::StreamExt};
-
 pub fn routes() -> Vec<Route> {
     routes![
         get_contacts,
@@ -41,17 +37,14 @@ pub fn routes() -> Vec<Route> {
 // region get
 
 #[get("/emergency-access/trusted")]
-async fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult {
+async fn get_contacts(headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    let emergency_access_list_json =
-        stream::iter(EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await)
-            .then(|e| async {
-                let e = e; // Move out this single variable
-                e.to_json_grantee_details(&conn).await
-            })
-            .collect::<Vec<Value>>()
-            .await;
+    let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await;
+    let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
+    for ea in emergency_access_list {
+        emergency_access_list_json.push(ea.to_json_grantee_details(&mut conn).await);
+    }
 
     Ok(Json(json!({
       "Data": emergency_access_list_json,
@@ -61,17 +54,14 @@ async fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult {
 }
 
 #[get("/emergency-access/granted")]
-async fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult {
+async fn get_grantees(headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    let emergency_access_list_json =
-        stream::iter(EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn).await)
-            .then(|e| async {
-                let e = e; // Move out this single variable
-                e.to_json_grantor_details(&conn).await
-            })
-            .collect::<Vec<Value>>()
-            .await;
+    let emergency_access_list = EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &mut conn).await;
+    let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
+    for ea in emergency_access_list {
+        emergency_access_list_json.push(ea.to_json_grantor_details(&mut conn).await);
+    }
 
     Ok(Json(json!({
       "Data": emergency_access_list_json,
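Note: the two hunks above replace the `futures::stream` pipeline with a plain loop, which is why the `use futures::{stream, stream::StreamExt};` import disappears earlier in this diff. The likely motivation (an inference, the diff does not state it): the connection is now `mut conn: DbConn` and each `to_json_*` call wants `&mut conn`, and a single exclusive borrow cannot be captured by a stream of closure-produced futures, while a sequential `for` loop takes the borrow one await at a time. A standalone sketch of the same transformation, with stand-in types:

    // Illustrative stand-ins for the real DbConn / EmergencyAccess types.
    struct DbConn;
    struct Record(u32);

    impl Record {
        async fn to_json(&self, conn: &mut DbConn) -> String {
            let _ = conn; // each call holds the exclusive borrow in turn
            format!("{{\"id\":{}}}", self.0)
        }
    }

    async fn render_all(records: Vec<Record>, conn: &mut DbConn) -> Vec<String> {
        // Sequential loop: one &mut borrow alive per iteration, no Stream machinery.
        let mut out = Vec::with_capacity(records.len());
        for r in records {
            out.push(r.to_json(conn).await);
        }
        out
    }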
@@ -81,11 +71,11 @@ async fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult {
 }
 
 #[get("/emergency-access/<emer_id>")]
-async fn get_emergency_access(emer_id: String, conn: DbConn) -> JsonResult {
+async fn get_emergency_access(emer_id: &str, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
-        Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&conn).await)),
+    match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
+        Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&mut conn).await)),
         None => err!("Emergency access not valid."),
     }
 }
@@ -94,7 +84,7 @@ async fn get_emergency_access(emer_id: String, conn: DbConn) -> JsonResult {
 
 // region put/post
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct EmergencyAccessUpdateData {
     Type: NumberOrString,
@@ -103,25 +93,21 @@ struct EmergencyAccessUpdateData {
 }
 
 #[put("/emergency-access/<emer_id>", data = "<data>")]
-async fn put_emergency_access(
-    emer_id: String,
-    data: JsonUpcase<EmergencyAccessUpdateData>,
-    conn: DbConn,
-) -> JsonResult {
+async fn put_emergency_access(emer_id: &str, data: JsonUpcase<EmergencyAccessUpdateData>, conn: DbConn) -> JsonResult {
     post_emergency_access(emer_id, data, conn).await
 }
 
 #[post("/emergency-access/<emer_id>", data = "<data>")]
 async fn post_emergency_access(
-    emer_id: String,
+    emer_id: &str,
     data: JsonUpcase<EmergencyAccessUpdateData>,
-    conn: DbConn,
+    mut conn: DbConn,
 ) -> JsonResult {
     check_emergency_access_allowed()?;
 
     let data: EmergencyAccessUpdateData = data.into_inner().data;
 
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emergency_access) => emergency_access,
         None => err!("Emergency access not valid."),
     };
@@ -133,9 +119,11 @@ async fn post_emergency_access(
 
     emergency_access.atype = new_type;
     emergency_access.wait_time_days = data.WaitTimeDays;
-    emergency_access.key_encrypted = data.KeyEncrypted;
+    if data.KeyEncrypted.is_some() {
+        emergency_access.key_encrypted = data.KeyEncrypted;
+    }
 
-    emergency_access.save(&conn).await?;
+    emergency_access.save(&mut conn).await?;
     Ok(Json(emergency_access.to_json()))
 }
 
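Note: this is a behavioral fix, not just cleanup. Previously an update request without `KeyEncrypted` (presumably an `Option` field, given the `is_some()` call) would overwrite the stored key with `None`; the new guard keeps the existing value. An equivalent form that avoids naming the field twice (a sketch, not what the patch uses):

    // Only replace the stored key when the client actually sent one.
    if let Some(key) = data.KeyEncrypted {
        emergency_access.key_encrypted = Some(key);
    }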
@@ -144,12 +132,12 @@ async fn post_emergency_access(
 // region delete
 
 #[delete("/emergency-access/<emer_id>")]
-async fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn delete_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> EmptyResult {
     check_emergency_access_allowed()?;
 
     let grantor_user = headers.user;
 
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => {
             if emer.grantor_uuid != grantor_user.uuid && emer.grantee_uuid != Some(grantor_user.uuid) {
                 err!("Emergency access not valid.")
@@ -158,12 +146,12 @@ async fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn
         }
         None => err!("Emergency access not valid."),
     };
-    emergency_access.delete(&conn).await?;
+    emergency_access.delete(&mut conn).await?;
     Ok(())
 }
 
 #[post("/emergency-access/<emer_id>/delete")]
-async fn post_delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn post_delete_emergency_access(emer_id: &str, headers: Headers, conn: DbConn) -> EmptyResult {
     delete_emergency_access(emer_id, headers, conn).await
 }
 
@@ -171,7 +159,7 @@ async fn post_delete_emergency_access(emer_id: String, headers: Headers, conn: D
 
 // region invite
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct EmergencyAccessInviteData {
     Email: String,
@@ -180,7 +168,7 @@ struct EmergencyAccessInviteData {
 }
 
 #[post("/emergency-access/invite", data = "<data>")]
-async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
     check_emergency_access_allowed()?;
 
     let data: EmergencyAccessInviteData = data.into_inner().data;
@@ -201,10 +189,10 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
         err!("You can not set yourself as an emergency contact.")
     }
 
-    let grantee_user = match User::find_by_mail(&email, &conn).await {
+    let grantee_user = match User::find_by_mail(&email, &mut conn).await {
         None => {
             if !CONFIG.invitations_allowed() {
-                err!(format!("Grantee user does not exist: {}", email))
+                err!(format!("Grantee user does not exist: {}", &email))
             }
 
             if !CONFIG.is_email_domain_allowed(&email) {
@@ -212,12 +200,12 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
             }
 
             if !CONFIG.mail_enabled() {
-                let invitation = Invitation::new(email.clone());
-                invitation.save(&conn).await?;
+                let invitation = Invitation::new(&email);
+                invitation.save(&mut conn).await?;
             }
 
             let mut user = User::new(email.clone());
-            user.save(&conn).await?;
+            user.save(&mut conn).await?;
             user
         }
         Some(user) => user,
@@ -227,41 +215,34 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
         &grantor_user.uuid,
         &grantee_user.uuid,
         &grantee_user.email,
-        &conn,
+        &mut conn,
     )
     .await
    .is_some()
     {
-        err!(format!("Grantee user already invited: {}", email))
+        err!(format!("Grantee user already invited: {}", &grantee_user.email))
     }
 
-    let mut new_emergency_access = EmergencyAccess::new(
-        grantor_user.uuid.clone(),
-        Some(grantee_user.email.clone()),
-        emergency_access_status,
-        new_type,
-        wait_time_days,
-    );
-    new_emergency_access.save(&conn).await?;
+    let mut new_emergency_access =
+        EmergencyAccess::new(grantor_user.uuid, grantee_user.email, emergency_access_status, new_type, wait_time_days);
+    new_emergency_access.save(&mut conn).await?;
 
     if CONFIG.mail_enabled() {
         mail::send_emergency_access_invite(
-            &grantee_user.email,
+            &new_emergency_access.email.expect("Grantee email does not exists"),
             &grantee_user.uuid,
-            Some(new_emergency_access.uuid),
-            Some(grantor_user.name.clone()),
-            Some(grantor_user.email),
+            &new_emergency_access.uuid,
+            &grantor_user.name,
+            &grantor_user.email,
         )
         .await?;
     } else {
         // Automatically mark user as accepted if no email invites
-        match User::find_by_mail(&email, &conn).await {
-            Some(user) => {
-                match accept_invite_process(user.uuid, new_emergency_access.uuid, Some(email), conn.borrow()).await {
-                    Ok(v) => v,
-                    Err(e) => err!(e.to_string()),
-                }
-            }
+        match User::find_by_mail(&email, &mut conn).await {
+            Some(user) => match accept_invite_process(&user.uuid, &mut new_emergency_access, &email, &mut conn).await {
+                Ok(v) => v,
+                Err(e) => err!(e.to_string()),
+            },
             None => err!("Grantee user not found."),
         }
     }
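Note: the constructor call above also switches from cloned to owned arguments: `grantor_user.uuid` and `grantee_user.email` are moved into `EmergencyAccess::new`, so code after the save has to read those values back off the new record, hence the `new_emergency_access.email.expect(...)`. A stripped-down sketch of that ownership pattern (types here are illustrative):

    struct EmergencyAccess {
        grantor_uuid: String,
        email: Option<String>,
    }

    impl EmergencyAccess {
        // Taking String by value moves it in; callers no longer clone(),
        // but they must read the value back from the struct afterwards.
        fn new(grantor_uuid: String, email: String) -> Self {
            Self {
                grantor_uuid,
                email: Some(email),
            }
        }
    }

    fn demo(grantor_uuid: String, grantee_email: String) -> EmergencyAccess {
        let ea = EmergencyAccess::new(grantor_uuid, grantee_email);
        // grantee_email has been moved; use ea.email from here on.
        ea
    }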
@@ -270,10 +251,10 @@ async fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Heade
 }
 
 #[post("/emergency-access/<emer_id>/reinvite")]
-async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult {
+async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> EmptyResult {
     check_emergency_access_allowed()?;
 
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
@@ -291,7 +272,7 @@ async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> Empty
         None => err!("Email not valid."),
     };
 
-    let grantee_user = match User::find_by_mail(&email, &conn).await {
+    let grantee_user = match User::find_by_mail(&email, &mut conn).await {
         Some(user) => user,
         None => err!("Grantee user not found."),
     };
@@ -302,21 +283,19 @@ async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> Empty
         mail::send_emergency_access_invite(
             &email,
             &grantor_user.uuid,
-            Some(emergency_access.uuid),
-            Some(grantor_user.name.clone()),
-            Some(grantor_user.email),
+            &emergency_access.uuid,
+            &grantor_user.name,
+            &grantor_user.email,
         )
         .await?;
     } else {
-        if Invitation::find_by_mail(&email, &conn).await.is_none() {
-            let invitation = Invitation::new(email);
-            invitation.save(&conn).await?;
+        if Invitation::find_by_mail(&email, &mut conn).await.is_none() {
+            let invitation = Invitation::new(&email);
+            invitation.save(&mut conn).await?;
         }
 
         // Automatically mark user as accepted if no email invites
-        match accept_invite_process(grantee_user.uuid, emergency_access.uuid, emergency_access.email, conn.borrow())
-            .await
-        {
+        match accept_invite_process(&grantee_user.uuid, &mut emergency_access, &email, &mut conn).await {
             Ok(v) => v,
             Err(e) => err!(e.to_string()),
         }
@@ -332,37 +311,43 @@ struct AcceptData {
 }
 
 #[post("/emergency-access/<emer_id>/accept", data = "<data>")]
-async fn accept_invite(emer_id: String, data: JsonUpcase<AcceptData>, conn: DbConn) -> EmptyResult {
+async fn accept_invite(emer_id: &str, data: JsonUpcase<AcceptData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
     check_emergency_access_allowed()?;
 
     let data: AcceptData = data.into_inner().data;
     let token = &data.Token;
     let claims = decode_emergency_access_invite(token)?;
 
-    let grantee_user = match User::find_by_mail(&claims.email, &conn).await {
+    // This can happen if the user who received the invite used a different email to signup.
+    // Since we do not know if this is intented, we error out here and do nothing with the invite.
+    if claims.email != headers.user.email {
+        err!("Claim email does not match current users email")
+    }
+
+    let grantee_user = match User::find_by_mail(&claims.email, &mut conn).await {
         Some(user) => {
-            Invitation::take(&claims.email, &conn).await;
+            Invitation::take(&claims.email, &mut conn).await;
             user
         }
         None => err!("Invited user not found"),
     };
 
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
     // get grantor user to send Accepted email
-    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
-    if (claims.emer_id.is_some() && emer_id == claims.emer_id.unwrap())
-        && (claims.grantor_name.is_some() && grantor_user.name == claims.grantor_name.unwrap())
-        && (claims.grantor_email.is_some() && grantor_user.email == claims.grantor_email.unwrap())
+    if emer_id == claims.emer_id
+        && grantor_user.name == claims.grantor_name
+        && grantor_user.email == claims.grantor_email
     {
-        match accept_invite_process(grantee_user.uuid.clone(), emer_id, Some(grantee_user.email.clone()), &conn).await {
+        match accept_invite_process(&grantee_user.uuid, &mut emergency_access, &grantee_user.email, &mut conn).await {
             Ok(v) => v,
             Err(e) => err!(e.to_string()),
         }
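Note: two separate tightenings land in the hunk above. First, the invite can now only be accepted by the logged-in account whose email matches the JWT claim. Second, the claim fields (`emer_id`, `grantor_name`, `grantor_email`) are treated as mandatory rather than `Option`s, which deletes the `is_some()`/`unwrap()` chains from the comparison. Assuming the claims struct was changed to match (the struct itself is outside this diff), its shape would be roughly:

    // Hypothetical shape of the invite claims after the change; the field names
    // are taken from the usage above, the struct is not part of this diff.
    #[derive(serde::Deserialize)]
    struct EmergencyAccessInviteClaims {
        email: String,
        emer_id: String,       // was Option<String>
        grantor_name: String,  // was Option<String>
        grantor_email: String, // was Option<String>
    }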
@@ -378,18 +363,12 @@ async fn accept_invite(emer_id: String, data: JsonUpcase<AcceptData>, conn: DbCo
 }
 
 async fn accept_invite_process(
-    grantee_uuid: String,
-    emer_id: String,
-    email: Option<String>,
-    conn: &DbConn,
+    grantee_uuid: &str,
+    emergency_access: &mut EmergencyAccess,
+    grantee_email: &str,
+    conn: &mut DbConn,
 ) -> EmptyResult {
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, conn).await {
-        Some(emer) => emer,
-        None => err!("Emergency access not valid."),
-    };
-
-    let emer_email = emergency_access.email;
-    if emer_email.is_none() || emer_email != email {
+    if emergency_access.email.is_none() || emergency_access.email.as_ref().unwrap() != grantee_email {
         err!("User email does not match invite.");
     }
 
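Note: `accept_invite_process` previously re-fetched the record it was handed the id of, even though every caller had just loaded it; it now borrows the caller's `EmergencyAccess` mutably, so the redundant lookup and its duplicate "not valid" error path disappear. The shape of that refactor, in miniature (illustrative types and values, not the project's):

    struct EmergencyAccess {
        status: i32,
        grantee_uuid: Option<String>,
        email: Option<String>,
    }

    const ACCEPTED: i32 = 1; // illustrative status value

    // The caller has already fetched and validated the record, so the helper
    // mutates what it is lent instead of querying the database again.
    fn accept(rec: &mut EmergencyAccess, grantee_uuid: &str) {
        rec.status = ACCEPTED;
        rec.grantee_uuid = Some(grantee_uuid.to_string());
        rec.email = None;
    }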
@@ -398,7 +377,7 @@ async fn accept_invite_process(
     }
 
     emergency_access.status = EmergencyAccessStatus::Accepted as i32;
-    emergency_access.grantee_uuid = Some(grantee_uuid);
+    emergency_access.grantee_uuid = Some(String::from(grantee_uuid));
     emergency_access.email = None;
     emergency_access.save(conn).await
 }
@@ -411,10 +390,10 @@ struct ConfirmData {
 
 #[post("/emergency-access/<emer_id>/confirm", data = "<data>")]
 async fn confirm_emergency_access(
-    emer_id: String,
+    emer_id: &str,
     data: JsonUpcase<ConfirmData>,
     headers: Headers,
-    conn: DbConn,
+    mut conn: DbConn,
 ) -> JsonResult {
     check_emergency_access_allowed()?;
 
@@ -422,7 +401,7 @@ async fn confirm_emergency_access(
     let data: ConfirmData = data.into_inner().data;
     let key = data.Key;
 
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
@@ -433,13 +412,13 @@ async fn confirm_emergency_access(
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await {
+        let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
             Some(user) => user,
             None => err!("Grantee user not found."),
         };
@@ -448,7 +427,7 @@ async fn confirm_emergency_access(
         emergency_access.key_encrypted = Some(key);
         emergency_access.email = None;
 
-        emergency_access.save(&conn).await?;
+        emergency_access.save(&mut conn).await?;
 
         if CONFIG.mail_enabled() {
             mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?;
@@ -464,22 +443,22 @@ async fn confirm_emergency_access(
 // region access emergency access
 
 #[post("/emergency-access/<emer_id>/initiate")]
-async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn initiate_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
     let initiating_user = headers.user;
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
     if emergency_access.status != EmergencyAccessStatus::Confirmed as i32
-        || emergency_access.grantee_uuid != Some(initiating_user.uuid.clone())
+        || emergency_access.grantee_uuid != Some(initiating_user.uuid)
     {
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
@@ -489,14 +468,14 @@ async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbCo
     emergency_access.updated_at = now;
     emergency_access.recovery_initiated_at = Some(now);
     emergency_access.last_notification_at = Some(now);
-    emergency_access.save(&conn).await?;
+    emergency_access.save(&mut conn).await?;
 
     if CONFIG.mail_enabled() {
         mail::send_emergency_access_recovery_initiated(
             &grantor_user.email,
             &initiating_user.name,
             emergency_access.get_type_as_str(),
-            &emergency_access.wait_time_days.clone().to_string(),
+            &emergency_access.wait_time_days,
         )
         .await?;
     }
@@ -504,34 +483,33 @@ async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbCo
 }
 
 #[post("/emergency-access/<emer_id>/approve")]
-async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn approve_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    let approving_user = headers.user;
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
     if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32
-        || emergency_access.grantor_uuid != approving_user.uuid
+        || emergency_access.grantor_uuid != headers.user.uuid
     {
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&approving_user.uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&headers.user.uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await {
+        let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
             Some(user) => user,
             None => err!("Grantee user not found."),
         };
 
         emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32;
-        emergency_access.save(&conn).await?;
+        emergency_access.save(&mut conn).await?;
 
         if CONFIG.mail_enabled() {
             mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?;
@@ -543,35 +521,34 @@ async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbCon
 }
 
 #[post("/emergency-access/<emer_id>/reject")]
-async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    let rejecting_user = headers.user;
-    let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
     if (emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32
         && emergency_access.status != EmergencyAccessStatus::RecoveryApproved as i32)
-        || emergency_access.grantor_uuid != rejecting_user.uuid
+        || emergency_access.grantor_uuid != headers.user.uuid
     {
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&rejecting_user.uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&headers.user.uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
     if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
-        let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await {
+        let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await {
             Some(user) => user,
             None => err!("Grantee user not found."),
         };
 
         emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
-        emergency_access.save(&conn).await?;
+        emergency_access.save(&mut conn).await?;
 
         if CONFIG.mail_enabled() {
             mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name).await?;
@@ -587,31 +564,34 @@ async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn
 // region action
 
 #[post("/emergency-access/<emer_id>/view")]
-async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
-    let requesting_user = headers.user;
-    let host = headers.host;
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
-    if !is_valid_request(&emergency_access, requesting_user.uuid, EmergencyAccessType::View) {
+    if !is_valid_request(&emergency_access, &headers.user.uuid, EmergencyAccessType::View) {
         err!("Emergency access not valid.")
     }
 
-    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
-    let cipher_sync_data =
-        CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, CipherSyncType::User, &conn).await;
+    let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &mut conn).await;
+    let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, CipherSyncType::User, &mut conn).await;
 
-    let ciphers_json = stream::iter(ciphers)
-        .then(|c| async {
-            let c = c; // Move out this single variable
-            c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &conn).await
-        })
-        .collect::<Vec<Value>>()
-        .await;
+    let mut ciphers_json = Vec::with_capacity(ciphers.len());
+    for c in ciphers {
+        ciphers_json.push(
+            c.to_json(
+                &headers.host,
+                &emergency_access.grantor_uuid,
+                Some(&cipher_sync_data),
+                CipherSyncType::User,
+                &mut conn,
+            )
+            .await,
+        );
+    }
 
     Ok(Json(json!({
       "Ciphers": ciphers_json,
@@ -621,33 +601,37 @@ async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn)
 }
 
 #[post("/emergency-access/<emer_id>/takeover")]
-async fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     check_emergency_access_allowed()?;
 
     let requesting_user = headers.user;
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
-    if !is_valid_request(&emergency_access, requesting_user.uuid, EmergencyAccessType::Takeover) {
+    if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
-    Ok(Json(json!({
+    let result = json!({
       "Kdf": grantor_user.client_kdf_type,
       "KdfIterations": grantor_user.client_kdf_iter,
+      "KdfMemory": grantor_user.client_kdf_memory,
+      "KdfParallelism": grantor_user.client_kdf_parallelism,
      "KeyEncrypted": &emergency_access.key_encrypted,
      "Object": "emergencyAccessTakeover",
-    })))
+    });
+
+    Ok(Json(result))
 }
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct EmergencyAccessPasswordData {
     NewMasterPasswordHash: String,
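Note: the takeover response above also grows `KdfMemory` and `KdfParallelism` alongside the KDF type and iteration count. Those extra parameters only make sense for an Argon2-style KDF and would serialize to null for PBKDF2 accounts; that reading is an inference from the field names, not something this diff states. The payload shape, in isolation:

    use serde_json::{json, Value};

    // Illustrative builder for the takeover payload; Option fields become null.
    fn takeover_payload(kdf_type: i32, iterations: i32, memory: Option<i32>, parallelism: Option<i32>) -> Value {
        json!({
            "Kdf": kdf_type,
            "KdfIterations": iterations,
            "KdfMemory": memory,
            "KdfParallelism": parallelism,
            "Object": "emergencyAccessTakeover",
        })
    }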
@@ -656,44 +640,43 @@ struct EmergencyAccessPasswordData {
 
 #[post("/emergency-access/<emer_id>/password", data = "<data>")]
 async fn password_emergency_access(
-    emer_id: String,
+    emer_id: &str,
     data: JsonUpcase<EmergencyAccessPasswordData>,
     headers: Headers,
-    conn: DbConn,
+    mut conn: DbConn,
 ) -> EmptyResult {
     check_emergency_access_allowed()?;
 
     let data: EmergencyAccessPasswordData = data.into_inner().data;
     let new_master_password_hash = &data.NewMasterPasswordHash;
-    let key = data.Key;
+    //let key = &data.Key;
 
     let requesting_user = headers.user;
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
-    if !is_valid_request(&emergency_access, requesting_user.uuid, EmergencyAccessType::Takeover) {
+    if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
        err!("Emergency access not valid.")
     }
 
-    let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await {
+    let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
     // change grantor_user password
-    grantor_user.set_password(new_master_password_hash, None);
-    grantor_user.akey = key;
-    grantor_user.save(&conn).await?;
+    grantor_user.set_password(new_master_password_hash, Some(data.Key), true, None);
+    grantor_user.save(&mut conn).await?;
 
     // Disable TwoFactor providers since they will otherwise block logins
-    TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn).await?;
+    TwoFactor::delete_all_by_user(&grantor_user.uuid, &mut conn).await?;
 
     // Remove grantor from all organisations unless Owner
-    for user_org in UserOrganization::find_any_state_by_user(&grantor_user.uuid, &conn).await {
+    for user_org in UserOrganization::find_any_state_by_user(&grantor_user.uuid, &mut conn).await {
         if user_org.atype != UserOrgType::Owner as i32 {
-            user_org.delete(&conn).await?;
+            user_org.delete(&mut conn).await?;
         }
     }
     Ok(())
@@ -702,23 +685,23 @@ async fn password_emergency_access(
 // endregion
 
 #[get("/emergency-access/<emer_id>/policies")]
-async fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult {
+async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult {
     let requesting_user = headers.user;
-    let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await {
+    let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await {
         Some(emer) => emer,
         None => err!("Emergency access not valid."),
     };
 
-    if !is_valid_request(&emergency_access, requesting_user.uuid, EmergencyAccessType::Takeover) {
+    if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) {
         err!("Emergency access not valid.")
     }
 
-    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await {
+    let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await {
         Some(user) => user,
         None => err!("Grantor user not found."),
     };
 
-    let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &conn);
+    let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn);
     let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
 
     Ok(Json(json!({
@@ -730,10 +713,11 @@ async fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbCo
 
 fn is_valid_request(
     emergency_access: &EmergencyAccess,
-    requesting_user_uuid: String,
+    requesting_user_uuid: &str,
     requested_access_type: EmergencyAccessType,
 ) -> bool {
-    emergency_access.grantee_uuid == Some(requesting_user_uuid)
+    emergency_access.grantee_uuid.is_some()
+        && emergency_access.grantee_uuid.as_ref().unwrap() == requesting_user_uuid
         && emergency_access.status == EmergencyAccessStatus::RecoveryApproved as i32
         && emergency_access.atype == requested_access_type as i32
 }
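Note: with the uuid parameter borrowed, the old `grantee_uuid == Some(requesting_user_uuid)` comparison no longer type-checks against an `Option<String>`, hence the `is_some()`/`unwrap()` pair above. A sketch of an equivalent form that avoids the `unwrap()`:

    fn grantee_matches(grantee_uuid: &Option<String>, requesting_user_uuid: &str) -> bool {
        // Option<String> -> Option<&str>, then compare; no unwrap needed.
        grantee_uuid.as_deref() == Some(requesting_user_uuid)
    }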
@@ -751,41 +735,45 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
         return;
     }
 
-    if let Ok(conn) = pool.get().await {
-        let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await;
+    if let Ok(mut conn) = pool.get().await {
+        let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
 
         if emergency_access_list.is_empty() {
             debug!("No emergency request timeout to approve");
         }
 
+        let now = Utc::now().naive_utc();
         for mut emer in emergency_access_list {
-            if emer.recovery_initiated_at.is_some()
-                && Utc::now().naive_utc()
-                    >= emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days))
-            {
-                emer.status = EmergencyAccessStatus::RecoveryApproved as i32;
-                emer.save(&conn).await.expect("Cannot save emergency access on job");
+            // The find_all_recoveries_initiated already checks if the recovery_initiated_at is not null (None)
+            let recovery_allowed_at =
+                emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days));
+            if recovery_allowed_at.le(&now) {
+                // Only update the access status
+                // Updating the whole record could cause issues when the emergency_notification_reminder_job is also active
+                emer.update_access_status_and_save(EmergencyAccessStatus::RecoveryApproved as i32, &now, &mut conn)
+                    .await
+                    .expect("Unable to update emergency access status");
 
                 if CONFIG.mail_enabled() {
                     // get grantor user to send Accepted email
                     let grantor_user =
-                        User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found.");
+                        User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
 
                     // get grantee user to send Accepted email
                     let grantee_user =
-                        User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn)
+                        User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
                             .await
-                            .expect("Grantee user not found.");
+                            .expect("Grantee user not found");
 
                     mail::send_emergency_access_recovery_timed_out(
                         &grantor_user.email,
-                        &grantee_user.name.clone(),
+                        &grantee_user.name,
                         emer.get_type_as_str(),
                     )
                     .await
                    .expect("Error on sending email");
 
-                    mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name.clone())
+                    mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name)
                         .await
                         .expect("Error on sending email");
                 }
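Note: the scheduled job above now computes a single `now` per batch and a `recovery_allowed_at` per record, and it persists only the status field (via `update_access_status_and_save`), so it cannot clobber the `last_notification_at` column that the reminder job owns. The date arithmetic, in isolation:

    use chrono::{Duration, NaiveDateTime, Utc};

    // Recovery unlocks wait_time_days after initiation; one `now` per batch keeps
    // every record in the batch judged against the same instant.
    fn recovery_allowed(recovery_initiated_at: NaiveDateTime, wait_time_days: i32, now: NaiveDateTime) -> bool {
        recovery_initiated_at + Duration::days(i64::from(wait_time_days)) <= now
    }

    fn main() {
        let now = Utc::now().naive_utc();
        let initiated = now - Duration::days(7);
        assert!(recovery_allowed(initiated, 7, now));
        assert!(!recovery_allowed(initiated, 8, now));
    }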
@@ -802,39 +790,48 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
         return;
     }
 
-    if let Ok(conn) = pool.get().await {
-        let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await;
+    if let Ok(mut conn) = pool.get().await {
+        let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
 
         if emergency_access_list.is_empty() {
             debug!("No emergency request reminder notification to send");
         }
 
+        let now = Utc::now().naive_utc();
        for mut emer in emergency_access_list {
-            if (emer.recovery_initiated_at.is_some()
-                && Utc::now().naive_utc()
-                    >= emer.recovery_initiated_at.unwrap() + Duration::days((i64::from(emer.wait_time_days)) - 1))
-                && (emer.last_notification_at.is_none()
-                    || (emer.last_notification_at.is_some()
-                        && Utc::now().naive_utc() >= emer.last_notification_at.unwrap() + Duration::days(1)))
-            {
-                emer.save(&conn).await.expect("Cannot save emergency access on job");
+            // The find_all_recoveries_initiated already checks if the recovery_initiated_at is not null (None)
+            // Calculate the day before the recovery will become active
+            let final_recovery_reminder_at =
+                emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days - 1));
+            // Calculate if a day has passed since the previous notification, else no notification has been sent before
+            let next_recovery_reminder_at = if let Some(last_notification_at) = emer.last_notification_at {
+                last_notification_at + Duration::days(1)
+            } else {
+                now
+            };
+            if final_recovery_reminder_at.le(&now) && next_recovery_reminder_at.le(&now) {
+                // Only update the last notification date
+                // Updating the whole record could cause issues when the emergency_request_timeout_job is also active
+                emer.update_last_notification_date_and_save(&now, &mut conn)
+                    .await
+                    .expect("Unable to update emergency access notification date");
 
                 if CONFIG.mail_enabled() {
                     // get grantor user to send Accepted email
                     let grantor_user =
-                        User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found.");
+                        User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
 
                     // get grantee user to send Accepted email
                     let grantee_user =
-                        User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn)
+                        User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
                             .await
-                            .expect("Grantee user not found.");
+                            .expect("Grantee user not found");
 
                     mail::send_emergency_access_recovery_reminder(
                         &grantor_user.email,
-                        &grantee_user.name.clone(),
+                        &grantee_user.name,
                         emer.get_type_as_str(),
-                        &emer.wait_time_days.to_string(), // TODO(jjlin): This should be the number of days left.
+                        "1", // This notification is only triggered one day before the activation
                     )
                     .await
                     .expect("Error on sending email");