Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-09-09 18:25:58 +03:00)

Compare commits: 0.9.0...55945ad793 (2744 commits)
[Commit list omitted: 2744 commits from 55945ad793 down to 0da4a8fc8a; the Author, Date, and commit-message columns of the table were not captured in this mirror view, leaving only bare SHA1 hashes.]
@@ -1,18 +1,16 @@
# Local build artifacts
target
// Ignore everything
*

# Data folder
data

# IDE files
.vscode
.idea
*.iml

# Git files
.git
.gitignore

# Documentation
*.md
// Allow what is needed
!.git
!docker/healthcheck.sh
!docker/start.sh
!macros
!migrations
!src

!build.rs
!Cargo.lock
!Cargo.toml
!rustfmt.toml
!rust-toolchain.toml
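The hunk above (evidently the Docker build-context ignore file, whose name header is not captured in this excerpt) moves from a section-by-section ignore list to a deny-everything-then-allowlist pattern: `*` excludes the whole context, and the `!` entries re-include only the paths the build needs. If you want to see what actually reaches the build context under such rules, one hypothetical check is to build a throwaway image that copies the context and then list it; the `context-check` tag and the busybox base below are placeholders, not anything the project defines:

# Sketch only: inspect the effective Docker build context under the new ignore rules.
docker build -t context-check -f- . <<'EOF'
FROM busybox
COPY . /ctx
CMD ["find", "/ctx", "-maxdepth", "2"]
EOF
docker run --rm context-check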
.editorconfig (Normal file, 23 lines)
@@ -0,0 +1,23 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

[*]
end_of_line = lf
charset = utf-8

[*.{rs,py}]
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true

[*.{yml,yaml}]
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
insert_final_newline = true

[Makefile]
indent_style = tab
.env (13 lines)
@@ -1,13 +0,0 @@
# DATABASE_URL=data/db.sqlite3
# PRIVATE_RSA_KEY=data/private_rsa_key.der
# PUBLIC_RSA_KEY=data/public_rsa_key.der
# ICON_CACHE_FOLDER=data/icon_cache
# ATTACHMENTS_FOLDER=data/attachments

# true for yes, anything else for no
SIGNUPS_ALLOWED=true

# ROCKET_ENV=production
# ROCKET_ADDRESS=0.0.0.0 # Enable this to test mobile app
# ROCKET_PORT=8000
# ROCKET_TLS={certs="/path/to/certs.pem",key="/path/to/key.pem"}
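The committed `.env` above is dropped, and the `.env.template` that follows documents the configuration instead. Per the template's own header, the server reads a file named `.env` in its working directory, or whatever path the `ENV_FILE` environment variable points to. A minimal, hypothetical setup sketch (the binary name and the paths are illustrative, not mandated by the repository):

# Sketch: create a local config from the template and point the server at it.
cp .env.template .env                            # then uncomment/edit settings as needed
./vaultwarden                                    # picks up ./.env from the working directory
ENV_FILE=/etc/vaultwarden/.env ./vaultwarden     # or load the config from another location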
596
.env.template
Normal file
596
.env.template
Normal file
@@ -0,0 +1,596 @@
|
||||
# shellcheck disable=SC2034,SC2148
|
||||
## Vaultwarden Configuration File
|
||||
## Uncomment any of the following lines to change the defaults
|
||||
##
|
||||
## Be aware that most of these settings will be overridden if they were changed
|
||||
## in the admin interface. Those overrides are stored within DATA_FOLDER/config.json .
|
||||
##
|
||||
## By default, Vaultwarden expects for this file to be named ".env" and located
|
||||
## in the current working directory. If this is not the case, the environment
|
||||
## variable ENV_FILE can be set to the location of this file prior to starting
|
||||
## Vaultwarden.
|
||||
|
||||
####################
|
||||
### Data folders ###
|
||||
####################
|
||||
|
||||
## Main data folder
|
||||
## This can be a path to local folder or a path to an external location
|
||||
## depending on features enabled at build time. Possible external locations:
|
||||
##
|
||||
## - AWS S3 Bucket (via `s3` feature): s3://bucket-name/path/to/folder
|
||||
##
|
||||
## When using an external location, make sure to set TMP_FOLDER,
|
||||
## TEMPLATES_FOLDER, and DATABASE_URL to local paths and/or a remote database
|
||||
## location.
|
||||
# DATA_FOLDER=data
|
||||
|
||||
## Individual folders, these override %DATA_FOLDER%
|
||||
# RSA_KEY_FILENAME=data/rsa_key
|
||||
# ICON_CACHE_FOLDER=data/icon_cache
|
||||
# ATTACHMENTS_FOLDER=data/attachments
|
||||
# SENDS_FOLDER=data/sends
|
||||
|
||||
## Temporary folder used for storing temporary file uploads
|
||||
## Must be a local path.
|
||||
# TMP_FOLDER=data/tmp
|
||||
|
||||
## HTML template overrides data folder
|
||||
## Must be a local path.
|
||||
# TEMPLATES_FOLDER=data/templates
|
||||
## Automatically reload the templates for every request, slow, use only for development
|
||||
# RELOAD_TEMPLATES=false
|
||||
|
||||
## Web vault settings
|
||||
# WEB_VAULT_FOLDER=web-vault/
|
||||
# WEB_VAULT_ENABLED=true
|
||||
|
||||
#########################
|
||||
### Database settings ###
|
||||
#########################
|
||||
|
||||
## Database URL
|
||||
## When using SQLite, this is the path to the DB file, and it defaults to
|
||||
## %DATA_FOLDER%/db.sqlite3. If DATA_FOLDER is set to an external location, this
|
||||
## must be set to a local sqlite3 file path.
|
||||
# DATABASE_URL=data/db.sqlite3
|
||||
## When using MySQL, specify an appropriate connection URI.
|
||||
## Details: https://docs.diesel.rs/2.1.x/diesel/mysql/struct.MysqlConnection.html
|
||||
# DATABASE_URL=mysql://user:password@host[:port]/database_name
|
||||
## When using PostgreSQL, specify an appropriate connection URI (recommended)
|
||||
## or keyword/value connection string.
|
||||
## Details:
|
||||
## - https://docs.diesel.rs/2.1.x/diesel/pg/struct.PgConnection.html
|
||||
## - https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING
|
||||
# DATABASE_URL=postgresql://user:password@host[:port]/database_name
|
||||
|
||||
## Enable WAL for the DB
|
||||
## Set to false to avoid enabling WAL during startup.
|
||||
## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB,
|
||||
## this setting only prevents Vaultwarden from automatically enabling it on start.
|
||||
## Please read project wiki page about this setting first before changing the value as it can
|
||||
## cause performance degradation or might render the service unable to start.
|
||||
# ENABLE_DB_WAL=true
|
||||
|
||||
## Database connection retries
|
||||
## Number of times to retry the database connection during startup, with 1 second delay between each retry, set to 0 to retry indefinitely
|
||||
# DB_CONNECTION_RETRIES=15
|
||||
|
||||
## Database timeout
|
||||
## Timeout when acquiring database connection
|
||||
# DATABASE_TIMEOUT=30
|
||||
|
||||
## Database max connections
|
||||
## Define the size of the connection pool used for connecting to the database.
|
||||
# DATABASE_MAX_CONNS=10
|
||||
|
||||
## Database connection initialization
|
||||
## Allows SQL statements to be run whenever a new database connection is created.
|
||||
## This is mainly useful for connection-scoped pragmas.
|
||||
## If empty, a database-specific default is used:
|
||||
## - SQLite: "PRAGMA busy_timeout = 5000; PRAGMA synchronous = NORMAL;"
|
||||
## - MySQL: ""
|
||||
## - PostgreSQL: ""
|
||||
# DATABASE_CONN_INIT=""

#################
### WebSocket ###
#################

## Enable websocket notifications
# ENABLE_WEBSOCKET=true

##########################
### Push notifications ###
##########################

## Enables push notifications (requires key and id from https://bitwarden.com/host)
## Details about mobile client push notifications:
## - https://github.com/dani-garcia/vaultwarden/wiki/Enabling-Mobile-Client-push-notification
# PUSH_ENABLED=false
# PUSH_INSTALLATION_ID=CHANGEME
# PUSH_INSTALLATION_KEY=CHANGEME

# WARNING: Do not modify the following settings unless you fully understand their implications!
# Default Push Relay and Identity URIs
# PUSH_RELAY_URI=https://push.bitwarden.com
# PUSH_IDENTITY_URI=https://identity.bitwarden.com
# European Union Data Region Settings
# If you have selected "European Union" as your data region, use the following URIs instead.
# PUSH_RELAY_URI=https://api.bitwarden.eu
# PUSH_IDENTITY_URI=https://identity.bitwarden.eu

#####################
### Schedule jobs ###
#####################

## Job scheduler settings
##
## Job schedules use a cron-like syntax (as parsed by https://crates.io/crates/cron),
## and are always in terms of UTC time (regardless of your local time zone settings).
##
## The schedule format is a bit different from crontab, as crontab does not contain seconds.
## You can test the format here: https://crontab.guru, but leave out the leading seconds field!
## SEC MIN HOUR DAY OF MONTH MONTH DAY OF WEEK
## "0 30 9,12,15 1,15 May-Aug Mon,Wed,Fri"
## "0 30 * * * * "
## "0 30 1 * * * "
##
## How often (in ms) the job scheduler thread checks for jobs that need running.
## Set to 0 to globally disable scheduled jobs.
# JOB_POLL_INTERVAL_MS=30000
##
## Cron schedule of the job that checks for Sends past their deletion date.
## Defaults to hourly (5 minutes after the hour). Set blank to disable this job.
# SEND_PURGE_SCHEDULE="0 5 * * * *"
##
## Cron schedule of the job that checks for trashed items to delete permanently.
## Defaults to daily (5 minutes after midnight). Set blank to disable this job.
# TRASH_PURGE_SCHEDULE="0 5 0 * * *"
##
## Cron schedule of the job that checks for incomplete 2FA logins.
## Defaults to once every minute. Set blank to disable this job.
# INCOMPLETE_2FA_SCHEDULE="30 * * * * *"
##
## Cron schedule of the job that sends expiration reminders to emergency access grantors.
## Defaults to hourly (3 minutes after the hour). Set blank to disable this job.
# EMERGENCY_NOTIFICATION_REMINDER_SCHEDULE="0 3 * * * *"
##
## Cron schedule of the job that grants emergency access requests that have met the required wait time.
## Defaults to hourly (7 minutes after the hour). Set blank to disable this job.
# EMERGENCY_REQUEST_TIMEOUT_SCHEDULE="0 7 * * * *"
##
## Cron schedule of the job that cleans old events from the event table.
## Defaults to daily. Set blank to disable this job. Note that without EVENTS_DAYS_RETAIN set, this job will not start.
# EVENT_CLEANUP_SCHEDULE="0 10 0 * * *"
## Number of days to retain events stored in the database.
## If unset (the default), events are kept indefinitely and the scheduled job is disabled!
# EVENTS_DAYS_RETAIN=
##
## Cron schedule of the job that cleans old auth requests from the auth request table.
## Defaults to every minute. Set blank to disable this job.
# AUTH_REQUEST_PURGE_SCHEDULE="30 * * * * *"
##
## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
## Defaults to every minute. Set blank to disable this job.
# DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"

########################
### General settings ###
########################

## Domain settings
## The domain must match the address from where you access the server.
## It's recommended to configure this value, otherwise certain functionality might not work,
## like attachment downloads, email links and U2F.
## For U2F to work, the server must use HTTPS; you can use Let's Encrypt for free certs.
## To use HTTPS, the recommended way is to put Vaultwarden behind a reverse proxy.
## Details:
## - https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS
## - https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples
## For development
# DOMAIN=http://localhost
## For public server
# DOMAIN=https://vw.domain.tld
## For public server (URL with port number)
# DOMAIN=https://vw.domain.tld:8443
## For public server (URL with path)
# DOMAIN=https://domain.tld/vw

## Controls whether users are allowed to create Bitwarden Sends.
## This setting applies globally to all users.
## To control this on a per-org basis instead, use the "Disable Send" org policy.
# SENDS_ALLOWED=true

## HIBP API Key
## HaveIBeenPwned API Key, request it here: https://haveibeenpwned.com/API/Key
# HIBP_API_KEY=

## Per-organization attachment storage limit (KB)
## Max kilobytes of attachment storage allowed per organization.
## When this limit is reached, organization members will not be allowed to upload further attachments for ciphers owned by that organization.
# ORG_ATTACHMENT_LIMIT=
## Per-user attachment storage limit (KB)
## Max kilobytes of attachment storage allowed per user.
## When this limit is reached, the user will not be allowed to upload further attachments.
# USER_ATTACHMENT_LIMIT=
## Per-user send storage limit (KB)
## Max kilobytes of send storage allowed per user.
## When this limit is reached, the user will not be allowed to upload further sends.
# USER_SEND_LIMIT=

## Number of days to wait before auto-deleting a trashed item.
## If unset (the default), trashed items are not auto-deleted.
## This setting applies globally, so make sure to inform all users of any changes to this setting.
# TRASH_AUTO_DELETE_DAYS=

## Number of minutes to wait before a 2FA-enabled login is considered incomplete,
## resulting in an email notification. An incomplete 2FA login is one where the correct
## master password was provided but the required 2FA step was not completed, which
## potentially indicates a master password compromise. Set to 0 to disable this check.
## This setting applies globally to all users.
# INCOMPLETE_2FA_TIME_LIMIT=3

## Disable icon downloading
## Set to true to disable icon downloading in the internal icon service.
## This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external
## network requests. $ICON_CACHE_TTL must also be set to 0; otherwise, the existing icons
## will be deleted eventually, but won't be downloaded again.
# DISABLE_ICON_DOWNLOAD=false
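## For instance, to serve only icons that are already cached and never fetch new ones,
## the note above implies pairing the two settings like this (illustration only):
# DISABLE_ICON_DOWNLOAD=true
# ICON_CACHE_TTL=0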

## Controls if new users can register
# SIGNUPS_ALLOWED=true

## Controls if new users need to verify their email address upon registration
## On new client versions, this will require the user to verify their email at signup time.
## On older clients, it will require the user to verify their email before they can log in.
## The welcome email will include a verification link, and login attempts will periodically
## trigger another verification email to be sent.
# SIGNUPS_VERIFY=false

## If SIGNUPS_VERIFY is set to true, this limits how many seconds after the last time
## an email verification link has been sent another verification email will be sent
# SIGNUPS_VERIFY_RESEND_TIME=3600

## If SIGNUPS_VERIFY is set to true, this limits how many times an email verification
## email will be re-sent upon an attempted login.
# SIGNUPS_VERIFY_RESEND_LIMIT=6

## Controls if new users from a list of comma-separated domains can register
## even if SIGNUPS_ALLOWED is set to false
# SIGNUPS_DOMAINS_WHITELIST=example.com,example.net,example.org

## Controls whether event logging is enabled for organizations
## This setting applies to organizations.
## Disabled by default. Also check the EVENT_CLEANUP_SCHEDULE and EVENTS_DAYS_RETAIN settings.
# ORG_EVENTS_ENABLED=false

## Controls which users can create new orgs.
## Blank or 'all' means all users can create orgs (this is the default):
# ORG_CREATION_USERS=
## 'none' means no users can create orgs:
# ORG_CREATION_USERS=none
## A comma-separated list means only those users can create orgs:
# ORG_CREATION_USERS=admin1@example.com,admin2@example.com

## Allows org admins to invite users, even when signups are disabled
# INVITATIONS_ALLOWED=true
## Name shown in the invitation emails that don't come from a specific organization
# INVITATION_ORG_NAME=Vaultwarden

## The number of hours after which an organization invite token, emergency access invite token,
## email verification token and deletion request token will expire (must be at least 1)
# INVITATION_EXPIRATION_HOURS=120

## Controls whether users can enable emergency access to their accounts.
## This setting applies globally to all users.
# EMERGENCY_ACCESS_ALLOWED=true

## Controls whether users can change their email.
## This setting applies globally to all users.
# EMAIL_CHANGE_ALLOWED=true

## Number of server-side password hashing iterations for the password hash.
## The default for new users. If changed, it will be updated during login for existing users.
# PASSWORD_ITERATIONS=600000

## Controls whether users can set or show password hints. This setting applies globally to all users.
# PASSWORD_HINTS_ALLOWED=true

## Controls whether a password hint should be shown directly in the web page if
## SMTP service is not configured and password hints are allowed.
## Not recommended for publicly-accessible instances because this provides
## unauthenticated access to potentially sensitive data.
# SHOW_PASSWORD_HINT=false

#########################
### Advanced settings ###
#########################

## Client IP Header, used to identify the IP of the client, defaults to "X-Real-IP"
## Set to the string "none" (without quotes) to disable any headers and just use the remote IP
# IP_HEADER=X-Real-IP

## Icon service
## The predefined icon services are: internal, bitwarden, duckduckgo, google.
## To specify a custom icon service, set a URL template with exactly one instance of `{}`,
## which is replaced with the domain. For example: `https://icon.example.com/domain/{}`.
##
## `internal` refers to Vaultwarden's built-in icon fetching implementation.
## If an external service is set, an icon request to Vaultwarden will return an HTTP
## redirect to the corresponding icon at the external service. An external service may
## be useful if your Vaultwarden instance has no external network connectivity, or if
## you are concerned that someone may probe your instance to try to detect whether icons
## for certain sites have been cached.
# ICON_SERVICE=internal
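## Illustration of a custom service using the URL template from the note above
## (the hostname is just the example placeholder, not a real service):
# ICON_SERVICE=https://icon.example.com/domain/{}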

## Icon redirect code
## The HTTP status code to use for redirects to an external icon service.
## The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent).
## Temporary redirects are useful while testing different icon services, but once a service
## has been decided on, consider using permanent redirects for cacheability. The legacy codes
## are currently better supported by the Bitwarden clients.
# ICON_REDIRECT_CODE=302

## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever")
## Default: 2592000 (30 days)
# ICON_CACHE_TTL=2592000
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
## Default: 259200 (3 days)
# ICON_CACHE_NEGTTL=259200

## Icon download timeout
## Configure the timeout value when downloading the favicons.
## The default is 10 seconds, but this could be too low on slower network connections.
# ICON_DOWNLOAD_TIMEOUT=10

## Block HTTP domains/IPs by Regex
## Any domains or IPs that match this regex won't be fetched by the internal HTTP client.
## Useful to hide other servers in the local network. Check the wiki for more details.
## NOTE: Always enclose this regex within single quotes!
# HTTP_REQUEST_BLOCK_REGEX='^(192\.168\.0\.[0-9]+|192\.168\.1\.[0-9]+)$'

## Enabling this will cause the internal HTTP client to refuse to connect to any non-global IP address.
## Useful to secure your internal environment: see https://en.wikipedia.org/wiki/Reserved_IP_addresses for a list of IPs which it will block.
# HTTP_REQUEST_BLOCK_NON_GLOBAL_IPS=true

## Client Settings
## Enable experimental feature flags for clients.
## This is a comma-separated list of flags, e.g. "flag1,flag2,flag3".
## Note that clients cache the /api/config endpoint for about 1 hour, so it could take some time before flags are enabled or disabled!
##
## The following flags are available:
## - "inline-menu-positioning-improvements": Enable the use of inline menu password generator and identity suggestions in the browser extension.
## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "mutual-tls": Enable the use of mutual TLS on Android (Client >= 2025.2.0)
# EXPERIMENTAL_CLIENT_FEATURE_FLAGS=fido2-vault-credentials
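## For example (illustration only), to enable the two SSH-related flags listed above:
# EXPERIMENTAL_CLIENT_FEATURE_FLAGS=ssh-agent,ssh-key-vault-item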

## Require new device emails. When a user logs in, an email is required to be sent;
## if sending the email fails, the login attempt will fail!!
# REQUIRE_DEVICE_EMAIL=false

## Enable extended logging, which shows timestamps and targets in the logs
# EXTENDED_LOGGING=true

## Timestamp format used in extended logging.
## Format specifiers: https://docs.rs/chrono/latest/chrono/format/strftime
# LOG_TIMESTAMP_FORMAT="%Y-%m-%d %H:%M:%S.%3f"

## Logging to Syslog
## This requires extended logging
# USE_SYSLOG=false

## Logging to file
# LOG_FILE=/path/to/log

## Log level
## Change the verbosity of the log output
## Valid values are "trace", "debug", "info", "warn", "error" and "off"
## Setting it to "trace" or "debug" would also show logs for mounted routes and static file, websocket and alive requests
## For a specific module, append a comma-separated `path::to::module=log_level`
## For example, to only see debug logs for icons use: LOG_LEVEL="info,vaultwarden::api::icons=debug"
# LOG_LEVEL=info

## Token for the admin interface, preferably an Argon2 PHC string
## Vaultwarden has a built-in generator, invoked by calling `vaultwarden hash`
## For details see: https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page#secure-the-admin_token
## If not set, the admin panel is disabled
## New Argon2 PHC string
## Note that for some environments, like docker-compose, you need to escape all the dollar signs `$` with an extra dollar sign like `$$`
## Also, use single quotes (') instead of double quotes (") to enclose the string when needed
# ADMIN_TOKEN='$argon2id$v=19$m=65540,t=3,p=4$MmeKRnGK5RW5mJS7h3TOL89GrpLPXJPAtTK8FTqj9HM$DqsstvoSAETl9YhnsXbf43WeaUwJC6JhViIvuPoig78'
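## Illustrative docker-compose form of the same example token, with every `$` doubled as described above:
# ADMIN_TOKEN='$$argon2id$$v=19$$m=65540,t=3,p=4$$MmeKRnGK5RW5mJS7h3TOL89GrpLPXJPAtTK8FTqj9HM$$DqsstvoSAETl9YhnsXbf43WeaUwJC6JhViIvuPoig78'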
## Old plain text string (Will generate warnings in favor of Argon2)
# ADMIN_TOKEN=Vy2VyYTTsKPv8W5aEOWUbB/Bt3DEKePbHmI4m9VcemUMS2rEviDowNAFqYi1xjmp

## Enable this to bypass the admin panel security. This option is only
## meant to be used with a separate auth layer in front.
# DISABLE_ADMIN_TOKEN=false

## Number of seconds, on average, between admin login requests from the same IP address before rate limiting kicks in.
# ADMIN_RATELIMIT_SECONDS=300
## Allow a burst of requests of up to this size, while maintaining the average indicated by `ADMIN_RATELIMIT_SECONDS`.
# ADMIN_RATELIMIT_MAX_BURST=3

## Set the lifetime of admin sessions to this value (in minutes).
# ADMIN_SESSION_LIFETIME=20

## Allowed iframe ancestors (Know the risks!)
## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/frame-ancestors
## Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets
## This adds the configured value to the 'Content-Security-Policy' header's 'frame-ancestors' value.
## Multiple values must be separated with whitespace.
# ALLOWED_IFRAME_ANCESTORS=
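## For example (the hostnames are placeholders), to let two intranet sites embed the web vault:
# ALLOWED_IFRAME_ANCESTORS="https://intranet.example.com https://wiki.example.com"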

## Allowed connect-src (Know the risks!)
## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/connect-src
## Adds additional URLs which can be loaded using script interfaces, e.g. for the forwarded email alias feature
## This adds the configured value to the 'Content-Security-Policy' header's 'connect-src' value.
## Multiple values must be separated with whitespace, and only HTTPS URLs are allowed.
## Example: "https://my-addy-io.domain.tld https://my-simplelogin.domain.tld"
# ALLOWED_CONNECT_SRC=""

## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in.
# LOGIN_RATELIMIT_SECONDS=60
## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`.
## Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2.
# LOGIN_RATELIMIT_MAX_BURST=10

## BETA FEATURE: Groups
## Controls whether group support is enabled for organizations
## This setting applies to organizations.
## Disabled by default because this is a beta feature; it contains known issues!
## KNOW WHAT YOU ARE DOING!
# ORG_GROUPS_ENABLED=false

## Increase secure note size limit (Know the risks!)
## Sets the secure note size limit to 100_000 instead of the default 10_000.
## WARNING: This could cause issues with clients. Also, exports will not work on Bitwarden servers!
## KNOW WHAT YOU ARE DOING!
# INCREASE_NOTE_SIZE_LIMIT=false

## Enforce Single Org with Reset Password Policy
## Enforce that the Single Org policy is enabled before setting the Reset Password policy
## Bitwarden enforces this by default. In Vaultwarden, we encouraged the use of multiple organizations because groups were not available.
## Setting this to true will enforce the Single Org policy to be enabled before you can enable the Reset Password policy.
# ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false

########################
### MFA/2FA settings ###
########################

## Yubico (Yubikey) Settings
## Set your Client ID and Secret Key for Yubikey OTP
## You can generate it here: https://upgrade.yubico.com/getapikey/
## You can optionally specify a custom OTP server
# YUBICO_CLIENT_ID=11111
# YUBICO_SECRET_KEY=AAAAAAAAAAAAAAAAAAAAAAAA
# YUBICO_SERVER=http://yourdomain.com/wsapi/2.0/verify

## Duo Settings
## You need to configure the DUO_IKEY, DUO_SKEY, and DUO_HOST options to enable global Duo support.
## Otherwise, users will need to configure it themselves.
## Create an account and protect an application as mentioned in this link (only the first step, not the rest):
## https://help.bitwarden.com/article/setup-two-step-login-duo/#create-a-duo-security-account
## Then set the following options, based on the values obtained from the last step:
# DUO_IKEY=<Client ID>
# DUO_SKEY=<Client Secret>
# DUO_HOST=<API Hostname>
## After that, you should be able to follow the rest of the guide linked above,
## ignoring the fields that ask for the values that you already configured beforehand.
##
## If you want to attempt to use Duo's 'Traditional Prompt' (deprecated, iframe-based), set DUO_USE_IFRAME to 'true'.
## Duo no longer supports this, but it still works for some integrations.
## If you aren't sure, leave this alone.
# DUO_USE_IFRAME=false

## Email 2FA settings
## Email token size
## Number of digits in an email 2FA token (min: 6, max: 255).
## Note that the Bitwarden clients are hardcoded to mention 6-digit codes regardless of this setting!
# EMAIL_TOKEN_SIZE=6
##
## Token expiration time
## Maximum time in seconds a token is valid, i.e. the time the user has to open their email client and copy the token.
# EMAIL_EXPIRATION_TIME=600
##
## Maximum attempts before an email token is reset and a new email will need to be sent.
# EMAIL_ATTEMPTS_LIMIT=3
##
## Set up email 2FA on registration regardless of any organization policy
# EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false
## Automatically set up email 2FA as a fallback provider when needed
# EMAIL_2FA_AUTO_FALLBACK=false

## Other MFA/2FA settings
## Disable 2FA remember
## Enabling this would force the users to use a second factor to log in every time.
## Note that the checkbox would still be present, but ignored.
# DISABLE_2FA_REMEMBER=false
##
## Authenticator Settings
## Disallow authenticator codes from adjacent time steps (time drift):
## TOTP codes of the previous and next 30 seconds will be invalid.
##
## According to RFC 6238 (https://tools.ietf.org/html/rfc6238),
## we allow by default the TOTP codes which are valid one step in the past and one step in the future.
## This can however allow attackers to be a bit more lucky with their attempts, because there are 3 valid codes.
## You can disable this, so that only the current TOTP code is allowed.
## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid.
## In any case, if a code has been used it cannot be used again; codes which predate it will also be invalid.
# AUTHENTICATOR_DISABLE_TIME_DRIFT=false

###########################
### SMTP Email settings ###
###########################

## Mail specific settings, set SMTP_FROM and either SMTP_HOST or USE_SENDMAIL to enable the mail service.
## To make sure the email links are pointing to the correct host, set the DOMAIN variable.
## Note: if SMTP_USERNAME is specified, SMTP_PASSWORD is mandatory
# SMTP_HOST=smtp.domain.tld
# SMTP_FROM=vaultwarden@domain.tld
# SMTP_FROM_NAME=Vaultwarden
# SMTP_USERNAME=username
# SMTP_PASSWORD=password
# SMTP_TIMEOUT=15

## Choose the type of secure connection for SMTP. The default is "starttls".
## The available options are:
## - "starttls": The default port is 587.
## - "force_tls": The default port is 465.
## - "off": The default port is 25.
## Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 (submissions) is used for encrypted submission (Implicit TLS).
# SMTP_SECURITY=starttls
# SMTP_PORT=587
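## For example (illustration only), a provider that only offers implicit TLS (SMTPS)
## would typically be configured as:
# SMTP_SECURITY=force_tls
# SMTP_PORT=465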

# Whether to send mail via the `sendmail` command
# USE_SENDMAIL=false
# Which sendmail command to use. The one found in the $PATH is used if not specified.
# SENDMAIL_COMMAND="/path/to/sendmail"

## Defaults for SSL are "Plain" and "Login", and nothing for non-SSL connections.
## Possible values: ["Plain", "Login", "Xoauth2"].
## Multiple options need to be separated by a comma ','.
# SMTP_AUTH_MECHANISM=
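## For example (illustration only; whether quoting is needed depends on how you load this file),
## to explicitly offer both of the SSL defaults listed above:
# SMTP_AUTH_MECHANISM=Plain,Login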

## Server name sent during the SMTP HELO
## By default this value is based on the machine's hostname,
## but it might need to be changed in case it trips some anti-spam filters
# HELO_NAME=

## Embed images as email attachments
# SMTP_EMBED_IMAGES=true

## SMTP debugging
## When set to true this will output very detailed SMTP messages.
## WARNING: This could contain sensitive information like passwords and usernames! Only enable this during troubleshooting!
# SMTP_DEBUG=false

## Accept Invalid Certificates
## DANGEROUS: This option introduces significant vulnerabilities to man-in-the-middle attacks!
## Only use this as a last resort if you are not able to use a valid certificate.
## If the certificate is valid but the hostname doesn't match, please use SMTP_ACCEPT_INVALID_HOSTNAMES instead.
# SMTP_ACCEPT_INVALID_CERTS=false

## Accept Invalid Hostnames
## DANGEROUS: This option introduces significant vulnerabilities to man-in-the-middle attacks!
## Only use this as a last resort if you are not able to use a valid certificate.
# SMTP_ACCEPT_INVALID_HOSTNAMES=false

#######################
### Rocket settings ###
#######################

## Rocket specific settings
## See https://rocket.rs/v0.5/guide/configuration/ for more details.
# ROCKET_ADDRESS=0.0.0.0
## The default port is 8000, unless running in a Docker container, in which case it is 80.
# ROCKET_PORT=8000
# ROCKET_TLS={certs="/path/to/certs.pem",key="/path/to/key.pem"}


# vim: syntax=ini
3  .gitattributes  vendored  Normal file
@@ -0,0 +1,3 @@
# Ignore vendored scripts in GitHub stats
src/static/scripts/* linguist-vendored
5  .github/CODEOWNERS  vendored  Normal file
@@ -0,0 +1,5 @@
/.github @dani-garcia @BlackDex
/.github/** @dani-garcia @BlackDex
/.github/CODEOWNERS @dani-garcia @BlackDex
/.github/workflows/** @dani-garcia @BlackDex
/SECURITY.md @dani-garcia @BlackDex
3  .github/FUNDING.yml  vendored  Normal file
@@ -0,0 +1,3 @@
github: dani-garcia
liberapay: dani-garcia
custom: ["https://paypal.me/DaniGG"]
167  .github/ISSUE_TEMPLATE/bug_report.yml  vendored  Normal file
@@ -0,0 +1,167 @@
|
||||
name: Bug Report
|
||||
description: File a bug report
|
||||
labels: ["bug"]
|
||||
body:
|
||||
#
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report!
|
||||
|
||||
Please *do not* submit feature requests or ask for help on how to configure Vaultwarden here.
|
||||
|
||||
The [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions/) has sections for Questions and Ideas.
|
||||
|
||||
Also, make sure you are running the [latest release](https://github.com/dani-garcia/vaultwarden/releases/latest) of Vaultwarden!
|
||||
And search for existing open or closed issues or discussions regarding your topic before posting.
|
||||
|
||||
Be sure to check and validate the Vaultwarden Admin Diagnostics (`/admin/diagnostics`) page for any errors!
|
||||
See here [how to enable the admin page](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page).
|
||||
#
|
||||
- id: support-string
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Vaultwarden Support String
|
||||
description: Output of the **Generate Support String** from the `/admin/diagnostics` page.
|
||||
placeholder: |
|
||||
1. Go to the Vaultwarden Admin of your instance https://example.domain.tld/admin/diagnostics
|
||||
2. Click on `Generate Support String`
|
||||
3. Click on `Copy To Clipboard`
|
||||
4. Replace this text by pasting it into this textarea without any modifications
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: version
|
||||
type: input
|
||||
attributes:
|
||||
label: Vaultwarden Build Version
|
||||
description: What version of Vaultwarden are you running?
|
||||
placeholder: ex. v1.31.0 or v1.32.0-3466a804
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: deployment
|
||||
type: dropdown
|
||||
attributes:
|
||||
label: Deployment method
|
||||
description: How did you deploy Vaultwarden?
|
||||
multiple: false
|
||||
options:
|
||||
- Official Container Image
|
||||
- Build from source
|
||||
- OS Package (apt, yum/dnf, pacman, apk, nix, ...)
|
||||
- Manually Extracted from Container Image
|
||||
- Downloaded from GitHub Actions Release Workflow
|
||||
- Other method
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: deployment-other
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Custom deployment method
|
||||
description: If you deployed Vaultwarden via any other method, please describe how.
|
||||
#
|
||||
- id: reverse-proxy
|
||||
type: input
|
||||
attributes:
|
||||
label: Reverse Proxy
|
||||
description: Are you using a reverse proxy, if so which and what version?
|
||||
placeholder: ex. nginx 1.26.2, caddy 2.8.4, traefik 3.1.2, haproxy 3.0
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: os
|
||||
type: dropdown
|
||||
attributes:
|
||||
label: Host/Server Operating System
|
||||
description: On what operating system are you running the Vaultwarden server?
|
||||
multiple: false
|
||||
options:
|
||||
- Linux
|
||||
- NAS/SAN
|
||||
- Cloud
|
||||
- Windows
|
||||
- macOS
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: os-version
|
||||
type: input
|
||||
attributes:
|
||||
label: Operating System Version
|
||||
description: What version of the operating system(s) are you seeing the problem on?
|
||||
placeholder: ex. Arch Linux, Ubuntu 24.04, Kubernetes, Synology DSM 7.x, Windows 11
|
||||
#
|
||||
- id: clients
|
||||
type: dropdown
|
||||
attributes:
|
||||
label: Clients
|
||||
description: What client(s) are you seeing the problem on?
|
||||
multiple: true
|
||||
options:
|
||||
- Web Vault
|
||||
- Browser Extension
|
||||
- CLI
|
||||
- Desktop
|
||||
- Android
|
||||
- iOS
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: client-version
|
||||
type: input
|
||||
attributes:
|
||||
label: Client Version
|
||||
description: What version(s) of the client(s) are you seeing the problem on?
|
||||
placeholder: ex. CLI v2024.7.2, Firefox 130 - v2024.7.0
|
||||
#
|
||||
- id: reproduce
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Steps To Reproduce
|
||||
description: How can we reproduce the behavior.
|
||||
value: |
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. Click on '...'
|
||||
5. Etc '...'
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: expected
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Expected Result
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: actual
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Actual Result
|
||||
description: A clear and concise description of what is happening.
|
||||
validations:
|
||||
required: true
|
||||
#
|
||||
- id: logs
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Logs
|
||||
description: Provide the logs generated by Vaultwarden during the time this issue occurs.
|
||||
render: text
|
||||
#
|
||||
- id: screenshots
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Screenshots or Videos
|
||||
description: If applicable, add screenshots and/or a short video to help explain your problem.
|
||||
#
|
||||
- id: additional-context
|
||||
type: textarea
|
||||
attributes:
|
||||
label: Additional Context
|
||||
description: Add any other context about the problem here.
|
8  .github/ISSUE_TEMPLATE/config.yml  vendored  Normal file
@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: GitHub Discussions for Vaultwarden
    url: https://github.com/dani-garcia/vaultwarden/discussions
    about: Use the discussions to request features or get help with usage/configuration.
  - name: Discourse forum for Vaultwarden
    url: https://vaultwarden.discourse.group/
    about: An alternative to the GitHub Discussions, if this is easier for you.
BIN  .github/security-contact.gif  vendored  Normal file
Binary file not shown. (Size: 2.3 KiB)
222  .github/workflows/build.yml  vendored  Normal file
@@ -0,0 +1,222 @@
|
||||
name: Build
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- ".github/workflows/build.yml"
|
||||
- "src/**"
|
||||
- "migrations/**"
|
||||
- "Cargo.*"
|
||||
- "build.rs"
|
||||
- "rust-toolchain.toml"
|
||||
- "rustfmt.toml"
|
||||
- "diesel.toml"
|
||||
- "docker/Dockerfile.j2"
|
||||
- "docker/DockerSettings.yaml"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- ".github/workflows/build.yml"
|
||||
- "src/**"
|
||||
- "migrations/**"
|
||||
- "Cargo.*"
|
||||
- "build.rs"
|
||||
- "rust-toolchain.toml"
|
||||
- "rustfmt.toml"
|
||||
- "diesel.toml"
|
||||
- "docker/Dockerfile.j2"
|
||||
- "docker/DockerSettings.yaml"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build and Test ${{ matrix.channel }}
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
# We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
|
||||
runs-on: ubuntu-22.04
|
||||
timeout-minutes: 120
|
||||
# Make warnings errors, this is to prevent warnings slipping through.
|
||||
# This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
|
||||
env:
|
||||
RUSTFLAGS: "-Dwarnings"
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
channel:
|
||||
- "rust-toolchain" # The version defined in rust-toolchain
|
||||
- "msrv" # The supported MSRV
|
||||
|
||||
steps:
|
||||
# Install dependencies
|
||||
- name: "Install dependencies Ubuntu"
|
||||
run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
|
||||
# End Install dependencies
|
||||
|
||||
# Checkout the repo
|
||||
- name: "Checkout"
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
# End Checkout the repo
|
||||
|
||||
# Determine rust-toolchain version
|
||||
- name: Init Variables
|
||||
id: toolchain
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ "${{ matrix.channel }}" == 'rust-toolchain' ]]; then
|
||||
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
|
||||
elif [[ "${{ matrix.channel }}" == 'msrv' ]]; then
|
||||
RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
|
||||
else
|
||||
RUST_TOOLCHAIN="${{ matrix.channel }}"
|
||||
fi
|
||||
echo "RUST_TOOLCHAIN=${RUST_TOOLCHAIN}" | tee -a "${GITHUB_OUTPUT}"
|
||||
# End Determine rust-toolchain version
|
||||
|
||||
|
||||
# Only install the clippy and rustfmt components on the default rust-toolchain
|
||||
- name: "Install rust-toolchain version"
|
||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master @ Mar 18, 2025, 8:14 PM GMT+1
|
||||
if: ${{ matrix.channel == 'rust-toolchain' }}
|
||||
with:
|
||||
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
|
||||
components: clippy, rustfmt
|
||||
# End Uses the rust-toolchain file to determine version
|
||||
|
||||
|
||||
# Install any other channel to be used, for which we do not execute clippy and rustfmt
|
||||
- name: "Install MSRV version"
|
||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master @ Mar 18, 2025, 8:14 PM GMT+1
|
||||
if: ${{ matrix.channel != 'rust-toolchain' }}
|
||||
with:
|
||||
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
|
||||
# End Install the MSRV channel to be used
|
||||
|
||||
# Set the current matrix toolchain version as default
|
||||
- name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
|
||||
env:
|
||||
RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
|
||||
run: |
|
||||
# Remove the rust-toolchain.toml
|
||||
rm rust-toolchain.toml
|
||||
# Set the default
|
||||
rustup default "${RUST_TOOLCHAIN}"
|
||||
|
||||
# Show environment
|
||||
- name: "Show environment"
|
||||
run: |
|
||||
rustc -vV
|
||||
cargo -vV
|
||||
# End Show environment
|
||||
|
||||
# Enable Rust Caching
|
||||
- name: Rust Caching
|
||||
uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
with:
|
||||
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
|
||||
# Like changing the build host from Ubuntu 20.04 to 22.04 for example.
|
||||
# Only update when really needed! Use a <year>.<month>[.<inc>] format.
|
||||
prefix-key: "v2023.07-rust"
|
||||
# End Enable Rust Caching
|
||||
|
||||
# Run cargo tests
|
||||
# First test all features together, afterwards test them separately.
|
||||
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
|
||||
id: test_sqlite_mysql_postgresql_mimalloc_logger
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
|
||||
|
||||
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
|
||||
id: test_sqlite_mysql_postgresql_mimalloc
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features sqlite,mysql,postgresql,enable_mimalloc
|
||||
|
||||
- name: "test features: sqlite,mysql,postgresql"
|
||||
id: test_sqlite_mysql_postgresql
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features sqlite,mysql,postgresql
|
||||
|
||||
- name: "test features: sqlite"
|
||||
id: test_sqlite
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features sqlite
|
||||
|
||||
- name: "test features: mysql"
|
||||
id: test_mysql
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features mysql
|
||||
|
||||
- name: "test features: postgresql"
|
||||
id: test_postgresql
|
||||
if: ${{ !cancelled() }}
|
||||
run: |
|
||||
cargo test --features postgresql
|
||||
# End Run cargo tests
|
||||
|
||||
|
||||
# Run cargo clippy, and fail on warnings
|
||||
- name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
|
||||
id: clippy
|
||||
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
|
||||
run: |
|
||||
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
|
||||
# End Run cargo clippy
|
||||
|
||||
|
||||
# Run cargo fmt (Only run on rust-toolchain defined version)
|
||||
- name: "check formatting"
|
||||
id: formatting
|
||||
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
|
||||
run: |
|
||||
cargo fmt --all -- --check
|
||||
# End Run cargo fmt
|
||||
|
||||
|
||||
# Check for any previous failures, if there are stop, else continue.
|
||||
# This is useful so all test/clippy/fmt actions are done, and they can all be addressed
|
||||
- name: "Some checks failed"
|
||||
if: ${{ failure() }}
|
||||
env:
|
||||
TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
|
||||
TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
|
||||
TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
|
||||
TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
|
||||
TEST_MYSQL: ${{ steps.test_mysql.outcome }}
|
||||
TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
|
||||
CLIPPY: ${{ steps.clippy.outcome }}
|
||||
FMT: ${{ steps.formatting.outcome }}
|
||||
run: |
|
||||
echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "" >> "${GITHUB_STEP_SUMMARY}"
|
||||
exit 1
|
||||
|
||||
|
||||
# Check for any previous failures, if there are stop, else continue.
|
||||
# This is useful so all test/clippy/fmt actions are done, and they can all be addressed
|
||||
- name: "All checks passed"
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
|
||||
echo "" >> "${GITHUB_STEP_SUMMARY}"
|
28  .github/workflows/check-templates.yml  vendored  Normal file
@@ -0,0 +1,28 @@
|
||||
name: Check templates
|
||||
permissions: {}
|
||||
|
||||
on: [ push, pull_request ]
|
||||
|
||||
jobs:
|
||||
docker-templates:
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
# Checkout the repo
|
||||
- name: "Checkout"
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
# End Checkout the repo
|
||||
|
||||
- name: Run make to rebuild templates
|
||||
working-directory: docker
|
||||
run: make
|
||||
|
||||
- name: Check for unstaged changes
|
||||
working-directory: docker
|
||||
run: git diff --exit-code
|
||||
continue-on-error: false
|
57  .github/workflows/hadolint.yml  vendored  Normal file
@@ -0,0 +1,57 @@
|
||||
name: Hadolint
|
||||
permissions: {}
|
||||
|
||||
on: [ push, pull_request ]
|
||||
|
||||
jobs:
|
||||
hadolint:
|
||||
name: Validate Dockerfile syntax
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
# Start Docker Buildx
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
# https://github.com/moby/buildkit/issues/3969
|
||||
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
|
||||
with:
|
||||
buildkitd-config-inline: |
|
||||
[worker.oci]
|
||||
max-parallelism = 2
|
||||
driver-opts: |
|
||||
network=host
|
||||
|
||||
# Download hadolint - https://github.com/hadolint/hadolint/releases
|
||||
- name: Download hadolint
|
||||
shell: bash
|
||||
run: |
|
||||
sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
|
||||
sudo chmod +x /usr/local/bin/hadolint
|
||||
env:
|
||||
HADOLINT_VERSION: 2.12.0
|
||||
# End Download hadolint
|
||||
# Checkout the repo
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
# End Checkout the repo
|
||||
|
||||
# Test Dockerfiles with hadolint
|
||||
- name: Run hadolint
|
||||
shell: bash
|
||||
run: hadolint docker/Dockerfile.{debian,alpine}
|
||||
# End Test Dockerfiles with hadolint
|
||||
|
||||
# Test Dockerfiles with docker build checks
|
||||
- name: Run docker build check
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Checking docker/Dockerfile.debian"
|
||||
docker build --check . -f docker/Dockerfile.debian
|
||||
echo "Checking docker/Dockerfile.alpine"
|
||||
docker build --check . -f docker/Dockerfile.alpine
|
||||
# End Test Dockerfiles with docker build checks
|
320  .github/workflows/release.yml  vendored  Normal file
@@ -0,0 +1,320 @@
|
||||
name: Release
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
tags:
|
||||
# https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
|
||||
- '[1-2].[0-9]+.[0-9]+'
|
||||
|
||||
jobs:
|
||||
# https://github.com/marketplace/actions/skip-duplicate-actions
|
||||
# Some checks to determine if we need to continue with building a new docker.
|
||||
# We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
|
||||
skip_check:
|
||||
# Only run this in the upstream repo and not on forks
|
||||
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
|
||||
name: Cancel older jobs when running
|
||||
permissions:
|
||||
actions: write
|
||||
runs-on: ubuntu-24.04
|
||||
outputs:
|
||||
should_skip: ${{ steps.skip_check.outputs.should_skip }}
|
||||
|
||||
steps:
|
||||
- name: Skip Duplicates Actions
|
||||
id: skip_check
|
||||
uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
|
||||
with:
|
||||
cancel_others: 'true'
|
||||
# Only run this when not creating a tag
|
||||
if: ${{ github.ref_type == 'branch' }}
|
||||
|
||||
docker-build:
|
||||
needs: skip_check
|
||||
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
|
||||
name: Build Vaultwarden containers
|
||||
permissions:
|
||||
packages: write
|
||||
contents: read
|
||||
attestations: write
|
||||
id-token: write
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 120
|
||||
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
|
||||
services:
|
||||
registry:
|
||||
image: registry:2
|
||||
ports:
|
||||
- 5000:5000
|
||||
env:
|
||||
SOURCE_COMMIT: ${{ github.sha }}
|
||||
SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
|
||||
# The *_REPO variables need to be configured as repository variables
|
||||
# Append `/settings/variables/actions` to your repo url
|
||||
# DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
|
||||
# Check for Docker hub credentials in secrets
|
||||
HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
|
||||
# GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
|
||||
# Check for Github credentials in secrets
|
||||
HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
|
||||
# QUAY_REPO needs to be 'quay.io/<user>/<repo>'
|
||||
# Check for Quay.io credentials in secrets
|
||||
HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
|
||||
strategy:
|
||||
matrix:
|
||||
base_image: ["debian","alpine"]
|
||||
|
||||
steps:
|
||||
- name: Initialize QEMU binfmt support
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
with:
|
||||
platforms: "arm64,arm"
|
||||
|
||||
# Start Docker Buildx
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
# https://github.com/moby/buildkit/issues/3969
|
||||
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
|
||||
with:
|
||||
cache-binary: false
|
||||
buildkitd-config-inline: |
|
||||
[worker.oci]
|
||||
max-parallelism = 2
|
||||
driver-opts: |
|
||||
network=host
|
||||
|
||||
# Checkout the repo
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
# We need fetch-depth of 0 so we also get all the tag metadata
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
# Determine Base Tags and Source Version
|
||||
- name: Determine Base Tags and Source Version
|
||||
shell: bash
|
||||
env:
|
||||
REF_TYPE: ${{ github.ref_type }}
|
||||
run: |
|
||||
# Check which main tag we are going to build determined by ref_type
|
||||
if [[ "${REF_TYPE}" == "tag" ]]; then
|
||||
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
|
||||
elif [[ "${REF_TYPE}" == "branch" ]]; then
|
||||
echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
|
||||
fi
|
||||
|
||||
# Get the Source Version for this release
|
||||
GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || true)"
|
||||
if [[ -n "${GIT_EXACT_TAG}" ]]; then
|
||||
echo "SOURCE_VERSION=${GIT_EXACT_TAG}" | tee -a "${GITHUB_ENV}"
|
||||
else
|
||||
GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
|
||||
echo "SOURCE_VERSION=${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}" | tee -a "${GITHUB_ENV}"
|
||||
fi
|
||||
# End Determine Base Tags
|
||||
|
||||
# Login to Docker Hub
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
|
||||
|
||||
- name: Add registry for DockerHub
|
||||
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
|
||||
shell: bash
|
||||
env:
|
||||
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
|
||||
run: |
|
||||
echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
|
||||
|
||||
# Login to GitHub Container Registry
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
|
||||
|
||||
- name: Add registry for ghcr.io
|
||||
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
|
||||
shell: bash
|
||||
env:
|
||||
GHCR_REPO: ${{ vars.GHCR_REPO }}
|
||||
run: |
|
||||
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
|
||||
|
||||
# Login to Quay.io
|
||||
- name: Login to Quay.io
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: quay.io
|
||||
username: ${{ secrets.QUAY_USERNAME }}
|
||||
password: ${{ secrets.QUAY_TOKEN }}
|
||||
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
|
||||
|
||||
- name: Add registry for Quay.io
|
||||
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
|
||||
shell: bash
|
||||
env:
|
||||
QUAY_REPO: ${{ vars.QUAY_REPO }}
|
||||
run: |
|
||||
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
|
||||
|
||||
- name: Configure build cache from/to
|
||||
shell: bash
|
||||
env:
|
||||
GHCR_REPO: ${{ vars.GHCR_REPO }}
|
||||
BASE_IMAGE: ${{ matrix.base_image }}
|
||||
run: |
|
||||
#
|
||||
# Check if there is a GitHub Container Registry Login and use it for caching
|
||||
if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
|
||||
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
|
||||
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
|
||||
else
|
||||
echo "BAKE_CACHE_FROM="
|
||||
echo "BAKE_CACHE_TO="
|
||||
fi
|
||||
#
|
||||
|
||||
- name: Add localhost registry
|
||||
shell: bash
|
||||
run: |
|
||||
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"
|
||||
|
||||
- name: Bake ${{ matrix.base_image }} containers
|
||||
id: bake_vw
|
||||
uses: docker/bake-action@4ba453fbc2db7735392b93edf935aaf9b1e8f747 # v6.5.0
|
||||
env:
|
||||
BASE_TAGS: "${{ env.BASE_TAGS }}"
|
||||
SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
|
||||
SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
|
||||
SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
|
||||
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
|
||||
with:
|
||||
pull: true
|
||||
push: true
|
||||
source: .
|
||||
files: docker/docker-bake.hcl
|
||||
targets: "${{ matrix.base_image }}-multi"
|
||||
set: |
|
||||
            *.cache-from=${{ env.BAKE_CACHE_FROM }}
            *.cache-to=${{ env.BAKE_CACHE_TO }}

      - name: Extract digest SHA
        shell: bash
        env:
          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
        run: |
          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
          echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"

      # Attest container images
      - name: Attest - docker.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.DOCKERHUB_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - ghcr.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.GHCR_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true

      - name: Attest - quay.io - ${{ matrix.base_image }}
        if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-name: ${{ vars.QUAY_REPO }}
          subject-digest: ${{ env.DIGEST_SHA }}
          push-to-registry: true


      # Extract the Alpine binaries from the containers
      - name: Extract binaries
        shell: bash
        env:
          REF_TYPE: ${{ github.ref_type }}
        run: |
          # Check which main tag we are going to build determined by ref_type
          if [[ "${REF_TYPE}" == "tag" ]]; then
            EXTRACT_TAG="latest"
          elif [[ "${REF_TYPE}" == "branch" ]]; then
            EXTRACT_TAG="testing"
          fi

          # Check which base_image was used and append -alpine if needed
          if [[ "${{ matrix.base_image }}" == "alpine" ]]; then
            EXTRACT_TAG="${EXTRACT_TAG}-alpine"
          fi

          # After each extraction the image is removed.
          # This is needed because using different platforms doesn't trigger a new pull/download

          # Extract amd64 binary
          docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp amd64:/vaultwarden vaultwarden-amd64-${{ matrix.base_image }}
          docker rm --force amd64
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract arm64 binary
          docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp arm64:/vaultwarden vaultwarden-arm64-${{ matrix.base_image }}
          docker rm --force arm64
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract armv7 binary
          docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp armv7:/vaultwarden vaultwarden-armv7-${{ matrix.base_image }}
          docker rm --force armv7
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

          # Extract armv6 binary
          docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
          docker cp armv6:/vaultwarden vaultwarden-armv6-${{ matrix.base_image }}
          docker rm --force armv6
          docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"

      # Upload artifacts to Github Actions and Attest the binaries
      - name: "Upload amd64 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
          path: vaultwarden-amd64-${{ matrix.base_image }}

      - name: "Upload arm64 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
          path: vaultwarden-arm64-${{ matrix.base_image }}

      - name: "Upload armv7 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
          path: vaultwarden-armv7-${{ matrix.base_image }}

      - name: "Upload armv6 artifact ${{ matrix.base_image }}"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
          path: vaultwarden-armv6-${{ matrix.base_image }}

      - name: "Attest artifacts ${{ matrix.base_image }}"
        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
        with:
          subject-path: vaultwarden-*
      # End Upload artifacts to Github Actions
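For reference, the `Extract digest SHA` step above parses the JSON metadata emitted by the bake step. A minimal sketch of what that `jq` query does, assuming a hypothetical metadata payload (the real `steps.bake_vw.outputs.metadata` output carries more fields):

```bash
# Hypothetical bake metadata for the "debian" matrix entry
BAKE_METADATA='{"debian-multi":{"containerimage.digest":"sha256:0123abcd"}}'

# Same query as the workflow step, with matrix.base_image substituted by "debian"
GET_DIGEST_SHA="$(jq -r '.["debian-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${GET_DIGEST_SHA}"   # prints DIGEST_SHA=sha256:0123abcd
```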
.github/workflows/releasecache-cleanup.yml (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
name: Cleanup
permissions: {}

on:
  workflow_dispatch:
    inputs:
      manual_trigger:
        description: "Manual trigger buildcache cleanup"
        required: false
        default: ""

  schedule:
    - cron: '0 1 * * FRI'

jobs:
  releasecache-cleanup:
    name: Releasecache Cleanup
    permissions:
      packages: write
    runs-on: ubuntu-24.04
    continue-on-error: true
    timeout-minutes: 30
    steps:
      - name: Delete vaultwarden-buildcache containers
        uses: actions/delete-package-versions@e5bc658cc4c965c472efe991f8beea3981499c55 # v5.0.0
        with:
          package-name: 'vaultwarden-buildcache'
          package-type: 'container'
          min-versions-to-keep: 0
          delete-only-untagged-versions: 'false'
.github/workflows/trivy.yml (vendored, new file, 53 lines)
@@ -0,0 +1,53 @@
name: Trivy
permissions: {}

on:
  push:
    branches:
      - main

    tags:
      - '*'

  pull_request:
    branches:
      - main

  schedule:
    - cron: '08 11 * * *'

jobs:
  trivy-scan:
    # Only run this in the upstream repo and not on forks
    # When all forks run this at the same time, it is causing `Too Many Requests` issues
    if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
    name: Trivy Scan
    permissions:
      contents: read
      actions: read
      security-events: write
    runs-on: ubuntu-24.04
    timeout-minutes: 30

    steps:
      - name: Checkout code
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false

      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@6c175e9c4083a92bbca2f9724c8a5e33bc2d97a5 # v0.30.0
        env:
          TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
          TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
        with:
          scan-type: repo
          ignore-unfixed: true
          format: sarif
          output: trivy-results.sarif
          severity: CRITICAL,HIGH

      - name: Upload Trivy scan results to GitHub Security tab
        uses: github/codeql-action/upload-sarif@86b04fb0e47484f7282357688f21d5d0e32175fe # v3.27.5
        with:
          sarif_file: 'trivy-results.sarif'
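The same scan can be reproduced locally with the Trivy CLI; a sketch, assuming Trivy is installed and run from the repository root (the flags mirror the workflow inputs above):

```bash
# Scan the checked-out repository, keep only unpatched CRITICAL/HIGH findings,
# and write a SARIF report like the one uploaded to the Security tab.
trivy repository . \
  --ignore-unfixed \
  --severity CRITICAL,HIGH \
  --format sarif \
  --output trivy-results.sarif
```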
.gitignore (vendored, 4 changed lines)
@@ -10,7 +10,7 @@ data
*.iml

# Environment file
# .env
.env

# Web vault
web-vault
web-vault
.hadolint.yaml (new file, 13 lines)
@@ -0,0 +1,13 @@
ignored:
  # To prevent issues and make clear some images only work on linux/amd64, we ignore this
  - DL3029
  # disable explicit version for apt install
  - DL3008
  # disable explicit version for apk install
  - DL3018
  # Ignore shellcheck info message
  - SC1091
trustedRegistries:
  - docker.io
  - ghcr.io
  - quay.io
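Hadolint reads this configuration automatically when run from the repository root; a sketch, assuming the `hadolint` binary is installed:

```bash
# Lint the generated Dockerfiles; DL3008, DL3018, DL3029 and SC1091 are ignored per the config above
hadolint docker/Dockerfile.debian docker/Dockerfile.alpine
```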
.pre-commit-config.yaml (new file, 52 lines)
@@ -0,0 +1,52 @@
---
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-yaml
      - id: check-json
      - id: check-toml
      - id: mixed-line-ending
        args: ["--fix=no"]
      - id: end-of-file-fixer
        exclude: "(.*js$|.*css$)"
      - id: check-case-conflict
      - id: check-merge-conflict
      - id: detect-private-key
      - id: check-symlinks
      - id: forbid-submodules
  - repo: local
    hooks:
      - id: fmt
        name: fmt
        description: Format files with cargo fmt.
        entry: cargo fmt
        language: system
        types: [rust]
        args: ["--", "--check"]
      - id: cargo-test
        name: cargo test
        description: Test the package for errors.
        entry: cargo test
        language: system
        args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
      - id: cargo-clippy
        name: cargo clippy
        description: Lint Rust sources
        entry: cargo clippy
        language: system
        args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
      - id: check-docker-templates
        name: check docker templates
        description: Check if the Docker templates are updated
        language: system
        entry: sh
        args:
          - "-c"
          - "cd docker && make"
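To run these hooks locally, the usual pre-commit workflow applies; a sketch, assuming `pre-commit` and the Rust toolchain are installed:

```bash
# Register the hooks in .git/hooks so they run on every commit
pre-commit install

# Run the full hook suite once against the whole tree
pre-commit run --all-files
```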
Cargo.lock (generated, 6258 changed lines)
File diff suppressed because it is too large.
Cargo.toml (335 changed lines)
@@ -1,55 +1,298 @@
|
||||
[workspace]
|
||||
members = ["macros"]
|
||||
|
||||
[package]
|
||||
name = "bitwarden_rs"
|
||||
version = "0.9.0"
|
||||
name = "vaultwarden"
|
||||
version = "1.0.0"
|
||||
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.86.0"
|
||||
resolver = "2"
|
||||
|
||||
repository = "https://github.com/dani-garcia/vaultwarden"
|
||||
readme = "README.md"
|
||||
license = "AGPL-3.0-only"
|
||||
publish = false
|
||||
build = "build.rs"
|
||||
|
||||
[features]
|
||||
# default = ["sqlite"]
|
||||
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
|
||||
enable_syslog = []
|
||||
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
|
||||
postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
|
||||
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "dep:libsqlite3-sys"]
|
||||
# Enable to use a vendored and statically linked openssl
|
||||
vendored_openssl = ["openssl/vendored"]
|
||||
# Enable MiMalloc memory allocator to replace the default malloc
|
||||
# This can improve performance for Alpine builds
|
||||
enable_mimalloc = ["dep:mimalloc"]
|
||||
# This is a development dependency, and should only be used during development!
|
||||
# It enables the usage of the diesel_logger crate, which is able to output the generated queries.
|
||||
# You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
|
||||
# if you want to turn off the logging for a specific run.
|
||||
query_logger = ["dep:diesel_logger"]
|
||||
s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]
|
||||
|
||||
# Enable unstable features, requires nightly
|
||||
# Currently only used to enable rusts official ip support
|
||||
unstable = []
|
||||
|
||||
[target."cfg(unix)".dependencies]
|
||||
# Logging
|
||||
syslog = "7.0.0"
|
||||
|
||||
[dependencies]
|
||||
# Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
|
||||
rocket = { version = "0.3.12", features = ["tls"] }
|
||||
rocket_codegen = "0.3.12"
|
||||
rocket_contrib = "0.3.12"
|
||||
macros = { path = "./macros" }
|
||||
|
||||
# HTTP client
|
||||
reqwest = "0.8.6"
|
||||
|
||||
# multipart/form-data support
|
||||
multipart = "0.14.2"
|
||||
|
||||
# A generic serialization/deserialization framework
|
||||
serde = "1.0.64"
|
||||
serde_derive = "1.0.64"
|
||||
serde_json = "1.0.19"
|
||||
|
||||
# A safe, extensible ORM and Query builder
|
||||
diesel = { version = "~1.2.2", features = ["sqlite", "chrono", "r2d2"] }
|
||||
diesel_migrations = { version = "~1.2.0", features = ["sqlite"] }
|
||||
|
||||
# Bundled SQLite
|
||||
libsqlite3-sys = { version = "0.9.1", features = ["bundled"] }
|
||||
|
||||
# Crypto library
|
||||
ring = { version = "= 0.11.0", features = ["rsa_signing"] }
|
||||
|
||||
# UUID generation
|
||||
uuid = { version = "0.6.5", features = ["v4"] }
|
||||
|
||||
# Date and time library for Rust
|
||||
chrono = "0.4.2"
|
||||
|
||||
# TOTP library
|
||||
oath = "0.10.2"
|
||||
|
||||
# Data encoding library
|
||||
data-encoding = "2.1.1"
|
||||
|
||||
# JWT library
|
||||
jsonwebtoken = "= 4.0.1"
|
||||
# Logging
|
||||
log = "0.4.27"
|
||||
fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
|
||||
tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
|
||||
|
||||
# A `dotenv` implementation for Rust
|
||||
dotenv = { version = "0.13.0", default-features = false }
|
||||
dotenvy = { version = "0.15.7", default-features = false }
|
||||
|
||||
# Lazy static macro
|
||||
lazy_static = "1.0.1"
|
||||
# Lazy initialization
|
||||
once_cell = "1.21.3"
|
||||
|
||||
[patch.crates-io]
|
||||
jsonwebtoken = { path = "libs/jsonwebtoken" } # Make jwt use ring 0.11, to match rocket
|
||||
# Numerical libraries
|
||||
num-traits = "0.2.19"
|
||||
num-derive = "0.4.2"
|
||||
bigdecimal = "0.4.8"
|
||||
|
||||
# Web framework
|
||||
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
|
||||
rocket_ws = { version ="0.1.1" }
|
||||
|
||||
# WebSockets libraries
|
||||
rmpv = "1.3.0" # MessagePack library
|
||||
|
||||
# Concurrent HashMap used for WebSocket messaging and favicons
|
||||
dashmap = "6.1.0"
|
||||
|
||||
# Async futures
|
||||
futures = "0.3.31"
|
||||
tokio = { version = "1.45.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
|
||||
tokio-util = { version = "0.7.15", features = ["compat"]}
|
||||
|
||||
# A generic serialization/deserialization framework
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
|
||||
# A safe, extensible ORM and Query builder
|
||||
diesel = { version = "2.2.11", features = ["chrono", "r2d2", "numeric"] }
|
||||
diesel_migrations = "2.2.0"
|
||||
diesel_logger = { version = "0.4.0", optional = true }
|
||||
|
||||
derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
|
||||
diesel-derive-newtype = "2.1.2"
|
||||
|
||||
# Bundled/Static SQLite
|
||||
libsqlite3-sys = { version = "0.33.0", features = ["bundled"], optional = true }
|
||||
|
||||
# Crypto-related libraries
|
||||
rand = "0.9.1"
|
||||
ring = "0.17.14"
|
||||
subtle = "2.6.1"
|
||||
|
||||
# UUID generation
|
||||
uuid = { version = "1.17.0", features = ["v4"] }
|
||||
|
||||
# Date and time libraries
|
||||
chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
|
||||
chrono-tz = "0.10.3"
|
||||
time = "0.3.41"
|
||||
|
||||
# Job scheduler
|
||||
job_scheduler_ng = "2.2.0"
|
||||
|
||||
# Data encoding library Hex/Base32/Base64
|
||||
data-encoding = "2.9.0"
|
||||
|
||||
# JWT library
|
||||
jsonwebtoken = "9.3.1"
|
||||
|
||||
# TOTP library
|
||||
totp-lite = "2.0.1"
|
||||
|
||||
# Yubico Library
|
||||
yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
|
||||
|
||||
# WebAuthn libraries
|
||||
webauthn-rs = "0.3.2"
|
||||
|
||||
# Handling of URL's for WebAuthn and favicons
|
||||
url = "2.5.4"
|
||||
|
||||
# Email libraries
|
||||
lettre = { version = "0.11.17", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
|
||||
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
|
||||
email_address = "0.2.9"
|
||||
|
||||
# HTML Template library
|
||||
handlebars = { version = "6.3.2", features = ["dir_source"] }
|
||||
|
||||
# HTTP client (Used for favicons, version check, DUO and HIBP API)
|
||||
reqwest = { version = "0.12.20", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
|
||||
hickory-resolver = "0.25.2"
|
||||
|
||||
# Favicon extraction libraries
|
||||
html5gum = "0.7.0"
|
||||
regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
|
||||
data-url = "0.3.1"
|
||||
bytes = "1.10.1"
|
||||
svg-hush = "0.9.5"
|
||||
|
||||
# Cache function results (Used for version check and favicon fetching)
|
||||
cached = { version = "0.55.1", features = ["async"] }
|
||||
|
||||
# Used for custom short lived cookie jar during favicon extraction
|
||||
cookie = "0.18.1"
|
||||
cookie_store = "0.21.1"
|
||||
|
||||
# Used by U2F, JWT and PostgreSQL
|
||||
openssl = "0.10.73"
|
||||
|
||||
# CLI argument parsing
|
||||
pico-args = "0.5.0"
|
||||
|
||||
# Macro ident concatenation
|
||||
pastey = "0.1.0"
|
||||
governor = "0.10.0"
|
||||
|
||||
# Check client versions for specific features.
|
||||
semver = "1.0.26"
|
||||
|
||||
# Allow overriding the default memory allocator
|
||||
# Mainly used for the musl builds, since the default musl malloc is very slow
|
||||
mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true }
|
||||
|
||||
which = "8.0.0"
|
||||
|
||||
# Argon2 library with support for the PHC format
|
||||
argon2 = "0.5.3"
|
||||
|
||||
# Reading a password from the cli for generating the Argon2id ADMIN_TOKEN
|
||||
rpassword = "7.4.0"
|
||||
|
||||
# Loading a dynamic CSS Stylesheet
|
||||
grass_compiler = { version = "0.13.4", default-features = false }
|
||||
|
||||
# File are accessed through Apache OpenDAL
|
||||
opendal = { version = "0.53.3", features = ["services-fs"], default-features = false }
|
||||
|
||||
# For retrieving AWS credentials, including temporary SSO credentials
|
||||
anyhow = { version = "1.0.98", optional = true }
|
||||
aws-config = { version = "1.8.0", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
|
||||
aws-credential-types = { version = "1.2.3", optional = true }
|
||||
aws-smithy-runtime-api = { version = "1.8.1", optional = true }
|
||||
http = { version = "1.3.1", optional = true }
|
||||
reqsign = { version = "0.16.5", optional = true }
|
||||
|
||||
# Strip debuginfo from the release builds
|
||||
# The debug symbols are to provide better panic traces
|
||||
# Also enable fat LTO and use 1 codegen unit for optimizations
|
||||
[profile.release]
|
||||
strip = "debuginfo"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
|
||||
# A little bit of a speedup
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
# Always build argon2 using opt-level 3
|
||||
# This is a huge speed improvement during testing
|
||||
[profile.dev.package.argon2]
|
||||
opt-level = 3
|
||||
|
||||
# Optimize for size
|
||||
[profile.release-micro]
|
||||
inherits = "release"
|
||||
opt-level = "z"
|
||||
strip = "symbols"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
|
||||
# Profile for systems with low resources
|
||||
# It will use less resources during build
|
||||
[profile.release-low]
|
||||
inherits = "release"
|
||||
strip = "symbols"
|
||||
lto = "thin"
|
||||
codegen-units = 16
|
||||
|
||||
# Linting config
|
||||
# https://doc.rust-lang.org/rustc/lints/groups.html
|
||||
[workspace.lints.rust]
|
||||
# Forbid
|
||||
unsafe_code = "forbid"
|
||||
non_ascii_idents = "forbid"
|
||||
|
||||
# Deny
|
||||
deprecated_in_future = "deny"
|
||||
future_incompatible = { level = "deny", priority = -1 }
|
||||
keyword_idents = { level = "deny", priority = -1 }
|
||||
let_underscore = { level = "deny", priority = -1 }
|
||||
noop_method_call = "deny"
|
||||
refining_impl_trait = { level = "deny", priority = -1 }
|
||||
rust_2018_idioms = { level = "deny", priority = -1 }
|
||||
rust_2021_compatibility = { level = "deny", priority = -1 }
|
||||
rust_2024_compatibility = { level = "deny", priority = -1 }
|
||||
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
|
||||
single_use_lifetimes = "deny"
|
||||
trivial_casts = "deny"
|
||||
trivial_numeric_casts = "deny"
|
||||
unused = { level = "deny", priority = -1 }
|
||||
unused_import_braces = "deny"
|
||||
unused_lifetimes = "deny"
|
||||
unused_qualifications = "deny"
|
||||
variant_size_differences = "deny"
|
||||
# Allow the following lints since these cause issues with Rust v1.84.0 or newer
|
||||
# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues
|
||||
if_let_rescope = "allow"
|
||||
tail_expr_drop_order = "allow"
|
||||
|
||||
# https://rust-lang.github.io/rust-clippy/stable/index.html
|
||||
[workspace.lints.clippy]
|
||||
# Warn
|
||||
dbg_macro = "warn"
|
||||
todo = "warn"
|
||||
|
||||
# Ignore/Allow
|
||||
result_large_err = "allow"
|
||||
|
||||
# Deny
|
||||
case_sensitive_file_extension_comparisons = "deny"
|
||||
cast_lossless = "deny"
|
||||
clone_on_ref_ptr = "deny"
|
||||
equatable_if_let = "deny"
|
||||
filter_map_next = "deny"
|
||||
float_cmp_const = "deny"
|
||||
inefficient_to_string = "deny"
|
||||
iter_on_empty_collections = "deny"
|
||||
iter_on_single_items = "deny"
|
||||
linkedlist = "deny"
|
||||
macro_use_imports = "deny"
|
||||
manual_assert = "deny"
|
||||
manual_instant_elapsed = "deny"
|
||||
manual_string_new = "deny"
|
||||
match_wildcard_for_single_variants = "deny"
|
||||
mem_forget = "deny"
|
||||
needless_continue = "deny"
|
||||
needless_lifetimes = "deny"
|
||||
option_option = "deny"
|
||||
string_add_assign = "deny"
|
||||
string_to_string = "deny"
|
||||
unnecessary_join = "deny"
|
||||
unnecessary_self_imports = "deny"
|
||||
unnested_or_patterns = "deny"
|
||||
unused_async = "deny"
|
||||
unused_self = "deny"
|
||||
verbose_file_reads = "deny"
|
||||
zero_sized_map_values = "deny"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
Dockerfile (deleted, 88 lines)
@@ -1,88 +0,0 @@
|
||||
# Using multistage build:
|
||||
# https://docs.docker.com/develop/develop-images/multistage-build/
|
||||
# https://whitfin.io/speeding-up-rust-docker-builds/
|
||||
####################### VAULT BUILD IMAGE #######################
|
||||
FROM node:9-alpine as vault
|
||||
|
||||
ENV VAULT_VERSION "1.26.0"
|
||||
ENV URL "https://github.com/bitwarden/web/archive/v${VAULT_VERSION}.tar.gz"
|
||||
|
||||
RUN apk add --update-cache --upgrade \
|
||||
curl \
|
||||
git \
|
||||
tar \
|
||||
&& npm install -g \
|
||||
gulp-cli \
|
||||
gulp
|
||||
|
||||
RUN mkdir /web-build \
|
||||
&& cd /web-build \
|
||||
&& curl -L "${URL}" | tar -xvz --strip-components=1
|
||||
|
||||
WORKDIR /web-build
|
||||
|
||||
COPY /docker/settings.Production.json /web-build/
|
||||
|
||||
RUN git config --global url."https://github.com/".insteadOf ssh://git@github.com/ \
|
||||
&& npm install \
|
||||
&& gulp dist:selfHosted \
|
||||
&& mv dist /web-vault
|
||||
|
||||
########################## BUILD IMAGE ##########################
|
||||
# We need to use the Rust build image, because
|
||||
# we need the Rust compiler and Cargo tooling
|
||||
FROM rustlang/rust:nightly as build
|
||||
|
||||
# Using bundled SQLite, no need to install it
|
||||
# RUN apt-get update && apt-get install -y\
|
||||
# sqlite3\
|
||||
# --no-install-recommends\
|
||||
# && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Creates a dummy project used to grab dependencies
|
||||
RUN USER=root cargo new --bin app
|
||||
WORKDIR /app
|
||||
|
||||
# Copies over *only* your manifests and vendored dependencies
|
||||
COPY ./Cargo.* ./
|
||||
COPY ./libs ./libs
|
||||
|
||||
# Builds your dependencies and removes the
|
||||
# dummy project, except the target folder
|
||||
# This folder contains the compiled dependencies
|
||||
RUN cargo build --release
|
||||
RUN find . -not -path "./target*" -delete
|
||||
|
||||
# Copies the complete project
|
||||
# To avoid copying unneeded files, use .dockerignore
|
||||
COPY . .
|
||||
|
||||
# Builds again, this time it'll just be
|
||||
# your actual source files being built
|
||||
RUN cargo build --release
|
||||
|
||||
######################## RUNTIME IMAGE ########################
|
||||
# Create a new stage with a minimal image
|
||||
# because we already have a binary built
|
||||
FROM debian:stretch-slim
|
||||
|
||||
# Install needed libraries
|
||||
RUN apt-get update && apt-get install -y\
|
||||
openssl\
|
||||
--no-install-recommends\
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN mkdir /data
|
||||
VOLUME /data
|
||||
EXPOSE 80
|
||||
|
||||
# Copies the files from the context (env file and web-vault)
|
||||
# and the binary from the "build" stage to the current stage
|
||||
COPY .env .
|
||||
COPY --from=vault /web-vault ./web-vault
|
||||
COPY --from=build app/target/release/bitwarden_rs .
|
||||
|
||||
# Configures the startup!
|
||||
# Use production to disable Rocket logging
|
||||
#CMD ROCKET_ENV=production ./bitwarden_rs
|
||||
CMD ROCKET_ENV=staging ./bitwarden_rs
|
Dockerfile (new symbolic link)
@@ -0,0 +1 @@
docker/Dockerfile.debian
LICENSE.txt (143 changed lines)
@@ -1,5 +1,5 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
@@ -7,17 +7,15 @@
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
@@ -26,44 +24,34 @@ them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
@@ -72,7 +60,7 @@ modification follow.
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
@@ -549,35 +537,45 @@ to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
@@ -635,40 +633,29 @@ the "copyright" line and a pointer to where the full notice is found.
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
it under the terms of the GNU Affero General Public License as published
|
||||
by the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
|
README.md (203 changed lines)
@@ -1,97 +1,144 @@
|
||||
## Easy setup (Docker)
|
||||
Install Docker to your system and then, from the project root, run:
|
||||
```sh
|
||||
# Build the docker image:
|
||||
docker build -t dani/bitwarden_rs .
|
||||

|
||||
|
||||
# Run the docker image with a docker volume:
|
||||
docker volume create bw_data
|
||||
docker run --name bitwarden_rs -t --init --rm --mount source=bw_data,target=/data -p 8000:80 dani/bitwarden_rs
|
||||
An alternative server implementation of the Bitwarden Client API, written in Rust and compatible with [official Bitwarden clients](https://bitwarden.com/download/) [[disclaimer](#disclaimer)], perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.
|
||||
|
||||
---
|
||||
|
||||
[](https://github.com/dani-garcia/vaultwarden/releases/latest)
|
||||
[](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden)
|
||||
[](https://hub.docker.com/r/vaultwarden/server)
|
||||
[](https://quay.io/repository/vaultwarden/server) <br>
|
||||
[](https://github.com/dani-garcia/vaultwarden/graphs/contributors)
|
||||
[](https://github.com/dani-garcia/vaultwarden/network/members)
|
||||
[](https://github.com/dani-garcia/vaultwarden/stargazers)
|
||||
[](https://github.com/dani-garcia/vaultwarden/issues)
|
||||
[](https://github.com/dani-garcia/vaultwarden/issues?q=is%3Aissue+is%3Aclosed)
|
||||
[](https://github.com/dani-garcia/vaultwarden/blob/main/LICENSE.txt) <br>
|
||||
[%3D'svg'%5D%2F*%5Blocal-name()%3D'g'%5D%5B2%5D%2F*%5Blocal-name()%3D'text'%5D%5B4%5D&style=flat-square&logo=rust&label=dependencies&color=005AA4)](https://deps.rs/repo/github/dani-garcia/vaultwarden)
|
||||
[](https://github.com/dani-garcia/vaultwarden/actions/workflows/release.yml)
|
||||
[](https://github.com/dani-garcia/vaultwarden/actions/workflows/build.yml) <br>
|
||||
[](https://matrix.to/#/#vaultwarden:matrix.org)
|
||||
[](https://github.com/dani-garcia/vaultwarden/discussions)
|
||||
[](https://vaultwarden.discourse.group/)
|
||||
|
||||
> [!IMPORTANT]
|
||||
> **When using this server, please report any bugs or suggestions directly to us (see [Get in touch](#get-in-touch)), regardless of whatever clients you are using (mobile, desktop, browser...). DO NOT use the official Bitwarden support channels.**
|
||||
|
||||
<br>
|
||||
|
||||
## Features
|
||||
|
||||
A nearly complete implementation of the Bitwarden Client API is provided, including:
|
||||
|
||||
* [Personal Vault](https://bitwarden.com/help/managing-items/)
|
||||
* [Send](https://bitwarden.com/help/about-send/)
|
||||
* [Attachments](https://bitwarden.com/help/attachments/)
|
||||
* [Website icons](https://bitwarden.com/help/website-icons/)
|
||||
* [Personal API Key](https://bitwarden.com/help/personal-api-key/)
|
||||
* [Organizations](https://bitwarden.com/help/getting-started-organizations/)
|
||||
- [Collections](https://bitwarden.com/help/about-collections/),
|
||||
[Password Sharing](https://bitwarden.com/help/sharing/),
|
||||
[Member Roles](https://bitwarden.com/help/user-types-access-control/),
|
||||
[Groups](https://bitwarden.com/help/about-groups/),
|
||||
[Event Logs](https://bitwarden.com/help/event-logs/),
|
||||
[Admin Password Reset](https://bitwarden.com/help/admin-reset/),
|
||||
[Directory Connector](https://bitwarden.com/help/directory-sync/),
|
||||
[Policies](https://bitwarden.com/help/policies/)
|
||||
* [Multi/Two Factor Authentication](https://bitwarden.com/help/bitwarden-field-guide-two-step-login/)
|
||||
- [Authenticator](https://bitwarden.com/help/setup-two-step-login-authenticator/),
|
||||
[Email](https://bitwarden.com/help/setup-two-step-login-email/),
|
||||
[FIDO2 WebAuthn](https://bitwarden.com/help/setup-two-step-login-fido/),
|
||||
[YubiKey](https://bitwarden.com/help/setup-two-step-login-yubikey/),
|
||||
[Duo](https://bitwarden.com/help/setup-two-step-login-duo/)
|
||||
* [Emergency Access](https://bitwarden.com/help/emergency-access/)
|
||||
* [Vaultwarden Admin Backend](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-admin-page)
|
||||
* [Modified Web Vault client](https://github.com/dani-garcia/bw_web_builds) (Bundled within our containers)
|
||||
|
||||
<br>
|
||||
|
||||
## Usage
|
||||
|
||||
> [!IMPORTANT]
|
||||
> Most modern web browsers disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault via HTTPS or localhost.
|
||||
>
|
||||
>This can be configured in [Vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)).
|
||||
>
|
||||
>If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy or Traefik (see examples linked above).
|
||||
|
||||
> [!TIP]
|
||||
>**For more detailed examples on how to install, use and configure Vaultwarden you can check our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).**
|
||||
|
||||
The main way to use Vaultwarden is via our container images which are published to [ghcr.io](https://github.com/dani-garcia/vaultwarden/pkgs/container/vaultwarden), [docker.io](https://hub.docker.com/r/vaultwarden/server) and [quay.io](https://quay.io/repository/vaultwarden/server).
|
||||
|
||||
There are also [community driven packages](https://github.com/dani-garcia/vaultwarden/wiki/Third-party-packages) which can be used, but those might be lagging behind the latest version or might deviate in the way Vaultwarden is configured, as described in our [Wiki](https://github.com/dani-garcia/vaultwarden/wiki).
|
||||
|
||||
### Docker/Podman CLI
|
||||
|
||||
Pull the container image and mount a volume from the host for persistent storage.<br>
|
||||
You can replace `docker` with `podman` if you prefer to use podman.
|
||||
|
||||
```shell
|
||||
docker pull vaultwarden/server:latest
|
||||
docker run --detach --name vaultwarden \
|
||||
--env DOMAIN="https://vw.domain.tld" \
|
||||
--volume /vw-data/:/data/ \
|
||||
--restart unless-stopped \
|
||||
--publish 80:80 \
|
||||
vaultwarden/server:latest
|
||||
```
|
||||
|
||||
#### Other possible Docker options
|
||||
This will preserve any persistent data under `/vw-data/`, you can adapt the path to whatever suits you.
|
||||
|
||||
To run the container in the background, add the `-d` parameter.
|
||||
### Docker Compose
|
||||
|
||||
To check the logs when in background, run `docker logs bitwarden_rs`
|
||||
To use Docker compose you need to create a `compose.yaml` which will hold the configuration to run the Vaultwarden container.
|
||||
|
||||
To stop the container in background, run `docker stop bitwarden_rs`
|
||||
|
||||
To make sure the container is restarted automatically, add the `--restart unless-stopped` parameter
|
||||
|
||||
To run the image with a host bind, change the `--mount` parameter to:
|
||||
```
|
||||
--mount type=bind,source=<absolute_path>,target=/data
|
||||
```
|
||||
Where <absolute_path> is an absolute path in the hosts file system (e.g. C:\bitwarden\data)
|
||||
|
||||
|
||||
## How to compile bitwarden_rs
|
||||
Install `rust nightly`, in Windows the recommended way is through `rustup`.
|
||||
|
||||
Install the `openssl` library, in Windows the best option is Microsoft's `vcpkg`,
|
||||
on other systems use their respective package managers.
|
||||
|
||||
Then run:
|
||||
```sh
|
||||
cargo run
|
||||
# or
|
||||
cargo build
|
||||
```yaml
|
||||
services:
|
||||
vaultwarden:
|
||||
image: vaultwarden/server:latest
|
||||
container_name: vaultwarden
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
DOMAIN: "https://vw.domain.tld"
|
||||
volumes:
|
||||
- ./vw-data/:/data/
|
||||
ports:
|
||||
- 80:80
|
||||
```
|
||||
|
||||
## How to install the web-vault locally
|
||||
If you're using docker image, you can just update `VAULT_VERSION` variable in Dockerfile and rebuild the image.
|
||||
<br>
|
||||
|
||||
Install `node.js` and either `yarn` or `npm` (usually included with node)
|
||||
## Get in touch
|
||||
|
||||
Clone the web-vault outside the project:
|
||||
```
|
||||
git clone https://github.com/bitwarden/web.git web-vault
|
||||
```
|
||||
Have a question, suggestion or need help? Join our community on [Matrix](https://matrix.to/#/#vaultwarden:matrix.org), [GitHub Discussions](https://github.com/dani-garcia/vaultwarden/discussions) or [Discourse Forums](https://vaultwarden.discourse.group/).
|
||||
|
||||
Modify `web-vault/settings.Production.json` to look like this:
|
||||
```json
|
||||
{
|
||||
"appSettings": {
|
||||
"apiUri": "/api",
|
||||
"identityUri": "/identity",
|
||||
"iconsUri": "/icons",
|
||||
"stripeKey": "",
|
||||
"braintreeKey": ""
|
||||
}
|
||||
}
|
||||
```
|
||||
Encountered a bug or crash? Please search our issue tracker and discussions to see if it's already been reported. If not, please [start a new discussion](https://github.com/dani-garcia/vaultwarden/discussions) or [create a new issue](https://github.com/dani-garcia/vaultwarden/issues/). Ensure you're using the latest version of Vaultwarden and there aren't any similar issues open or closed!
|
||||
|
||||
Then, run the following from the `web-vault` dir:
|
||||
```sh
|
||||
# With yarn (recommended)
|
||||
yarn
|
||||
yarn gulp dist:selfHosted
|
||||
<br>
|
||||
|
||||
# With npm
|
||||
npm install
|
||||
npx gulp dist:selfHosted
|
||||
```
|
||||
## Contributors
|
||||
|
||||
Finally copy the contents of the `web-vault/dist` folder into the `bitwarden_rs/web-vault` folder.
|
||||
Thanks for your contribution to the project!
|
||||
|
||||
## How to recreate database schemas
|
||||
Install diesel-cli with cargo:
|
||||
```sh
|
||||
cargo install diesel_cli --no-default-features --features sqlite-bundled # Or use only sqlite to use the system version
|
||||
```
|
||||
[](https://github.com/dani-garcia/vaultwarden/graphs/contributors)<br>
|
||||
[](https://github.com/dani-garcia/vaultwarden/graphs/contributors)
|
||||
|
||||
Make sure that the correct path to the database is in the `.env` file.
|
||||
<br>
|
||||
|
||||
If you want to modify the schemas, create a new migration with:
|
||||
```
|
||||
diesel migration generate <name>
|
||||
```
|
||||
## Disclaimer
|
||||
|
||||
Modify the *.sql files, making sure that any changes are reverted in the down.sql file.
|
||||
**This project is not associated with [Bitwarden](https://bitwarden.com/) or Bitwarden, Inc.**
|
||||
|
||||
Apply the migrations and save the generated schemas as follows:
|
||||
```
|
||||
diesel migration redo
|
||||
diesel print-schema > src/db/schema.rs
|
||||
```
|
||||
However, one of the active maintainers for Vaultwarden is employed by Bitwarden and is allowed to contribute to the project on their own time. These contributions are independent of Bitwarden and are reviewed by other maintainers.
|
||||
|
||||
The maintainers work together to set the direction for the project, focusing on serving the self-hosting community, including individuals, families, and small organizations, while ensuring the project's sustainability.
|
||||
|
||||
**Please note:** We cannot be held liable for any data loss that may occur while using Vaultwarden. This includes passwords, attachments, and other information handled by the application. We highly recommend performing regular backups of your files and database. However, should you experience data loss, we encourage you to contact us immediately.
|
||||
|
||||
<br>
|
||||
|
||||
## Bitwarden_RS
|
||||
|
||||
This project was known as Bitwarden_RS and has been renamed to separate itself from the official Bitwarden server in the hopes of avoiding confusion and trademark/branding issues.<br>
|
||||
Please see [#1642 - v1.21.0 release and project rename to Vaultwarden](https://github.com/dani-garcia/vaultwarden/discussions/1642) for more explanation.
|
||||
|
SECURITY.md (new file, 49 lines)
@@ -0,0 +1,49 @@
Vaultwarden tries to prevent security issues but there could always slip something through.
If you believe you've found a security issue in our application, we encourage you to
notify us. We welcome working with you to resolve the issue promptly. Thanks in advance!

# Disclosure Policy

- Let us know as soon as possible upon discovery of a potential security issue, and we'll make every
  effort to quickly resolve the issue.
- Provide us a reasonable amount of time to resolve the issue before any disclosure to the public or a
  third-party. We may publicly disclose the issue before resolving it, if appropriate.
- Make a good faith effort to avoid privacy violations, destruction of data, and interruption or
  degradation of our service. Only interact with accounts you own or with explicit permission of the
  account holder.

# In-scope

- Security issues in any current release of Vaultwarden. Source code is available at https://github.com/dani-garcia/vaultwarden. This includes the current `latest` release and `main / testing` release.

# Exclusions

The following bug classes are out of scope:

- Bugs that are already reported on Vaultwarden's issue tracker (https://github.com/dani-garcia/vaultwarden/issues)
- Bugs that are not part of Vaultwarden, like on the web-vault or mobile and desktop clients. These issues need to be reported in the respective project issue tracker at https://github.com/bitwarden to which we are not associated
- Issues in an upstream software dependency (ex: Rust, or External Libraries) which are already reported to the upstream maintainer
- Attacks requiring physical access to a user's device
- Issues related to software or protocols not under Vaultwarden's control
- Vulnerabilities in outdated versions of Vaultwarden
- Missing security best practices that do not directly lead to a vulnerability (You may still report them as a normal issue)
- Issues that do not have any impact on the general public

While researching, we'd like to ask you to refrain from:

- Denial of service
- Spamming
- Social engineering (including phishing) of Vaultwarden developers, contributors or users

Thank you for helping keep Vaultwarden and our users safe!

# How to contact us

- You can contact us on Matrix https://matrix.to/#/#vaultwarden:matrix.org (users: `@danig:matrix.org` and/or `@blackdex:matrix.org`)
- You can send an email to security@vaultwarden.org to report a security issue.<br>
  If you want to send an encrypted email you can use the following GPG key: 13BB3A34C9E380258CE43D595CB150B31F6426BC<br>
  It can be found on several public GPG key servers.<br>
  * https://keys.openpgp.org/search?q=security%40vaultwarden.org
  * https://keys.mailvelope.com/pks/lookup?op=get&search=security%40vaultwarden.org
  * https://pgpkeys.eu/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index
  * https://keyserver.ubuntu.com/pks/lookup?search=security%40vaultwarden.org&fingerprint=on&op=index
build.rs (new file, 97 lines)
@@ -0,0 +1,97 @@
use std::env;
use std::process::Command;

fn main() {
    // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros
    #[cfg(feature = "sqlite")]
    println!("cargo:rustc-cfg=sqlite");
    #[cfg(feature = "mysql")]
    println!("cargo:rustc-cfg=mysql");
    #[cfg(feature = "postgresql")]
    println!("cargo:rustc-cfg=postgresql");
    #[cfg(feature = "query_logger")]
    println!("cargo:rustc-cfg=query_logger");
    #[cfg(feature = "s3")]
    println!("cargo:rustc-cfg=s3");

    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
    compile_error!(
        "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
    );

    // Use check-cfg to let cargo know which cfg's we define,
    // and avoid warnings when they are used in the code.
    println!("cargo::rustc-check-cfg=cfg(sqlite)");
    println!("cargo::rustc-check-cfg=cfg(mysql)");
    println!("cargo::rustc-check-cfg=cfg(postgresql)");
    println!("cargo::rustc-check-cfg=cfg(query_logger)");
    println!("cargo::rustc-check-cfg=cfg(s3)");

    // Rerun when these paths are changed.
    // Someone could have checked-out a tag or specific commit, but no other files changed.
    println!("cargo:rerun-if-changed=.git");
    println!("cargo:rerun-if-changed=.git/HEAD");
    println!("cargo:rerun-if-changed=.git/index");
    println!("cargo:rerun-if-changed=.git/refs/tags");

    #[cfg(all(not(debug_assertions), feature = "query_logger"))]
    compile_error!("Query Logging is only allowed during development, it is not intended for production usage!");

    // Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION.
    // If neither exist, read from git.
    let maybe_vaultwarden_version =
        env::var("VW_VERSION").or_else(|_| env::var("BWRS_VERSION")).or_else(|_| version_from_git_info());

    if let Ok(version) = maybe_vaultwarden_version {
        println!("cargo:rustc-env=VW_VERSION={version}");
        println!("cargo:rustc-env=CARGO_PKG_VERSION={version}");
    }
}

fn run(args: &[&str]) -> Result<String, std::io::Error> {
    let out = Command::new(args[0]).args(&args[1..]).output()?;
    if !out.status.success() {
        use std::io::Error;
        return Err(Error::other("Command not successful"));
    }
    Ok(String::from_utf8(out.stdout).unwrap().trim().to_string())
}

/// This method reads info from Git, namely tags, branch, and revision
/// To access these values, use:
///   - `env!("GIT_EXACT_TAG")`
///   - `env!("GIT_LAST_TAG")`
///   - `env!("GIT_BRANCH")`
///   - `env!("GIT_REV")`
///   - `env!("VW_VERSION")`
fn version_from_git_info() -> Result<String, std::io::Error> {
    // The exact tag for the current commit, can be empty when
    // the current commit doesn't have an associated tag
    let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok();
    if let Some(ref exact) = exact_tag {
        println!("cargo:rustc-env=GIT_EXACT_TAG={exact}");
    }

    // The last available tag, equal to exact_tag when
    // the current commit is tagged
    let last_tag = run(&["git", "describe", "--abbrev=0", "--tags"])?;
    println!("cargo:rustc-env=GIT_LAST_TAG={last_tag}");

    // The current branch name
    let branch = run(&["git", "rev-parse", "--abbrev-ref", "HEAD"])?;
    println!("cargo:rustc-env=GIT_BRANCH={branch}");

    // The current git commit hash
    let rev = run(&["git", "rev-parse", "HEAD"])?;
    let rev_short = rev.get(..8).unwrap_or_default();
    println!("cargo:rustc-env=GIT_REV={rev_short}");

    // Combined version
    if let Some(exact) = exact_tag {
        Ok(exact)
    } else if &branch != "main" && &branch != "master" && &branch != "HEAD" {
        Ok(format!("{last_tag}-{rev_short} ({branch})"))
    } else {
        Ok(format!("{last_tag}-{rev_short}"))
    }
}
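Because the build script prefers `$VW_VERSION` (or the legacy `$BWRS_VERSION`) over the Git metadata, the version string baked into the binary can be pinned at compile time; a sketch (the version number is only an example):

```bash
# Override the version instead of deriving it from git tags/branches
VW_VERSION=1.34.0 cargo build --release --features sqlite
```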
diesel.toml (new file, 5 lines)
@@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/db/schema.rs"
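With `[print_schema]` pointing at `src/db/schema.rs`, the Diesel CLI keeps that file in sync when migrations are applied; a sketch, assuming `diesel_cli` is installed and `DATABASE_URL` is set in `.env`:

```bash
diesel migration run    # apply pending migrations; regenerates src/db/schema.rs per this config
diesel print-schema     # print the generated schema for inspection
```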
docker/DockerSettings.yaml (new file, 29 lines)
@@ -0,0 +1,29 @@
---
vault_version: "v2025.7.0"
vault_image_digest: "sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e"
# Cross Compile Docker Helper Scripts v1.6.1
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.88.0 # Rust version to be used
debian_version: bookworm # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
build_stage_image:
  debian:
    image: "docker.io/library/rust:{{rust_version}}-slim-{{debian_version}}"
    platform: "$BUILDPLATFORM"
  alpine:
    image: "build_${TARGETARCH}${TARGETVARIANT}"
    platform: "linux/amd64" # The Alpine build images only have linux/amd64 images
    arch_image:
      amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}"
      arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}"
      armv7: "ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-{{rust_version}}"
      armv6: "ghcr.io/blackdex/rust-musl:arm-musleabi-stable-{{rust_version}}"
# The final image which will be used to distribute the container images
runtime_stage_image:
  debian: "docker.io/library/debian:{{debian_version}}-slim"
  alpine: "docker.io/library/alpine:{{alpine_version}}"
159  docker/Dockerfile.alpine  Normal file
@@ -0,0 +1,159 @@
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate the two Dockerfiles `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
#     [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
#     [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault

########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.88.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.88.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.88.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.88.0 AS build_armv6

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root" \
    # Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
    # Debian Bookworm already contains libpq v15
    PQ_LIB_DIR="/usr/local/musl/pq15/lib"


# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Alpine based builds
RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \
    # Output the current contents of the file
    cat /env-cargo

RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
# Enable MiMalloc to improve performance on Alpine builds
ARG DB=sqlite,mysql,postgresql,enable_mimalloc

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80 \
    SSL_CERT_DIR=/etc/ssl/certs

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apk --no-cache add \
        ca-certificates \
        curl \
        openssl \
        tzdata

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]
202  docker/Dockerfile.debian  Normal file
@@ -0,0 +1,202 @@
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate the two Dockerfiles `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
#     $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
#     [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
#     [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault

########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.88.0-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"

# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
# Install the libc cross packages based upon the debian-arch
RUN apt-get update && \
    apt-get install -y \
        --no-install-recommends \
        clang \
        pkg-config \
        git \
        "libc6-$(xx-info debian-arch)-cross" \
        "libc6-dev-$(xx-info debian-arch)-cross" \
        "linux-libc-dev-$(xx-info debian-arch)-cross" && \
    xx-apt-get install -y \
        --no-install-recommends \
        gcc \
        libmariadb3 \
        libpq-dev \
        libpq5 \
        libssl-dev \
        zlib1g-dev && \
    # Force install arch dependent mariadb dev packages
    # Installing them the normal way breaks several other packages (again)
    apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
    dpkg --force-all -i ./libmariadb-dev*.deb && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo

RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80 \
    DEBIAN_FRONTEND=noninteractive

# Create data folder and Install needed libraries
RUN mkdir /data && \
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb-dev-compat \
        libpq5 \
        openssl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]
246  docker/Dockerfile.j2  Normal file
@@ -0,0 +1,246 @@
# syntax=docker/dockerfile:1
# check=skip=FromPlatformFlagConstDisallowed,RedundantTargetPlatform

# This file was generated using a Jinja2 template.
# Please make your changes in `DockerSettings.yaml` or `Dockerfile.j2` and then `make`
# This will generate the two Dockerfiles `Dockerfile.debian` and `Dockerfile.alpine`

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/

####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version }}
#     $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version }}
#     [docker.io/vaultwarden/web-vault@{{ vault_image_digest }}]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{ '{{' }}.RepoTags}}" docker.io/vaultwarden/web-vault@{{ vault_image_digest }}
#     [docker.io/vaultwarden/web-vault:{{ vault_version }}]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_digest }} AS vault

{% if base == "debian" %}
########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx
{% elif base == "alpine" %}
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
{% for arch in build_stage_image[base].arch_image %}
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
{% endfor %}
{% endif %}

########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build
{% if base == "debian" %}
COPY --from=xx / /
{% endif %}
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"
{%- if base == "alpine" %} \
    # Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
    # Debian Bookworm already contains libpq v15
    PQ_LIB_DIR="/usr/local/musl/pq15/lib"
{% endif %}

{% if base == "debian" %}

# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
# Install the libc cross packages based upon the debian-arch
RUN apt-get update && \
    apt-get install -y \
        --no-install-recommends \
        clang \
        pkg-config \
        git \
        "libc6-$(xx-info debian-arch)-cross" \
        "libc6-dev-$(xx-info debian-arch)-cross" \
        "linux-libc-dev-$(xx-info debian-arch)-cross" && \
    xx-apt-get install -y \
        --no-install-recommends \
        gcc \
        libmariadb3 \
        libpq-dev \
        libpq5 \
        libssl-dev \
        zlib1g-dev && \
    # Force install arch dependent mariadb dev packages
    # Installing them the normal way breaks several other packages (again)
    apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
    dpkg --force-all -i ./libmariadb-dev*.deb && \
    # Run xx-cargo early, since it sometimes seems to break when run at a later stage
    echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
{% endif %}

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" && \
    rustup set profile minimal

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

{% if base == "debian" %}
# Environment variables for Cargo on Debian based builds
ARG TARGET_PKG_CONFIG_PATH

RUN source /env-cargo && \
    if xx-info is-cross ; then \
        # We can't use xx-cargo since that uses clang, which doesn't work for our libraries.
        # Because of this we generate the needed environment variables here which we can load in the needed steps.
        echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CARGO_TARGET_$(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_LINKER=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \
        echo "export CROSS_COMPILE=1" >> /env-cargo && \
        echo "export PKG_CONFIG_ALLOW_CROSS=1" >> /env-cargo && \
        # For some architectures `xx-info` returns a triple which doesn't match the path on disk
        # In those cases you can override this by setting the `TARGET_PKG_CONFIG_PATH` build-arg
        if [[ -n "${TARGET_PKG_CONFIG_PATH}" ]]; then \
            echo "export TARGET_PKG_CONFIG_PATH=${TARGET_PKG_CONFIG_PATH}" >> /env-cargo ; \
        else \
            echo "export PKG_CONFIG_PATH=/usr/lib/$(xx-info)/pkgconfig" >> /env-cargo ; \
        fi && \
        echo "# End of env-cargo" >> /env-cargo ; \
    fi && \
    # Output the current contents of the file
    cat /env-cargo

{% elif base == "alpine" %}
# Environment variables for Cargo on Alpine based builds
RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \
    # Output the current contents of the file
    cat /env-cargo

{% endif %}
RUN source /env-cargo && \
    rustup target add "${CARGO_TARGET}"

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./
COPY ./macros ./macros

ARG CARGO_PROFILE=release

# Configure the DB ARG as late as possible to not invalidate the cached layers above
{% if base == "debian" %}
ARG DB=sqlite,mysql,postgresql
{% elif base == "alpine" %}
# Enable MiMalloc to improve performance on Alpine builds
ARG DB=sqlite,mysql,postgresql,enable_mimalloc
{% endif %}

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN source /env-cargo && \
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

ARG VW_VERSION

# Builds again, this time it will be the actual source files being built
RUN source /env-cargo && \
    # Make sure that we actually build the project by updating the src/main.rs timestamp
    # Also do this for build.rs to ensure the version is rechecked
    touch build.rs src/main.rs && \
    # Create a symlink to the binary target folder to easily copy the binary in the final stage
    cargo build --features ${DB} --profile "${CARGO_PROFILE}" --target="${CARGO_TARGET}" && \
    if [[ "${CARGO_PROFILE}" == "dev" ]] ; then \
        ln -vfsr "/app/target/${CARGO_TARGET}/debug" /app/target/final ; \
    else \
        ln -vfsr "/app/target/${CARGO_TARGET}/${CARGO_PROFILE}" /app/target/final ; \
    fi


######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
#
# To build these images you need to have qemu binfmt support.
# See the following pages to help install these tools locally
# Ubuntu/Debian: https://wiki.debian.org/QemuUserEmulation
# Arch Linux: https://wiki.archlinux.org/title/QEMU#Chrooting_into_arm/arm64_environment_from_x86_64
#
# Or use a Docker image which modifies your host system to support this.
# The GitHub Actions Workflow uses the same image as used below.
# See: https://github.com/tonistiigi/binfmt
# Usage: docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM {{ runtime_stage_image[base] }}

ENV ROCKET_PROFILE="release" \
    ROCKET_ADDRESS=0.0.0.0 \
    ROCKET_PORT=80
{%- if base == "debian" %} \
    DEBIAN_FRONTEND=noninteractive
{% elif base == "alpine" %} \
    SSL_CERT_DIR=/etc/ssl/certs
{% endif %}

# Create data folder and Install needed libraries
RUN mkdir /data && \
{% if base == "debian" %}
    apt-get update && apt-get install -y \
        --no-install-recommends \
        ca-certificates \
        curl \
        libmariadb-dev-compat \
        libpq5 \
        openssl && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
{% elif base == "alpine" %}
    apk --no-cache add \
        ca-certificates \
        curl \
        openssl \
        tzdata
{% endif %}

VOLUME /data
EXPOSE 80

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /

COPY docker/healthcheck.sh docker/start.sh /

COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/final/vaultwarden .

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

CMD ["/start.sh"]
4  docker/Makefile  Normal file
@@ -0,0 +1,4 @@
all:
	./render_template Dockerfile.j2 '{"base": "debian"}' > Dockerfile.debian
	./render_template Dockerfile.j2 '{"base": "alpine"}' > Dockerfile.alpine
.PHONY: all
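After editing `DockerSettings.yaml` or `Dockerfile.j2`, the two generated Dockerfiles can be refreshed with this Makefile. A small sketch, run from the repo root (assumes the Python dependencies used by `render_template`, i.e. `jinja2` and `pyyaml`, are installed):

```bash
# Re-render Dockerfile.debian and Dockerfile.alpine from the Jinja2 template
make -C docker
```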
188  docker/README.md  Normal file
@@ -0,0 +1,188 @@
# Vaultwarden Container Building

To build and release new testing and stable releases of Vaultwarden we use `docker buildx bake`.<br>
This can be used locally by running the command yourself, but it is also used by GitHub Actions.

This makes it easier for us to test and maintain the different architectures we provide.<br>
We also have just two Dockerfiles: one for Debian and one for Alpine based images.<br>
With just these two files we can build both Debian and Alpine images for the following platforms:
- amd64 (linux/amd64)
- arm64 (linux/arm64)
- armv7 (linux/arm/v7)
- armv6 (linux/arm/v6)

Some platforms are unsupported for the Debian based images. These are not built and tested by default, and are only provided to make it easier for users to build for these architectures; see the example after this list.
- 386 (linux/386)
- ppc64le (linux/ppc64le)
- s390x (linux/s390x)
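For example, one of the unsupported Debian targets can be baked explicitly (a sketch; the `debian-386`, `debian-ppc64le` and `debian-s390x` targets are defined in `docker-bake.hcl` below):

```bash
# Bake the unsupported 32-bit x86 Debian image (not built or tested by default)
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl debian-386
```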

To build these containers you need to enable QEMU binfmt support to be able to run/emulate architectures which are different from your host.<br>
This ensures the container build process can run binaries from other architectures.<br>

**NOTE**: Run all the examples below from the root of the repo.<br>


## How to install QEMU binfmt support

This is different per host OS, but most OSes support it in some way.<br>

### Ubuntu/Debian
```bash
apt install binfmt-support qemu-user-static
```

### Arch Linux (and others based upon it)
```bash
pacman -S qemu-user-static qemu-user-static-binfmt
```

### Fedora
```bash
dnf install qemu-user-static
```

### Others
There is also the option to use another Docker container to provide support for this.
```bash
# To install and activate
docker run --privileged --rm tonistiigi/binfmt --install arm64,arm
# To uninstall
docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
```


## Single architecture container building

You can build a container per supported architecture as long as you have QEMU binfmt support installed on your system.<br>

```bash
# Default bake triggers a Debian build using the host's architecture
docker buildx bake --file docker/docker-bake.hcl

# Bake Debian ARM64 using a debug build
CARGO_PROFILE=dev \
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl debian-arm64

# Bake Alpine ARMv6 as a release build
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl alpine-armv6
```


## Local Multi Architecture container building

Start with the initialization; this only needs to be done once.

```bash
# Create and use a new buildx builder instance which connects to the host network
docker buildx create --name vaultwarden --use --driver-opt network=host

# Validate it runs
docker buildx inspect --bootstrap

# Create a local container registry directly reachable on the localhost
docker run -d --name registry --network host registry:2
```

After that is done, you should be able to build and push to the local registry.<br>
Use the following command with the modified variables to bake the Alpine images.<br>
Replace `alpine` with `debian` if you want to build the Debian multi arch images.

```bash
# Start a buildx bake using a debug build
CARGO_PROFILE=dev \
SOURCE_COMMIT="$(git rev-parse HEAD)" \
CONTAINER_REGISTRIES="localhost:5000/vaultwarden/server" \
docker buildx bake --file docker/docker-bake.hcl alpine-multi
```


## Using the `bake.sh` script

To make it a bit easier to trigger a build, there is also a `bake.sh` script.<br>
This script calls `docker buildx bake` with all the right parameters and also generates the `SOURCE_COMMIT` and `SOURCE_VERSION` variables.<br>
This script can be called from either the repo root or the docker directory.

So, if you want to build a Multi Arch Alpine container pushing to your localhost registry, you can run this from within the docker directory. (Just make sure you executed the initialization steps above first.)
```bash
CONTAINER_REGISTRIES="localhost:5000/vaultwarden/server" \
./bake.sh alpine-multi
```

Or if you just want to build a Debian container from the repo root, you can run this.
```bash
docker/bake.sh
```

You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
This will also append those values to the tag so you can see the built container when running `docker images`.

You can also append extra arguments after the target if you want. This can be useful, for example, to print what bake will use.
```bash
docker/bake.sh alpine-all --print
```

### Testing baked images

To test these images you can run them by using the correct tag and providing the platform.<br>
For example, after you have built an arm64 image via `./bake.sh debian-arm64` you can run:
```bash
docker run --rm -it \
  -e DISABLE_ADMIN_TOKEN=true \
  -e I_REALLY_WANT_VOLATILE_STORAGE=true \
  -p8080:80 --platform=linux/arm64 \
  vaultwarden/server:testing-arm64
```


## Using the `podman-bake.sh` script

To make building easier with podman as well, there is a `podman-bake.sh` script.<br>
This script calls `podman buildx build` with the needed parameters and, the same as `bake.sh`, it will generate some variables automatically.<br>
This script can be called from either the repo root or the docker directory.

**NOTE:** Unlike the `bake.sh` script, this only supports a single `CONTAINER_REGISTRIES` and a single `BASE_TAGS` value, no comma separated values. It also only supports building separate architectures, no Multi Arch containers.

To build an Alpine arm64 image with only sqlite support and mimalloc, run this:
```bash
DB="sqlite,enable_mimalloc" \
./podman-bake.sh alpine-arm64
```

Or if you just want to build a Debian container from the repo root, you can run this.
```bash
docker/podman-bake.sh
```

You can append extra arguments after the target if you want. This can be useful, for example, to disable the cache like this.
```bash
./podman-bake.sh alpine-arm64 --no-cache
```

For the podman builds you can, just like with the `bake.sh` script, also append the architecture to build for that specific platform.<br>

### Testing podman built images

The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.

```bash
podman run --rm -it \
  -e DISABLE_ADMIN_TOKEN=true \
  -e I_REALLY_WANT_VOLATILE_STORAGE=true \
  -p8080:80 --platform=linux/arm64 \
  localhost/vaultwarden/server:testing-arm64
```


## Variables supported
| Variable              | default            | description |
| --------------------- | ------------------ | ----------- |
| CARGO_PROFILE         | null               | Which cargo profile to use. `null` means what is defined in the Dockerfile |
| DB                    | null               | Which `features` to build. `null` means what is defined in the Dockerfile |
| SOURCE_REPOSITORY_URL | null               | The source repository from which this build is triggered |
| SOURCE_COMMIT         | null               | The commit hash of the current commit for this build |
| SOURCE_VERSION        | null               | The current exact tag of this commit, else the last tag and the first 8 chars of the source commit |
| BASE_TAGS             | testing            | Tags to be used. Can be a comma separated value like "latest,1.29.2" |
| CONTAINER_REGISTRIES  | vaultwarden/server | Comma separated value of container registries. Like `ghcr.io/dani-garcia/vaultwarden,docker.io/vaultwarden/server` |
| VW_VERSION            | null               | To override the `SOURCE_VERSION` value. This is also used by the `build.rs` code for example |
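Several of these variables can be combined in a single invocation. A sketch of a release-style bake (the tag and registry values are placeholders, not a documented release procedure):

```bash
# Hypothetical example: custom features, two registries, and explicit tags
DB="sqlite,postgresql" \
BASE_TAGS="latest,1.34.0" \
CONTAINER_REGISTRIES="ghcr.io/dani-garcia/vaultwarden,docker.io/vaultwarden/server" \
docker/bake.sh debian-multi
```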
15  docker/bake.sh  Executable file
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

# Determine the basedir of this script.
# It should be located in the same directory as the docker-bake.hcl
# This ensures you can run this script from both inside and outside of the docker directory
BASEDIR=$(RL=$(readlink -n "$0"); SP="${RL:-$0}"; dirname "$(cd "$(dirname "${SP}")" || exit; pwd)/$(basename "${SP}")")

# Load build env's
source "${BASEDIR}/bake_env.sh"

# Be verbose on what is being executed
set -x

# Make sure we set the context to `..` so it will go up one directory
docker buildx bake --progress plain --set "*.context=${BASEDIR}/.." -f "${BASEDIR}/docker-bake.hcl" "$@"
33  docker/bake_env.sh  Normal file
@@ -0,0 +1,33 @@
#!/usr/bin/env bash

# If SOURCE_COMMIT is provided via env skip this
if [ -z "${SOURCE_COMMIT+x}" ]; then
    SOURCE_COMMIT="$(git rev-parse HEAD)"
fi

# If VW_VERSION is provided via env use it as SOURCE_VERSION
# Else define it using git
if [[ -n "${VW_VERSION}" ]]; then
    SOURCE_VERSION="${VW_VERSION}"
else
    GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null)"
    if [[ -n "${GIT_EXACT_TAG}" ]]; then
        SOURCE_VERSION="${GIT_EXACT_TAG}"
    else
        GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
        SOURCE_VERSION="${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}"
        GIT_BRANCH="$(git rev-parse --abbrev-ref HEAD)"
        case "${GIT_BRANCH}" in
            main|master|HEAD)
                # Do not add the branch name for these branches
                ;;
            *)
                SOURCE_VERSION="${SOURCE_VERSION} (${GIT_BRANCH})"
                ;;
        esac
    fi
fi

# Export the rendered variables above so bake will use them
export SOURCE_COMMIT
export SOURCE_VERSION
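Because this script only derives and exports variables, it can be sourced on its own to preview what a bake run would embed, without starting a build. A small sketch, run from the repo root:

```bash
# Preview the commit and version string the bake scripts would use
source docker/bake_env.sh
echo "commit:  ${SOURCE_COMMIT}"
echo "version: ${SOURCE_VERSION}"
```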
260  docker/docker-bake.hcl  Normal file
@@ -0,0 +1,260 @@
// ==== Baking Variables ====

// Set which cargo profile to use, dev or release for example
// Use the value provided in the Dockerfile as default
variable "CARGO_PROFILE" {
  default = null
}

// Set which DB's (features) to enable
// Use the value provided in the Dockerfile as default
variable "DB" {
  default = null
}

// The repository this build was triggered from
variable "SOURCE_REPOSITORY_URL" {
  default = null
}

// The commit hash of the current commit this build was triggered on
variable "SOURCE_COMMIT" {
  default = null
}

// The version of this build
// Typically the current exact tag of this commit,
// else the last tag and the first 8 characters of the source commit
variable "SOURCE_VERSION" {
  default = null
}

// This can be used to overwrite SOURCE_VERSION
// It will be used during the build.rs building stage
variable "VW_VERSION" {
  default = null
}

// The base tag(s) to use
// This can be a comma separated value like "testing,1.29.2"
variable "BASE_TAGS" {
  default = "testing"
}

// Which container registries should be used for the tagging
// This can be a comma separated value
// Use a full URI like `ghcr.io/dani-garcia/vaultwarden,docker.io/vaultwarden/server`
variable "CONTAINER_REGISTRIES" {
  default = "vaultwarden/server"
}


// ==== Baking Groups ====

group "default" {
  targets = ["debian"]
}


// ==== Shared Baking ====
function "labels" {
  params = []
  result = {
    "org.opencontainers.image.description" = "Unofficial Bitwarden compatible server written in Rust - ${SOURCE_VERSION}"
    "org.opencontainers.image.licenses" = "AGPL-3.0-only"
    "org.opencontainers.image.documentation" = "https://github.com/dani-garcia/vaultwarden/wiki"
    "org.opencontainers.image.url" = "https://github.com/dani-garcia/vaultwarden"
    "org.opencontainers.image.created" = "${formatdate("YYYY-MM-DD'T'hh:mm:ssZZZZZ", timestamp())}"
    "org.opencontainers.image.source" = "${SOURCE_REPOSITORY_URL}"
    "org.opencontainers.image.revision" = "${SOURCE_COMMIT}"
    "org.opencontainers.image.version" = "${SOURCE_VERSION}"
  }
}

target "_default_attributes" {
  labels = labels()
  args = {
    DB = "${DB}"
    CARGO_PROFILE = "${CARGO_PROFILE}"
    VW_VERSION = "${VW_VERSION}"
  }
}


// ==== Debian Baking ====

// Default Debian target, will build a container using the hosts platform architecture
target "debian" {
  inherits = ["_default_attributes"]
  dockerfile = "docker/Dockerfile.debian"
  tags = generate_tags("", platform_tag())
  output = ["type=docker"]
}

// Multi Platform target, will build one tagged manifest with all supported architectures
// This is mainly used by GitHub Actions to build and push new containers
target "debian-multi" {
  inherits = ["debian"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
  tags = generate_tags("", "")
  output = [join(",", flatten([["type=registry"], image_index_annotations()]))]
}

// Per platform targets, to individually test building per platform locally
target "debian-amd64" {
  inherits = ["debian"]
  platforms = ["linux/amd64"]
  tags = generate_tags("", "-amd64")
}

target "debian-arm64" {
  inherits = ["debian"]
  platforms = ["linux/arm64"]
  tags = generate_tags("", "-arm64")
}

target "debian-armv7" {
  inherits = ["debian"]
  platforms = ["linux/arm/v7"]
  tags = generate_tags("", "-armv7")
}

target "debian-armv6" {
  inherits = ["debian"]
  platforms = ["linux/arm/v6"]
  tags = generate_tags("", "-armv6")
}

// ==== Start of unsupported Debian architecture targets ===
// These are provided just to help users build for these rare platforms
// They will not be built by default
target "debian-386" {
  inherits = ["debian"]
  platforms = ["linux/386"]
  tags = generate_tags("", "-386")
  args = {
    TARGET_PKG_CONFIG_PATH = "/usr/lib/i386-linux-gnu/pkgconfig"
  }
}

target "debian-ppc64le" {
  inherits = ["debian"]
  platforms = ["linux/ppc64le"]
  tags = generate_tags("", "-ppc64le")
}

target "debian-s390x" {
  inherits = ["debian"]
  platforms = ["linux/s390x"]
  tags = generate_tags("", "-s390x")
}
// ==== End of unsupported Debian architecture targets ===

// A Group to build all platforms individually for local testing
group "debian-all" {
  targets = ["debian-amd64", "debian-arm64", "debian-armv7", "debian-armv6"]
}


// ==== Alpine Baking ====

// Default Alpine target, will build a container using the hosts platform architecture
target "alpine" {
  inherits = ["_default_attributes"]
  dockerfile = "docker/Dockerfile.alpine"
  tags = generate_tags("-alpine", platform_tag())
  output = ["type=docker"]
}

// Multi Platform target, will build one tagged manifest with all supported architectures
// This is mainly used by GitHub Actions to build and push new containers
target "alpine-multi" {
  inherits = ["alpine"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
  tags = generate_tags("-alpine", "")
  output = [join(",", flatten([["type=registry"], image_index_annotations()]))]
}

// Per platform targets, to individually test building per platform locally
target "alpine-amd64" {
  inherits = ["alpine"]
  platforms = ["linux/amd64"]
  tags = generate_tags("-alpine", "-amd64")
}

target "alpine-arm64" {
  inherits = ["alpine"]
  platforms = ["linux/arm64"]
  tags = generate_tags("-alpine", "-arm64")
}

target "alpine-armv7" {
  inherits = ["alpine"]
  platforms = ["linux/arm/v7"]
  tags = generate_tags("-alpine", "-armv7")
}

target "alpine-armv6" {
  inherits = ["alpine"]
  platforms = ["linux/arm/v6"]
  tags = generate_tags("-alpine", "-armv6")
}

// A Group to build all platforms individually for local testing
group "alpine-all" {
  targets = ["alpine-amd64", "alpine-arm64", "alpine-armv7", "alpine-armv6"]
}


// ==== Bake everything locally ====

group "all" {
  targets = ["debian-all", "alpine-all"]
}


// ==== Baking functions ====

// This will return the local platform as amd64, arm64 or armv7 for example
// It can be used for creating a local image tag
function "platform_tag" {
  params = []
  result = "-${replace(replace(BAKE_LOCAL_PLATFORM, "linux/", ""), "/", "")}"
}


function "get_container_registries" {
  params = []
  result = flatten(split(",", CONTAINER_REGISTRIES))
}

function "get_base_tags" {
  params = []
  result = flatten(split(",", BASE_TAGS))
}

function "generate_tags" {
  params = [
    suffix, // What to append to the BASE_TAG when needed, like `-alpine` for example
    platform // the platform we are building for if needed
  ]
  result = flatten([
    for registry in get_container_registries() :
      [for base_tag in get_base_tags() :
        concat(
          # If the base_tag contains latest, and the suffix contains `-alpine` add a `:alpine` tag too
          base_tag == "latest" ? suffix == "-alpine" ? ["${registry}:alpine${platform}"] : [] : [],
          # The default tagging strategy
          ["${registry}:${base_tag}${suffix}${platform}"]
        )
      ]
  ])
}

function "image_index_annotations" {
  params = []
  result = flatten([
    for key, value in labels() :
      value != null ? formatlist("annotation-index.%s=%s", "${key}", "${value}") : []
  ])
}
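The fully resolved configuration of any target in this file (tags, labels, args, platforms) can be inspected without building anything, which is useful for checking what `generate_tags` and `labels` produce:

```bash
# Print the resolved debian-multi target as JSON; nothing is built
SOURCE_COMMIT="$(git rev-parse HEAD)" \
docker buildx bake --file docker/docker-bake.hcl --print debian-multi
```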
65  docker/healthcheck.sh  Executable file
@@ -0,0 +1,65 @@
#!/usr/bin/env sh

# Use the value of the corresponding env var (if present),
# or a default value otherwise.
: "${DATA_FOLDER:="/data"}"
: "${ROCKET_PORT:="80"}"
: "${ENV_FILE:="/.env"}"

CONFIG_FILE="${DATA_FOLDER}"/config.json

# Check if the $ENV_FILE file exists and is readable
# If that is the case, load it into the environment before running any check
if [ -r "${ENV_FILE}" ]; then
    # shellcheck disable=SC1090
    . "${ENV_FILE}"
fi

# Given a config key, return the corresponding config value from the
# config file. If the key doesn't exist, return an empty string.
get_config_val() {
    key="$1"
    # Extract a line of the form:
    #   "domain": "https://bw.example.com/path",
    grep "\"${key}\":" "${CONFIG_FILE}" |
    # To extract just the value (https://bw.example.com/path), delete:
    # (1) everything up to and including the first ':',
    # (2) whitespace and '"' from the front,
    # (3) ',' and '"' from the back.
    sed -e 's/[^:]\+://' -e 's/^[ "]\+//' -e 's/[,"]\+$//'
}

# Extract the base path from a domain URL. For example:
# - `` -> ``
# - `https://bw.example.com` -> ``
# - `https://bw.example.com/` -> ``
# - `https://bw.example.com/path` -> `/path`
# - `https://bw.example.com/multi/path` -> `/multi/path`
get_base_path() {
    echo "$1" |
    # Delete:
    # (1) everything up to and including '://',
    # (2) everything up to '/',
    # (3) trailing '/' from the back.
    sed -e 's|.*://||' -e 's|[^/]\+||' -e 's|/*$||'
}

# Read domain URL from config.json, if present.
if [ -r "${CONFIG_FILE}" ]; then
    domain="$(get_config_val 'domain')"
    if [ -n "${domain}" ]; then
        # config.json 'domain' overrides the DOMAIN env var.
        DOMAIN="${domain}"
    fi
fi

addr="${ROCKET_ADDRESS}"
if [ -z "${addr}" ] || [ "${addr}" = '0.0.0.0' ] || [ "${addr}" = '::' ]; then
    addr='localhost'
fi
base_path="$(get_base_path "${DOMAIN}")"
if [ -n "${ROCKET_TLS}" ]; then
    s='s'
fi
curl --insecure --fail --silent --show-error \
    "http${s}://${addr}:${ROCKET_PORT}${base_path}/alive" || exit 1
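The same script that the image's `HEALTHCHECK` runs can also be invoked by hand against a running container, which is handy when debugging a failing health status. A sketch, where `vaultwarden` is a placeholder container name:

```bash
# Run the bundled healthcheck inside a running container named "vaultwarden"
docker exec vaultwarden /healthcheck.sh && echo "healthy" || echo "unhealthy"
```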
105  docker/podman-bake.sh  Executable file
@@ -0,0 +1,105 @@
#!/usr/bin/env bash

# Determine the basedir of this script.
# It should be located in the same directory as the docker-bake.hcl
# This ensures you can run this script from both inside and outside of the docker directory
BASEDIR=$(RL=$(readlink -n "$0"); SP="${RL:-$0}"; dirname "$(cd "$(dirname "${SP}")" || exit; pwd)/$(basename "${SP}")")

# Load build env's
source "${BASEDIR}/bake_env.sh"

# Check if a target is given as first argument
# If not we assume the defaults and pass the given arguments to the podman command
case "${1}" in
    alpine*|debian*)
        TARGET="${1}"
        # Now shift the $@ array so we only have the rest of the arguments
        # This allows us to append these as extra arguments to the podman buildx build command
        shift
        ;;
esac

LABEL_ARGS=(
    --label org.opencontainers.image.description="Unofficial Bitwarden compatible server written in Rust"
    --label org.opencontainers.image.licenses="AGPL-3.0-only"
    --label org.opencontainers.image.documentation="https://github.com/dani-garcia/vaultwarden/wiki"
    --label org.opencontainers.image.url="https://github.com/dani-garcia/vaultwarden"
    --label org.opencontainers.image.created="$(date --utc --iso-8601=seconds)"
)
if [[ -n "${SOURCE_REPOSITORY_URL}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.source="${SOURCE_REPOSITORY_URL}")
fi
if [[ -n "${SOURCE_COMMIT}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.revision="${SOURCE_COMMIT}")
fi
if [[ -n "${SOURCE_VERSION}" ]]; then
    LABEL_ARGS+=(--label org.opencontainers.image.version="${SOURCE_VERSION}")
fi

# Check if and which --build-arg arguments we need to configure
BUILD_ARGS=()
if [[ -n "${DB}" ]]; then
    BUILD_ARGS+=(--build-arg DB="${DB}")
fi
if [[ -n "${CARGO_PROFILE}" ]]; then
    BUILD_ARGS+=(--build-arg CARGO_PROFILE="${CARGO_PROFILE}")
fi
if [[ -n "${VW_VERSION}" ]]; then
    BUILD_ARGS+=(--build-arg VW_VERSION="${VW_VERSION}")
fi

# Set the default BASE_TAGS if none are provided
if [[ -z "${BASE_TAGS}" ]]; then
    BASE_TAGS="testing"
fi

# Set the default CONTAINER_REGISTRIES if none are provided
if [[ -z "${CONTAINER_REGISTRIES}" ]]; then
    CONTAINER_REGISTRIES="vaultwarden/server"
fi

# Check which Dockerfile we need to use, default is debian
case "${TARGET}" in
    alpine*)
        BASE_TAGS="${BASE_TAGS}-alpine"
        DOCKERFILE="Dockerfile.alpine"
        ;;
    *)
        DOCKERFILE="Dockerfile.debian"
        ;;
esac

# Check which platform we need to build and append the BASE_TAGS with the architecture
case "${TARGET}" in
    *-arm64)
        BASE_TAGS="${BASE_TAGS}-arm64"
        PLATFORM="linux/arm64"
        ;;
    *-armv7)
        BASE_TAGS="${BASE_TAGS}-armv7"
        PLATFORM="linux/arm/v7"
        ;;
    *-armv6)
        BASE_TAGS="${BASE_TAGS}-armv6"
        PLATFORM="linux/arm/v6"
        ;;
    *)
        BASE_TAGS="${BASE_TAGS}-amd64"
        PLATFORM="linux/amd64"
        ;;
esac

# Be verbose on what is being executed
set -x

# Build the image with podman
# We use the docker format here since we are using `SHELL`, which is not supported by OCI
# shellcheck disable=SC2086
podman buildx build \
    --platform="${PLATFORM}" \
    --tag="${CONTAINER_REGISTRIES}:${BASE_TAGS}" \
    --format=docker \
    "${LABEL_ARGS[@]}" \
    "${BUILD_ARGS[@]}" \
    --file="${BASEDIR}/${DOCKERFILE}" "$@" \
    "${BASEDIR}/.."
31  docker/render_template  Executable file
@@ -0,0 +1,31 @@
#!/usr/bin/env python3

import os
import argparse
import json
import yaml
import jinja2

# Load settings file
with open("DockerSettings.yaml", 'r') as yaml_file:
    yaml_data = yaml.safe_load(yaml_file)

settings_env = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.getcwd()),
)
settings_yaml = yaml.safe_load(settings_env.get_template("DockerSettings.yaml").render(yaml_data))

args_parser = argparse.ArgumentParser()
args_parser.add_argument('template_file', help='Jinja2 template file to render.')
args_parser.add_argument('render_vars', help='JSON-encoded data to pass to the templating engine.')
cli_args = args_parser.parse_args()

# Merge the default config yaml with the json arguments given.
render_vars = json.loads(cli_args.render_vars)
settings_yaml.update(render_vars)

environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.getcwd()),
    trim_blocks=True,
)
print(environment.get_template(cli_args.template_file).render(settings_yaml))
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"appSettings": {
|
||||
"apiUri": "/api",
|
||||
"identityUri": "/identity",
|
||||
"iconsUri": "/icons",
|
||||
"stripeKey": "",
|
||||
"braintreeKey": ""
|
||||
}
|
||||
}
|
29
docker/start.sh
Executable file
29
docker/start.sh
Executable file
@@ -0,0 +1,29 @@
|
||||
#!/bin/sh
|
||||
|
||||
if [ -n "${UMASK}" ]; then
|
||||
umask "${UMASK}"
|
||||
fi
|
||||
|
||||
if [ -r /etc/vaultwarden.sh ]; then
|
||||
. /etc/vaultwarden.sh
|
||||
elif [ -r /etc/bitwarden_rs.sh ]; then
|
||||
echo "### You are using the old /etc/bitwarden_rs.sh script, please migrate to /etc/vaultwarden.sh ###"
|
||||
. /etc/bitwarden_rs.sh
|
||||
fi
|
||||
|
||||
if [ -d /etc/vaultwarden.d ]; then
|
||||
for f in /etc/vaultwarden.d/*.sh; do
|
||||
if [ -r "${f}" ]; then
|
||||
. "${f}"
|
||||
fi
|
||||
done
|
||||
elif [ -d /etc/bitwarden_rs.d ]; then
|
||||
echo "### You are using the old /etc/bitwarden_rs.d script directory, please migrate to /etc/vaultwarden.d ###"
|
||||
for f in /etc/bitwarden_rs.d/*.sh; do
|
||||
if [ -r "${f}" ]; then
|
||||
. "${f}"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
exec /vaultwarden "${@}"
|
@@ -1,20 +0,0 @@
|
||||
[package]
|
||||
name = "jsonwebtoken"
|
||||
version = "4.0.1"
|
||||
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
|
||||
license = "MIT"
|
||||
readme = "README.md"
|
||||
description = "Create and parse JWT in a strongly typed way."
|
||||
homepage = "https://github.com/Keats/rust-jwt"
|
||||
repository = "https://github.com/Keats/rust-jwt"
|
||||
keywords = ["jwt", "web", "api", "token", "json"]
|
||||
|
||||
[dependencies]
|
||||
error-chain = { version = "0.11", default-features = false }
|
||||
serde_json = "1.0"
|
||||
serde_derive = "1.0"
|
||||
serde = "1.0"
|
||||
ring = { version = "0.11.0", features = ["rsa_signing", "dev_urandom_fallback"] }
|
||||
base64 = "0.9"
|
||||
untrusted = "0.5"
|
||||
chrono = "0.4"
|
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Vincent Prouillet
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@@ -1,120 +0,0 @@
use std::sync::Arc;

use base64;
use ring::{rand, digest, hmac, signature};
use ring::constant_time::verify_slices_are_equal;
use untrusted;

use errors::{Result, ErrorKind};


/// The algorithms supported for signing/verifying
#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)]
pub enum Algorithm {
    /// HMAC using SHA-256
    HS256,
    /// HMAC using SHA-384
    HS384,
    /// HMAC using SHA-512
    HS512,

    /// RSASSA-PKCS1-v1_5 using SHA-256
    RS256,
    /// RSASSA-PKCS1-v1_5 using SHA-384
    RS384,
    /// RSASSA-PKCS1-v1_5 using SHA-512
    RS512,
}

/// The actual HS signing + encoding
fn sign_hmac(alg: &'static digest::Algorithm, key: &[u8], signing_input: &str) -> Result<String> {
    let signing_key = hmac::SigningKey::new(alg, key);
    let digest = hmac::sign(&signing_key, signing_input.as_bytes());

    Ok(
        base64::encode_config::<hmac::Signature>(&digest, base64::URL_SAFE_NO_PAD)
    )
}

/// The actual RSA signing + encoding
/// Taken from Ring doc https://briansmith.org/rustdoc/ring/signature/index.html
fn sign_rsa(alg: Algorithm, key: &[u8], signing_input: &str) -> Result<String> {
    let ring_alg = match alg {
        Algorithm::RS256 => &signature::RSA_PKCS1_SHA256,
        Algorithm::RS384 => &signature::RSA_PKCS1_SHA384,
        Algorithm::RS512 => &signature::RSA_PKCS1_SHA512,
        _ => unreachable!(),
    };

    let key_pair = Arc::new(
        signature::RSAKeyPair::from_der(untrusted::Input::from(key))
            .map_err(|_| ErrorKind::InvalidKey)?
    );
    let mut signing_state = signature::RSASigningState::new(key_pair)
        .map_err(|_| ErrorKind::InvalidKey)?;
    let mut signature = vec![0; signing_state.key_pair().public_modulus_len()];
    let rng = rand::SystemRandom::new();
    signing_state.sign(ring_alg, &rng, signing_input.as_bytes(), &mut signature)
        .map_err(|_| ErrorKind::InvalidKey)?;

    Ok(
        base64::encode_config::<[u8]>(&signature, base64::URL_SAFE_NO_PAD)
    )
}

/// Take the payload of a JWT, sign it using the algorithm given and return
/// the base64 url safe encoding of the result.
///
/// Only use this function if you want to do something other than JWT.
pub fn sign(signing_input: &str, key: &[u8], algorithm: Algorithm) -> Result<String> {
    match algorithm {
        Algorithm::HS256 => sign_hmac(&digest::SHA256, key, signing_input),
        Algorithm::HS384 => sign_hmac(&digest::SHA384, key, signing_input),
        Algorithm::HS512 => sign_hmac(&digest::SHA512, key, signing_input),

        Algorithm::RS256 | Algorithm::RS384 | Algorithm::RS512 => sign_rsa(algorithm, key, signing_input),
        // TODO: if PKCS1 is made public, remove the line above and uncomment below
        // Algorithm::RS256 => sign_rsa(&signature::RSA_PKCS1_SHA256, key, signing_input),
        // Algorithm::RS384 => sign_rsa(&signature::RSA_PKCS1_SHA384, key, signing_input),
        // Algorithm::RS512 => sign_rsa(&signature::RSA_PKCS1_SHA512, key, signing_input),
    }
}

/// See Ring RSA docs for more details
fn verify_rsa(alg: &signature::RSAParameters, signature: &str, signing_input: &str, key: &[u8]) -> Result<bool> {
    let signature_bytes = base64::decode_config(signature, base64::URL_SAFE_NO_PAD)?;
    let public_key_der = untrusted::Input::from(key);
    let message = untrusted::Input::from(signing_input.as_bytes());
    let expected_signature = untrusted::Input::from(signature_bytes.as_slice());

    let res = signature::verify(alg, public_key_der, message, expected_signature);

    Ok(res.is_ok())
}

/// Compares the signature given with a re-computed signature for HMAC or using the public key
/// for RSA.
///
/// Only use this function if you want to do something other than JWT.
///
/// `signature` is the signature part of a jwt (text after the second '.')
///
/// `signing_input` is base64(header) + "." + base64(claims)
pub fn verify(signature: &str, signing_input: &str, key: &[u8], algorithm: Algorithm) -> Result<bool> {
    match algorithm {
        Algorithm::HS256 | Algorithm::HS384 | Algorithm::HS512 => {
            // we just re-sign the data with the key and compare if they are equal
            let signed = sign(signing_input, key, algorithm)?;
            Ok(verify_slices_are_equal(signature.as_ref(), signed.as_ref()).is_ok())
        },
        Algorithm::RS256 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA256, signature, signing_input, key),
        Algorithm::RS384 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA384, signature, signing_input, key),
        Algorithm::RS512 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA512, signature, signing_input, key),
    }
}

impl Default for Algorithm {
    fn default() -> Self {
        Algorithm::HS256
    }
}
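
For context, a minimal sketch of how the sign/verify helpers in the removed module above were used for HMAC, assuming the vendored crate is available under the name jsonwebtoken; the signing input and secret below are made-up illustration values.

extern crate jsonwebtoken;

use jsonwebtoken::{sign, verify, Algorithm};

fn main() {
    // Stand-in for base64(header) + "." + base64(claims); any string works here.
    let signing_input = "eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJiQGIuY29tIn0";
    let key = b"secret";

    // Sign the payload with HMAC-SHA256 and get the base64url signature back.
    let signature = sign(signing_input, key, Algorithm::HS256).unwrap();

    // Verification re-computes the signature and compares in constant time.
    assert!(verify(&signature, signing_input, key, Algorithm::HS256).unwrap());
}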
@@ -1,68 +0,0 @@
use base64;
use serde_json;
use ring;

error_chain! {
    errors {
        /// When a token doesn't have a valid JWT shape
        InvalidToken {
            description("invalid token")
            display("Invalid token")
        }
        /// When the signature doesn't match
        InvalidSignature {
            description("invalid signature")
            display("Invalid signature")
        }
        /// When the secret given is not a valid RSA key
        InvalidKey {
            description("invalid key")
            display("Invalid Key")
        }

        // Validation error

        /// When a token's `exp` claim indicates that it has expired
        ExpiredSignature {
            description("expired signature")
            display("Expired Signature")
        }
        /// When a token's `iss` claim does not match the expected issuer
        InvalidIssuer {
            description("invalid issuer")
            display("Invalid Issuer")
        }
        /// When a token's `aud` claim does not match one of the expected audience values
        InvalidAudience {
            description("invalid audience")
            display("Invalid Audience")
        }
        /// When a token's `sub` claim does not match the expected subject
        InvalidSubject {
            description("invalid subject")
            display("Invalid Subject")
        }
        /// When a token's `iat` claim is in the future
        InvalidIssuedAt {
            description("invalid issued at")
            display("Invalid Issued At")
        }
        /// When a token's `nbf` claim represents a time in the future
        ImmatureSignature {
            description("immature signature")
            display("Immature Signature")
        }
        /// When the algorithm in the header doesn't match the one passed to `decode`
        InvalidAlgorithm {
            description("Invalid algorithm")
            display("Invalid Algorithm")
        }
    }

    foreign_links {
        Unspecified(ring::error::Unspecified) #[doc = "An error happened while signing/verifying a token with RSA"];
        Base64(base64::DecodeError) #[doc = "An error happened while decoding some base64 text"];
        Json(serde_json::Error) #[doc = "An error happened while serializing/deserializing JSON"];
        Utf8(::std::string::FromUtf8Error) #[doc = "An error happened while trying to convert the result of base64 decoding to a String"];
    }
}
@@ -1,64 +0,0 @@
use crypto::Algorithm;


/// A basic JWT header, the alg defaults to HS256 and typ is automatically
/// set to `JWT`. All the other fields are optional.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Header {
    /// The type of JWS: it can only be "JWT" here
    ///
    /// Defined in [RFC7515#4.1.9](https://tools.ietf.org/html/rfc7515#section-4.1.9).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub typ: Option<String>,
    /// The algorithm used
    ///
    /// Defined in [RFC7515#4.1.1](https://tools.ietf.org/html/rfc7515#section-4.1.1).
    pub alg: Algorithm,
    /// Content type
    ///
    /// Defined in [RFC7519#5.2](https://tools.ietf.org/html/rfc7519#section-5.2).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cty: Option<String>,
    /// JSON Key URL
    ///
    /// Defined in [RFC7515#4.1.2](https://tools.ietf.org/html/rfc7515#section-4.1.2).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jku: Option<String>,
    /// Key ID
    ///
    /// Defined in [RFC7515#4.1.4](https://tools.ietf.org/html/rfc7515#section-4.1.4).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub kid: Option<String>,
    /// X.509 URL
    ///
    /// Defined in [RFC7515#4.1.5](https://tools.ietf.org/html/rfc7515#section-4.1.5).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub x5u: Option<String>,
    /// X.509 certificate thumbprint
    ///
    /// Defined in [RFC7515#4.1.7](https://tools.ietf.org/html/rfc7515#section-4.1.7).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub x5t: Option<String>,
}

impl Header {
    /// Returns a JWT header with the algorithm given
    pub fn new(algorithm: Algorithm) -> Header {
        Header {
            typ: Some("JWT".to_string()),
            alg: algorithm,
            cty: None,
            jku: None,
            kid: None,
            x5u: None,
            x5t: None,
        }
    }
}

impl Default for Header {
    /// Returns a JWT header using the default Algorithm, HS256
    fn default() -> Self {
        Header::new(Algorithm::default())
    }
}
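
A small sketch of constructing headers with the removed Header type, assuming the same public fields shown above; the "key-1" key id is only a placeholder.

extern crate jsonwebtoken;

use jsonwebtoken::{Algorithm, Header};

fn main() {
    // Header::default() is HS256 with typ = "JWT" and every optional field None.
    let default_header = Header::default();
    assert_eq!(default_header.alg, Algorithm::HS256);

    // An RS256 header with a key id set; "key-1" is just a placeholder value.
    let mut rsa_header = Header::new(Algorithm::RS256);
    rsa_header.kid = Some("key-1".to_string());
    assert_eq!(rsa_header.typ, Some("JWT".to_string()));
}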
@@ -1,142 +0,0 @@
//! Create and parses JWT (JSON Web Tokens)
//!
//! Documentation: [stable](https://docs.rs/jsonwebtoken/)
#![recursion_limit = "300"]
#![deny(missing_docs)]
#![allow(unused_doc_comments)]
#![allow(renamed_and_removed_lints)]

#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde;
extern crate base64;
extern crate ring;
extern crate untrusted;
extern crate chrono;

/// All the errors, generated using error-chain
pub mod errors;
mod header;
mod crypto;
mod serialization;
mod validation;

pub use header::Header;
pub use crypto::{
    Algorithm,
    sign,
    verify,
};
pub use validation::Validation;
pub use serialization::TokenData;


use serde::de::DeserializeOwned;
use serde::ser::Serialize;

use errors::{Result, ErrorKind};
use serialization::{from_jwt_part, from_jwt_part_claims, to_jwt_part};
use validation::{validate};


/// Encode the header and claims given and sign the payload using the algorithm from the header and the key
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate serde_derive;
/// use jsonwebtoken::{encode, Algorithm, Header};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
///     sub: String,
///     company: String
/// }
///
/// let my_claims = Claims {
///     sub: "b@b.com".to_owned(),
///     company: "ACME".to_owned()
/// };
///
/// // my_claims is a struct that implements Serialize
/// // This will create a JWT using HS256 as algorithm
/// let token = encode(&Header::default(), &my_claims, "secret".as_ref()).unwrap();
/// ```
pub fn encode<T: Serialize>(header: &Header, claims: &T, key: &[u8]) -> Result<String> {
    let encoded_header = to_jwt_part(&header)?;
    let encoded_claims = to_jwt_part(&claims)?;
    let signing_input = [encoded_header.as_ref(), encoded_claims.as_ref()].join(".");
    let signature = sign(&*signing_input, key.as_ref(), header.alg)?;

    Ok([signing_input, signature].join("."))
}

/// Used in decode: takes the result of a rsplit and ensures we only get 2 parts.
/// Errors if we don't.
macro_rules! expect_two {
    ($iter:expr) => {{
        let mut i = $iter;
        match (i.next(), i.next(), i.next()) {
            (Some(first), Some(second), None) => (first, second),
            _ => return Err(ErrorKind::InvalidToken.into())
        }
    }}
}

/// Decode a token into a struct containing 2 fields: `claims` and `header`.
///
/// If the token or its signature is invalid or the claims fail validation, it will return an error.
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate serde_derive;
/// use jsonwebtoken::{decode, Validation, Algorithm};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
///     sub: String,
///     company: String
/// }
///
/// let token = "a.jwt.token".to_string();
/// // Claims is a struct that implements Deserialize
/// let token_data = decode::<Claims>(&token, "secret", &Validation::new(Algorithm::HS256));
/// ```
pub fn decode<T: DeserializeOwned>(token: &str, key: &[u8], validation: &Validation) -> Result<TokenData<T>> {
    let (signature, signing_input) = expect_two!(token.rsplitn(2, '.'));
    let (claims, header) = expect_two!(signing_input.rsplitn(2, '.'));
    let header: Header = from_jwt_part(header)?;

    if !verify(signature, signing_input, key, header.alg)? {
        return Err(ErrorKind::InvalidSignature.into());
    }

    if !validation.algorithms.contains(&header.alg) {
        return Err(ErrorKind::InvalidAlgorithm.into());
    }

    let (decoded_claims, claims_map): (T, _) = from_jwt_part_claims(claims)?;

    validate(&claims_map, validation)?;

    Ok(TokenData { header: header, claims: decoded_claims })
}

/// Decode a token and return the Header. This is not doing any kind of validation: it is meant to be
/// used when you don't know which `alg` the token is using and want to find out.
///
/// If the token has an invalid format, it will return an error.
///
/// ```rust,ignore
/// use jsonwebtoken::decode_header;
///
/// let token = "a.jwt.token".to_string();
/// let header = decode_header(&token);
/// ```
pub fn decode_header(token: &str) -> Result<Header> {
    let (_, signing_input) = expect_two!(token.rsplitn(2, '.'));
    let (_, header) = expect_two!(signing_input.rsplitn(2, '.'));
    from_jwt_part(header)
}
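
The doc comments above are marked rust,ignore; a compiling version of the same encode/decode round trip, assuming the vendored crate is available as jsonwebtoken and serde_derive is a dependency, looks roughly like this.

#[macro_use]
extern crate serde_derive;
extern crate jsonwebtoken;

use jsonwebtoken::{decode, encode, Algorithm, Header, Validation};

#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    sub: String,
    company: String,
}

fn main() {
    let my_claims = Claims { sub: "b@b.com".to_owned(), company: "ACME".to_owned() };

    // Create a JWT with the default header (HS256).
    let token = encode(&Header::default(), &my_claims, "secret".as_ref()).unwrap();

    // Decode it again, accepting only HS256 and validating exp/iat/nbf if present.
    let token_data =
        decode::<Claims>(&token, "secret".as_ref(), &Validation::new(Algorithm::HS256)).unwrap();
    assert_eq!(token_data.claims.sub, "b@b.com");
}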
@@ -1,42 +0,0 @@
use base64;
use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use serde_json::{from_str, to_string, Value};
use serde_json::map::Map;

use errors::{Result};
use header::Header;


/// The return type of a successful call to decode
#[derive(Debug)]
pub struct TokenData<T> {
    /// The decoded JWT header
    pub header: Header,
    /// The decoded JWT claims
    pub claims: T
}

/// Serializes to JSON and encodes to base64
pub fn to_jwt_part<T: Serialize>(input: &T) -> Result<String> {
    let encoded = to_string(input)?;
    Ok(base64::encode_config(encoded.as_bytes(), base64::URL_SAFE_NO_PAD))
}

/// Decodes from base64 and deserializes from JSON to a struct
pub fn from_jwt_part<B: AsRef<str>, T: DeserializeOwned>(encoded: B) -> Result<T> {
    let decoded = base64::decode_config(encoded.as_ref(), base64::URL_SAFE_NO_PAD)?;
    let s = String::from_utf8(decoded)?;

    Ok(from_str(&s)?)
}

/// Decodes from base64 and deserializes from JSON to a struct AND a hashmap
pub fn from_jwt_part_claims<B: AsRef<str>, T: DeserializeOwned>(encoded: B) -> Result<(T, Map<String, Value>)> {
    let decoded = base64::decode_config(encoded.as_ref(), base64::URL_SAFE_NO_PAD)?;
    let s = String::from_utf8(decoded)?;

    let claims: T = from_str(&s)?;
    let map: Map<_,_> = from_str(&s)?;
    Ok((claims, map))
}
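
These helpers are private to the crate, but the transformation they perform is just JSON plus unpadded base64url; a standalone sketch of the equivalent round trip, assuming the base64 0.9 and serde_json crates listed above, looks like this.

extern crate base64;
extern crate serde_json;

fn main() {
    // What to_jwt_part does: serialize to JSON, then base64url-encode without padding.
    let json = r#"{"sub":"b@b.com","company":"ACME"}"#;
    let encoded = base64::encode_config(json.as_bytes(), base64::URL_SAFE_NO_PAD);

    // What from_jwt_part_claims does: decode and parse back into a serde_json Map.
    let decoded = base64::decode_config(&encoded, base64::URL_SAFE_NO_PAD).unwrap();
    let map: serde_json::Map<String, serde_json::Value> =
        serde_json::from_str(&String::from_utf8(decoded).unwrap()).unwrap();
    assert_eq!(map["company"], "ACME");
}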
@@ -1,377 +0,0 @@
use chrono::Utc;
use serde::ser::Serialize;
use serde_json::{Value, from_value, to_value};
use serde_json::map::Map;

use errors::{Result, ErrorKind};
use crypto::Algorithm;


/// Contains the various validations that are applied after decoding a token.
///
/// All time validations happen on UTC timestamps.
///
/// ```rust
/// use jsonwebtoken::Validation;
///
/// // Default value
/// let validation = Validation::default();
///
/// // Changing one parameter
/// let mut validation = Validation {leeway: 60, ..Default::default()};
///
/// // Setting audience
/// let mut validation = Validation::default();
/// validation.set_audience(&"Me"); // string
/// validation.set_audience(&["Me", "You"]); // array of strings
/// ```
#[derive(Debug, Clone, PartialEq)]
pub struct Validation {
    /// Add some leeway (in seconds) to the `exp`, `iat` and `nbf` validation to
    /// account for clock skew.
    ///
    /// Defaults to `0`.
    pub leeway: i64,
    /// Whether to validate the `exp` field.
    ///
    /// It will return an error if the time in the `exp` field is past.
    ///
    /// Defaults to `true`.
    pub validate_exp: bool,
    /// Whether to validate the `iat` field.
    ///
    /// It will return an error if the time in the `iat` field is in the future.
    ///
    /// Defaults to `true`.
    pub validate_iat: bool,
    /// Whether to validate the `nbf` field.
    ///
    /// It will return an error if the current timestamp is before the time in the `nbf` field.
    ///
    /// Defaults to `true`.
    pub validate_nbf: bool,
    /// If it contains a value, the validation will check that the `aud` field is the same as the
    /// one provided and will error otherwise.
    /// Since `aud` can be either a String or a Vec<String> in the JWT spec, you will need to use
    /// the [set_audience](struct.Validation.html#method.set_audience) method to set it.
    ///
    /// Defaults to `None`.
    pub aud: Option<Value>,
    /// If it contains a value, the validation will check that the `iss` field is the same as the
    /// one provided and will error otherwise.
    ///
    /// Defaults to `None`.
    pub iss: Option<String>,
    /// If it contains a value, the validation will check that the `sub` field is the same as the
    /// one provided and will error otherwise.
    ///
    /// Defaults to `None`.
    pub sub: Option<String>,
    /// If it contains a value, the validation will check that the `alg` of the header is contained
    /// in the ones provided and will error otherwise.
    ///
    /// Defaults to `vec![Algorithm::HS256]`.
    pub algorithms: Vec<Algorithm>,
}

impl Validation {
    /// Create a default validation setup allowing the given alg
    pub fn new(alg: Algorithm) -> Validation {
        let mut validation = Validation::default();
        validation.algorithms = vec![alg];
        validation
    }

    /// Since `aud` can be either a String or an array of String in the JWT spec, this method will take
    /// care of serializing the value.
    pub fn set_audience<T: Serialize>(&mut self, audience: &T) {
        self.aud = Some(to_value(audience).unwrap());
    }
}

impl Default for Validation {
    fn default() -> Validation {
        Validation {
            leeway: 0,

            validate_exp: true,
            validate_iat: true,
            validate_nbf: true,

            iss: None,
            sub: None,
            aud: None,

            algorithms: vec![Algorithm::HS256],
        }
    }
}



pub fn validate(claims: &Map<String, Value>, options: &Validation) -> Result<()> {
    let now = Utc::now().timestamp();

    if let Some(iat) = claims.get("iat") {
        if options.validate_iat && from_value::<i64>(iat.clone())? > now + options.leeway {
            return Err(ErrorKind::InvalidIssuedAt.into());
        }
    }

    if let Some(exp) = claims.get("exp") {
        if options.validate_exp && from_value::<i64>(exp.clone())? < now - options.leeway {
            return Err(ErrorKind::ExpiredSignature.into());
        }
    }

    if let Some(nbf) = claims.get("nbf") {
        if options.validate_nbf && from_value::<i64>(nbf.clone())? > now + options.leeway {
            return Err(ErrorKind::ImmatureSignature.into());
        }
    }

    if let Some(iss) = claims.get("iss") {
        if let Some(ref correct_iss) = options.iss {
            if from_value::<String>(iss.clone())? != *correct_iss {
                return Err(ErrorKind::InvalidIssuer.into());
            }
        }
    }

    if let Some(sub) = claims.get("sub") {
        if let Some(ref correct_sub) = options.sub {
            if from_value::<String>(sub.clone())? != *correct_sub {
                return Err(ErrorKind::InvalidSubject.into());
            }
        }
    }

    if let Some(aud) = claims.get("aud") {
        if let Some(ref correct_aud) = options.aud {
            if aud != correct_aud {
                return Err(ErrorKind::InvalidAudience.into());
            }
        }
    }

    Ok(())
}


#[cfg(test)]
mod tests {
    use serde_json::{to_value};
    use serde_json::map::Map;
    use chrono::Utc;

    use super::{validate, Validation};

    use errors::ErrorKind;

    #[test]
    fn iat_in_past_ok() {
        let mut claims = Map::new();
        claims.insert("iat".to_string(), to_value(Utc::now().timestamp() - 10000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_ok());
    }

    #[test]
    fn iat_in_future_fails() {
        let mut claims = Map::new();
        claims.insert("iat".to_string(), to_value(Utc::now().timestamp() + 100000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::InvalidIssuedAt => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn iat_in_future_but_in_leeway_ok() {
        let mut claims = Map::new();
        claims.insert("iat".to_string(), to_value(Utc::now().timestamp() + 50).unwrap());
        let validation = Validation {
            leeway: 1000 * 60,
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn exp_in_future_ok() {
        let mut claims = Map::new();
        claims.insert("exp".to_string(), to_value(Utc::now().timestamp() + 10000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_ok());
    }

    #[test]
    fn exp_in_past_fails() {
        let mut claims = Map::new();
        claims.insert("exp".to_string(), to_value(Utc::now().timestamp() - 100000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::ExpiredSignature => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn exp_in_past_but_in_leeway_ok() {
        let mut claims = Map::new();
        claims.insert("exp".to_string(), to_value(Utc::now().timestamp() - 500).unwrap());
        let validation = Validation {
            leeway: 1000 * 60,
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn nbf_in_past_ok() {
        let mut claims = Map::new();
        claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() - 10000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_ok());
    }

    #[test]
    fn nbf_in_future_fails() {
        let mut claims = Map::new();
        claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() + 100000).unwrap());
        let res = validate(&claims, &Validation::default());
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::ImmatureSignature => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn nbf_in_future_but_in_leeway_ok() {
        let mut claims = Map::new();
        claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() + 500).unwrap());
        let validation = Validation {
            leeway: 1000 * 60,
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn iss_ok() {
        let mut claims = Map::new();
        claims.insert("iss".to_string(), to_value("Keats").unwrap());
        let validation = Validation {
            iss: Some("Keats".to_string()),
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn iss_not_matching_fails() {
        let mut claims = Map::new();
        claims.insert("iss".to_string(), to_value("Hacked").unwrap());
        let validation = Validation {
            iss: Some("Keats".to_string()),
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::InvalidIssuer => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn sub_ok() {
        let mut claims = Map::new();
        claims.insert("sub".to_string(), to_value("Keats").unwrap());
        let validation = Validation {
            sub: Some("Keats".to_string()),
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn sub_not_matching_fails() {
        let mut claims = Map::new();
        claims.insert("sub".to_string(), to_value("Hacked").unwrap());
        let validation = Validation {
            sub: Some("Keats".to_string()),
            ..Default::default()
        };
        let res = validate(&claims, &validation);
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::InvalidSubject => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn aud_string_ok() {
        let mut claims = Map::new();
        claims.insert("aud".to_string(), to_value("Everyone").unwrap());
        let mut validation = Validation::default();
        validation.set_audience(&"Everyone");
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn aud_array_of_string_ok() {
        let mut claims = Map::new();
        claims.insert("aud".to_string(), to_value(["UserA", "UserB"]).unwrap());
        let mut validation = Validation::default();
        validation.set_audience(&["UserA", "UserB"]);
        let res = validate(&claims, &validation);
        assert!(res.is_ok());
    }

    #[test]
    fn aud_type_mismatch_fails() {
        let mut claims = Map::new();
        claims.insert("aud".to_string(), to_value("Everyone").unwrap());
        let mut validation = Validation::default();
        validation.set_audience(&["UserA", "UserB"]);
        let res = validate(&claims, &validation);
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::InvalidAudience => (),
            _ => assert!(false),
        };
    }

    #[test]
    fn aud_correct_type_not_matching_fails() {
        let mut claims = Map::new();
        claims.insert("aud".to_string(), to_value("Everyone").unwrap());
        let mut validation = Validation::default();
        validation.set_audience(&"None");
        let res = validate(&claims, &validation);
        assert!(res.is_err());

        match res.unwrap_err().kind() {
            &ErrorKind::InvalidAudience => (),
            _ => assert!(false),
        };
    }
}
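
A brief sketch of tuning the removed Validation struct the way its doc comment describes, with a 60-second leeway and a pinned issuer and audience; the issuer and audience strings are placeholders, not values taken from this diff.

extern crate jsonwebtoken;

use jsonwebtoken::{Algorithm, Validation};

fn main() {
    // Accept only HS256, tolerate 60 seconds of clock skew on exp/iat/nbf,
    // and require specific iss and aud claims. The values are placeholders.
    let mut validation = Validation::new(Algorithm::HS256);
    validation.leeway = 60;
    validation.iss = Some("example-issuer".to_string());
    validation.set_audience(&"example-audience");
}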
macros/Cargo.toml (Normal file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "macros"
version = "0.1.0"
edition = "2021"

[lib]
name = "macros"
path = "src/lib.rs"
proc-macro = true

[dependencies]
quote = "1.0.40"
syn = "2.0.104"

[lints]
workspace = true

macros/src/lib.rs (Normal file, 56 lines)
@@ -0,0 +1,56 @@
use proc_macro::TokenStream;
use quote::quote;

#[proc_macro_derive(UuidFromParam)]
pub fn derive_uuid_from_param(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();

    impl_derive_uuid_macro(&ast)
}

fn impl_derive_uuid_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();

            #[inline(always)]
            fn from_param(param: &'r str) -> Result<Self, Self::Error> {
                if uuid::Uuid::parse_str(param).is_ok() {
                    Ok(Self(param.to_string()))
                } else {
                    Err(())
                }
            }
        }
    };
    gen_derive.into()
}

#[proc_macro_derive(IdFromParam)]
pub fn derive_id_from_param(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();

    impl_derive_safestring_macro(&ast)
}

fn impl_derive_safestring_macro(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let gen_derive = quote! {
        #[automatically_derived]
        impl<'r> rocket::request::FromParam<'r> for #name {
            type Error = ();

            #[inline(always)]
            fn from_param(param: &'r str) -> Result<Self, Self::Error> {
                if param.chars().all(|c| matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '-')) {
                    Ok(Self(param.to_string()))
                } else {
                    Err(())
                }
            }
        }
    };
    gen_derive.into()
}
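
A minimal sketch of how these derives might be used, assuming a crate that depends on macros, rocket and uuid (the generated impls reference rocket::request::FromParam and uuid::Uuid); the CipherId and AttachmentId newtypes here are illustrative, not taken from this diff.

use rocket::request::FromParam;

use macros::{IdFromParam, UuidFromParam};

// Hypothetical newtypes; the derives generate `FromParam` impls so Rocket can
// use them directly as typed path parameters like `/ciphers/<cipher_id>`.
#[derive(UuidFromParam)]
struct CipherId(String);

#[derive(IdFromParam)]
struct AttachmentId(String);

fn main() {
    // A well-formed UUID parses; anything else is rejected with Err(()).
    assert!(CipherId::from_param("0b2a7926-2a73-4e87-9631-4b0f3f1dfa98").is_ok());
    assert!(CipherId::from_param("../../etc/passwd").is_err());

    // IdFromParam only allows [a-zA-Z0-9-], which keeps path ids URL-safe.
    assert!(AttachmentId::from_param("abc123-DEF").is_ok());
    assert!(AttachmentId::from_param("a/b").is_err());
}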
migrations/mysql/2018-01-14-171611_create_tables/up.sql (Normal file, 62 lines)
@@ -0,0 +1,62 @@
CREATE TABLE users (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    email VARCHAR(255) NOT NULL UNIQUE,
    name TEXT NOT NULL,
    password_hash BLOB NOT NULL,
    salt BLOB NOT NULL,
    password_iterations INTEGER NOT NULL,
    password_hint TEXT,
    `key` TEXT NOT NULL,
    private_key TEXT,
    public_key TEXT,
    totp_secret TEXT,
    totp_recover TEXT,
    security_stamp TEXT NOT NULL,
    equivalent_domains TEXT NOT NULL,
    excluded_globals TEXT NOT NULL
);

CREATE TABLE devices (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL,
    type INTEGER NOT NULL,
    push_token TEXT,
    refresh_token TEXT NOT NULL
);

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    folder_uuid CHAR(36) REFERENCES folders (uuid),
    organization_uuid CHAR(36),
    type INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL
);

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL
);

CREATE TABLE folders (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    name TEXT NOT NULL
);
@@ -0,0 +1,30 @@
CREATE TABLE collections (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    org_uuid VARCHAR(40) NOT NULL REFERENCES organizations (uuid),
    name TEXT NOT NULL
);

CREATE TABLE organizations (
    uuid VARCHAR(40) NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    billing_email TEXT NOT NULL
);

CREATE TABLE users_collections (
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (user_uuid, collection_uuid)
);

CREATE TABLE users_organizations (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),

    access_all BOOLEAN NOT NULL,
    `key` TEXT NOT NULL,
    status INTEGER NOT NULL,
    type INTEGER NOT NULL,

    UNIQUE (user_uuid, org_uuid)
);
@@ -0,0 +1,34 @@
ALTER TABLE ciphers RENAME TO oldCiphers;

CREATE TABLE ciphers (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL,
    user_uuid CHAR(36) REFERENCES users (uuid), -- Make this optional
    organization_uuid CHAR(36) REFERENCES organizations (uuid), -- Add reference to orgs table
    -- Remove folder_uuid
    type INTEGER NOT NULL,
    name TEXT NOT NULL,
    notes TEXT,
    fields TEXT,
    data TEXT NOT NULL,
    favorite BOOLEAN NOT NULL
);

CREATE TABLE folders_ciphers (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    folder_uuid CHAR(36) NOT NULL REFERENCES folders (uuid),

    PRIMARY KEY (cipher_uuid, folder_uuid)
);

INSERT INTO ciphers (uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite)
SELECT uuid, created_at, updated_at, user_uuid, organization_uuid, type, name, notes, fields, data, favorite FROM oldCiphers;

INSERT INTO folders_ciphers (cipher_uuid, folder_uuid)
SELECT uuid, folder_uuid FROM oldCiphers WHERE folder_uuid IS NOT NULL;

DROP TABLE oldCiphers;

ALTER TABLE users_collections ADD COLUMN read_only BOOLEAN NOT NULL DEFAULT 0; -- False
@@ -0,0 +1,5 @@
CREATE TABLE ciphers_collections (
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    collection_uuid CHAR(36) NOT NULL REFERENCES collections (uuid),
    PRIMARY KEY (cipher_uuid, collection_uuid)
);
@@ -0,0 +1,14 @@
ALTER TABLE attachments RENAME TO oldAttachments;

CREATE TABLE attachments (
    id CHAR(36) NOT NULL PRIMARY KEY,
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers (uuid),
    file_name TEXT NOT NULL,
    file_size INTEGER NOT NULL
);

INSERT INTO attachments (id, cipher_uuid, file_name, file_size)
SELECT id, cipher_uuid, file_name, file_size FROM oldAttachments;

DROP TABLE oldAttachments;
@@ -0,0 +1,8 @@
UPDATE users
SET totp_secret = (
    SELECT twofactor.data FROM twofactor
    WHERE twofactor.type = 0
    AND twofactor.user_uuid = users.uuid
);

DROP TABLE twofactor;
@@ -0,0 +1,15 @@
CREATE TABLE twofactor (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) NOT NULL REFERENCES users (uuid),
    type INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,

    UNIQUE (user_uuid, type)
);

INSERT INTO twofactor (uuid, user_uuid, type, enabled, data)
SELECT UUID(), uuid, 0, 1, u.totp_secret FROM users u where u.totp_secret IS NOT NULL;

UPDATE users SET totp_secret = NULL; -- Instead of recreating the table, just leave the columns empty
migrations/mysql/2018-08-27-172114_update_ciphers/up.sql (Normal file, 3 lines)
@@ -0,0 +1,3 @@
ALTER TABLE ciphers
    ADD COLUMN
    password_history TEXT;

migrations/mysql/2018-09-10-111213_add_invites/down.sql (Normal file, 1 line)
@@ -0,0 +1 @@
DROP TABLE invitations;

migrations/mysql/2018-09-10-111213_add_invites/up.sql (Normal file, 3 lines)
@@ -0,0 +1,3 @@
CREATE TABLE invitations (
    email VARCHAR(255) NOT NULL PRIMARY KEY
);
@@ -0,0 +1,7 @@
ALTER TABLE users
    ADD COLUMN
    client_kdf_type INTEGER NOT NULL DEFAULT 0; -- PBKDF2

ALTER TABLE users
    ADD COLUMN
    client_kdf_iter INTEGER NOT NULL DEFAULT 100000;
@@ -0,0 +1,3 @@
ALTER TABLE attachments
    ADD COLUMN
    `key` TEXT;
@@ -0,0 +1,7 @@
ALTER TABLE attachments CHANGE COLUMN akey `key` TEXT;
ALTER TABLE ciphers CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN atype type INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN akey `key` TEXT;
ALTER TABLE users_organizations CHANGE COLUMN atype type INTEGER NOT NULL;
@@ -0,0 +1,7 @@
ALTER TABLE attachments CHANGE COLUMN `key` akey TEXT;
ALTER TABLE ciphers CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE devices CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE twofactor CHANGE COLUMN type atype INTEGER NOT NULL;
ALTER TABLE users CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN `key` akey TEXT;
ALTER TABLE users_organizations CHANGE COLUMN type atype INTEGER NOT NULL;
@@ -0,0 +1 @@
ALTER TABLE twofactor ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;
@@ -0,0 +1 @@

@@ -0,0 +1,5 @@
ALTER TABLE users ADD COLUMN verified_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN last_verifying_at DATETIME DEFAULT NULL;
ALTER TABLE users ADD COLUMN login_verify_count INTEGER NOT NULL DEFAULT 0;
ALTER TABLE users ADD COLUMN email_new VARCHAR(255) DEFAULT NULL;
ALTER TABLE users ADD COLUMN email_new_token VARCHAR(16) DEFAULT NULL;
@@ -0,0 +1 @@
DROP TABLE org_policies;
@@ -0,0 +1,9 @@
CREATE TABLE org_policies (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    org_uuid CHAR(36) NOT NULL REFERENCES organizations (uuid),
    atype INTEGER NOT NULL,
    enabled BOOLEAN NOT NULL,
    data TEXT NOT NULL,

    UNIQUE (org_uuid, atype)
);
@@ -0,0 +1 @@

@@ -0,0 +1,3 @@
ALTER TABLE ciphers
    ADD COLUMN
    deleted_at DATETIME;
@@ -0,0 +1,2 @@
ALTER TABLE users_collections
    ADD COLUMN hide_passwords BOOLEAN NOT NULL DEFAULT FALSE;
@@ -0,0 +1,13 @@
ALTER TABLE ciphers
    ADD COLUMN favorite BOOLEAN NOT NULL DEFAULT FALSE;

-- Transfer favorite status for user-owned ciphers.
UPDATE ciphers
SET favorite = TRUE
WHERE EXISTS (
    SELECT * FROM favorites
    WHERE favorites.user_uuid = ciphers.user_uuid
    AND favorites.cipher_uuid = ciphers.uuid
);

DROP TABLE favorites;
@@ -0,0 +1,16 @@
CREATE TABLE favorites (
    user_uuid CHAR(36) NOT NULL REFERENCES users(uuid),
    cipher_uuid CHAR(36) NOT NULL REFERENCES ciphers(uuid),

    PRIMARY KEY (user_uuid, cipher_uuid)
);

-- Transfer favorite status for user-owned ciphers.
INSERT INTO favorites(user_uuid, cipher_uuid)
SELECT user_uuid, uuid
FROM ciphers
WHERE favorite = TRUE
  AND user_uuid IS NOT NULL;

ALTER TABLE ciphers
    DROP COLUMN favorite;
@@ -0,0 +1 @@
ALTER TABLE users ADD COLUMN enabled BOOLEAN NOT NULL DEFAULT 1;
@@ -0,0 +1 @@
ALTER TABLE users ADD COLUMN stamp_exception TEXT DEFAULT NULL;
migrations/mysql/2021-03-11-190243_add_sends/down.sql (Normal file, 1 line)
@@ -0,0 +1 @@
DROP TABLE sends;

migrations/mysql/2021-03-11-190243_add_sends/up.sql (Normal file, 25 lines)
@@ -0,0 +1,25 @@
CREATE TABLE sends (
    uuid CHAR(36) NOT NULL PRIMARY KEY,
    user_uuid CHAR(36) REFERENCES users (uuid),
    organization_uuid CHAR(36) REFERENCES organizations (uuid),

    name TEXT NOT NULL,
    notes TEXT,

    atype INTEGER NOT NULL,
    data TEXT NOT NULL,
    akey TEXT NOT NULL,
    password_hash BLOB,
    password_salt BLOB,
    password_iter INTEGER,

    max_access_count INTEGER,
    access_count INTEGER NOT NULL,

    creation_date DATETIME NOT NULL,
    revision_date DATETIME NOT NULL,
    expiration_date DATETIME,
    deletion_date DATETIME NOT NULL,

    disabled BOOLEAN NOT NULL
);

migrations/mysql/2021-04-30-233251_add_reprompt/up.sql (Normal file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE ciphers
    ADD COLUMN reprompt INTEGER;

migrations/mysql/2021-05-11-205202_add_hide_email/up.sql (Normal file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE sends
    ADD COLUMN hide_email BOOLEAN;
@@ -0,0 +1,5 @@
ALTER TABLE organizations
    ADD COLUMN private_key TEXT;

ALTER TABLE organizations
    ADD COLUMN public_key TEXT;
Some files were not shown because too many files have changed in this diff.