Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2026-01-16 20:50:33 +00:00)
Compare commits
67 Commits
| SHA1 |
|---|
| 45122bed9e |
| 0876d4a5fd |
| 7d552dbdc8 |
| 9a60eb04c2 |
| 1b99da91fb |
| a64a400c9c |
| 85c0aa1619 |
| 19e78e3509 |
| bf6330374c |
| e639d9063b |
| 4a88e7ec78 |
| 65dad5a9d1 |
| ba9ad14fbb |
| 62c7a4d491 |
| 14e3dcad8e |
| f4a9645b54 |
| 8f7900759f |
| 69ee4a70b4 |
| e4e16ed50f |
| a16c656770 |
| 76b7de15de |
| 8ba6e61fd5 |
| a30a1c9703 |
| 76687e2df7 |
| bf5aefd129 |
| 1fa178d1d3 |
| b7eedbcddc |
| 920371929b |
| 6ddbe84bde |
| 690d0ed1bb |
| 248e7dabc2 |
| 4584cfe3c1 |
| cc646b1519 |
| e501dc6d0e |
| 85ac9783f0 |
| 5b430f22bc |
| d4eb21c2d9 |
| 6bf8a9d93d |
| 605419ae1b |
| b89ffb2731 |
| bda123b1eb |
| 2c94ea075c |
| 4bd8eae07e |
| 5529264c3f |
| 0a5df06e77 |
| 2f9ac61a4e |
| 840cf8740a |
| d8869adf52 |
| a631fc0077 |
| 9e4d372213 |
| d0bf0ab237 |
| e327583aa5 |
| ead2f02cbd |
| c453528dc1 |
| 6ae48aa8c2 |
| 88643fd9d5 |
| 73e0002219 |
| c49ee47de0 |
| 14408396bb |
| 6cbb724069 |
| a2316ca091 |
| c476e19796 |
| 9f393cfd9d |
| 450c4d4d97 |
| 75e62abed0 |
| 97f9eb1320 |
| 53cc8a65af |
.env.template
@@ -82,6 +82,10 @@
 ## Defaults to daily (5 minutes after midnight). Set blank to disable this job.
 # TRASH_PURGE_SCHEDULE="0 5 0 * * *"
 ##
+## Cron schedule of the job that checks for incomplete 2FA logins.
+## Defaults to once every minute. Set blank to disable this job.
+# INCOMPLETE_2FA_SCHEDULE="30 * * * * *"
+##
 ## Cron schedule of the job that sends expiration reminders to emergency access grantors.
 ## Defaults to hourly (5 minutes after the hour). Set blank to disable this job.
 # EMERGENCY_NOTIFICATION_REMINDER_SCHEDULE="0 5 * * * *"
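For reference, these schedule strings use a six-field cron syntax with a leading seconds field (sec min hour day-of-month month day-of-week), as parsed by the job_scheduler crate the server uses. A minimal reading of two of the defaults:

  # run the trash purge at 00:05:00 every day
  TRASH_PURGE_SCHEDULE="0 5 0 * * *"
  # check for incomplete 2FA logins at second 30 of every minute
  INCOMPLETE_2FA_SCHEDULE="30 * * * * *"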
@@ -125,10 +129,32 @@
 ## Number of times to retry the database connection during startup, with 1 second delay between each retry, set to 0 to retry indefinitely
 # DB_CONNECTION_RETRIES=15
 
+## Icon service
+## The predefined icon services are: internal, bitwarden, duckduckgo, google.
+## To specify a custom icon service, set a URL template with exactly one instance of `{}`,
+## which is replaced with the domain. For example: `https://icon.example.com/domain/{}`.
+##
+## `internal` refers to Vaultwarden's built-in icon fetching implementation.
+## If an external service is set, an icon request to Vaultwarden will return an HTTP
+## redirect to the corresponding icon at the external service. An external service may
+## be useful if your Vaultwarden instance has no external network connectivity, or if
+## you are concerned that someone may probe your instance to try to detect whether icons
+## for certain sites have been cached.
+# ICON_SERVICE=internal
+
+## Icon redirect code
+## The HTTP status code to use for redirects to an external icon service.
+## The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent).
+## Temporary redirects are useful while testing different icon services, but once a service
+## has been decided on, consider using permanent redirects for cacheability. The legacy codes
+## are currently better supported by the Bitwarden clients.
+# ICON_REDIRECT_CODE=302
+
 ## Disable icon downloading
-## Set to true to disable icon downloading, this would still serve icons from $ICON_CACHE_FOLDER,
-## but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0,
-## otherwise it will delete them and they won't be downloaded again.
+## Set to true to disable icon downloading in the internal icon service.
+## This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external
+## network requests. $ICON_CACHE_TTL must also be set to 0; otherwise, the existing icons
+## will be deleted eventually, but won't be downloaded again.
 # DISABLE_ICON_DOWNLOAD=false
 
 ## Icon download timeout
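A hypothetical custom configuration, to illustrate how the `{}` template and the redirect code interact (the icons.example.com and vw.example.com hosts are made up):

  ICON_SERVICE=https://icons.example.com/{}
  ICON_REDIRECT_CODE=308
  # A client fetching an icon for github.com from this instance would then
  # receive a 308 redirect to https://icons.example.com/github.com instead
  # of an icon fetched and cached by Vaultwarden itself.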
@@ -159,7 +185,7 @@
 # EMAIL_EXPIRATION_TIME=600
 
 ## Email token size
-## Number of digits in an email token (min: 6, max: 19).
+## Number of digits in an email 2FA token (min: 6, max: 255).
 ## Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting!
 # EMAIL_TOKEN_SIZE=6
 
@@ -220,6 +246,13 @@
 ## This setting applies globally, so make sure to inform all users of any changes to this setting.
 # TRASH_AUTO_DELETE_DAYS=
 
+## Number of minutes to wait before a 2FA-enabled login is considered incomplete,
+## resulting in an email notification. An incomplete 2FA login is one where the correct
+## master password was provided but the required 2FA step was not completed, which
+## potentially indicates a master password compromise. Set to 0 to disable this check.
+## This setting applies globally to all users.
+# INCOMPLETE_2FA_TIME_LIMIT=3
+
 ## Controls the PBBKDF password iterations to apply on the server
 ## The change only applies when the password is changed
 # PASSWORD_ITERATIONS=100000
@@ -243,6 +276,17 @@
 ## Multiple values must be separated with a whitespace.
 # ALLOWED_IFRAME_ANCESTORS=
 
+## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in.
+# LOGIN_RATELIMIT_SECONDS=60
+## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`.
+## Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2.
+# LOGIN_RATELIMIT_MAX_BURST=10
+
+## Number of seconds, on average, between admin requests from the same IP address before rate limiting kicks in.
+# ADMIN_RATELIMIT_SECONDS=300
+## Allow a burst of requests of up to this size, while maintaining the average indicated by `ADMIN_RATELIMIT_SECONDS`.
+# ADMIN_RATELIMIT_MAX_BURST=3
+
 ## Yubico (Yubikey) Settings
 ## Set your Client ID and Secret Key for Yubikey OTP
 ## You can generate it here: https://upgrade.yubico.com/getapikey/
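A worked reading of the login defaults, assuming the token-bucket model used by the governor crate that this release adds to Cargo.toml (MAX_BURST is the bucket size, SECONDS the average refill interval):

  LOGIN_RATELIMIT_SECONDS=60    # one request "token" refills every 60 seconds on average
  LOGIN_RATELIMIT_MAX_BURST=10  # at most 10 tokens can be saved up
  # A fresh IP may fire 10 login/2FA requests back to back, after which it is
  # held to roughly one request per minute until tokens refill.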
640 Cargo.lock (generated; file diff suppressed because it is too large)
36 Cargo.toml
@@ -2,8 +2,8 @@
 name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
-edition = "2018"
-rust-version = "1.57"
+edition = "2021"
+rust-version = "1.60"
 resolver = "2"
 
 repository = "https://github.com/dani-garcia/vaultwarden"
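The rust-version bump is enforced at build time: recent cargo versions refuse to compile the crate with a toolchain older than the declared minimum. A quick pre-build check (sketch):

  rustc --version   # should report 1.60.0 or newer
  cargo build --features sqlite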
@@ -34,11 +34,11 @@ rocket = { version = "=0.5.0-dev", features = ["tls"], default-features = false
 rocket_contrib = "=0.5.0-dev"
 
 # HTTP client
-reqwest = { version = "0.11.5", features = ["blocking", "json", "gzip", "brotli", "socks", "cookies"] }
+reqwest = { version = "0.11.8", features = ["blocking", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] }
 
 # Used for custom short lived cookie jar
 cookie = "0.15.1"
-cookie_store = "0.15.0"
+cookie_store = "0.15.1"
 bytes = "1.1.0"
 url = "2.2.2"
 
@@ -46,7 +46,7 @@ url = "2.2.2"
 multipart = { version = "0.18.0", features = ["server"], default-features = false }
 
 # WebSockets library
-ws = { version = "0.11.0", package = "parity-ws" }
+ws = { version = "0.11.1", package = "parity-ws" }
 
 # MessagePack library
 rmpv = "1.0.0"
@@ -55,8 +55,8 @@ rmpv = "1.0.0"
 chashmap = "2.2.2"
 
 # A generic serialization/deserialization framework
-serde = { version = "1.0.130", features = ["derive"] }
-serde_json = "1.0.68"
+serde = { version = "1.0.132", features = ["derive"] }
+serde_json = "1.0.73"
 
 # Logging
 log = "0.4.14"
@@ -78,7 +78,7 @@ uuid = { version = "0.8.2", features = ["v4"] }
 
 # Date and time libraries
 chrono = { version = "0.4.19", features = ["serde"] }
-chrono-tz = "0.6.0"
+chrono-tz = "0.6.1"
 time = "0.2.27"
 
 # Job scheduler
@@ -95,7 +95,7 @@ jsonwebtoken = "7.2.0"
 
 # U2F library
 u2f = "0.2.0"
-webauthn-rs = "0.3.0-alpha.12"
+webauthn-rs = "0.3.1"
 
 # Yubico Library
 yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false }
@@ -104,7 +104,7 @@ yubico = { version = "0.10.0", features = ["online-tokio"], default-features = f
 dotenv = { version = "0.15.0", default-features = false }
 
 # Lazy initialization
-once_cell = "1.8.0"
+once_cell = "1.9.0"
 
 # Numerical libraries
 num-traits = "0.2.14"
@@ -112,19 +112,19 @@ num-derive = "0.3.3"
 
 # Email libraries
 tracing = { version = "0.1.29", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled.
-lettre = { version = "0.10.0-rc.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
+lettre = { version = "0.10.0-rc.4", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
 
 # Template library
-handlebars = { version = "4.1.3", features = ["dir_source"] }
+handlebars = { version = "4.1.6", features = ["dir_source"] }
 
 # For favicon extraction from main website
 html5ever = "0.25.1"
 markup5ever_rcdom = "0.1.0"
 regex = { version = "1.5.4", features = ["std", "perf", "unicode-perl"], default-features = false }
-data-url = "0.1.0"
+data-url = "0.1.1"
 
 # Used by U2F, JWT and Postgres
-openssl = "0.10.36"
+openssl = "0.10.38"
 
 # URL encoding library
 percent-encoding = "2.1.0"
@@ -135,19 +135,17 @@ idna = "0.2.3"
 pico-args = "0.4.2"
 
 # Logging panics to logfile instead stderr only
-backtrace = "0.3.61"
+backtrace = "0.3.63"
 
 # Macro ident concatenation
-paste = "1.0.5"
+paste = "1.0.6"
+governor = "0.3.2"
 
 [patch.crates-io]
 # Use newest ring
 rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429de1913ce7e3036575a7b4d88b6d7' }
 rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429de1913ce7e3036575a7b4d88b6d7' }
 
-# For favicon extraction from main website
-data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = 'eb7330b5296c0d43816d1346211b74182bb4ae37' }
-
 # The maintainer of the `job_scheduler` crate doesn't seem to have responded
 # to any issues or PRs for almost a year (as of April 2021). This hopefully
 # temporary fork updates Cargo.toml to use more up-to-date dependencies.
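The [patch.crates-io] entries pin Rocket to a fixed git revision; after changing a pin or one of the versions above, the lockfile has to pick the change up, e.g. (sketch):

  cargo update -p rocket   # refresh the pinned dependency in Cargo.lock
  cargo build --features sqlite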
41 build.rs
@@ -15,11 +15,14 @@ fn main() {
         "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
     );
 
-    if let Ok(version) = env::var("BWRS_VERSION") {
-        println!("cargo:rustc-env=BWRS_VERSION={}", version);
+    // Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION.
+    // If neither exist, read from git.
+    let maybe_vaultwarden_version =
+        env::var("VW_VERSION").or_else(|_| env::var("BWRS_VERSION")).or_else(|_| version_from_git_info());
+
+    if let Ok(version) = maybe_vaultwarden_version {
+        println!("cargo:rustc-env=VW_VERSION={}", version);
         println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
-    } else {
-        read_git_info().ok();
     }
 }
 
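The practical effect is a precedence order for the reported version. A shell sketch of the three paths:

  VW_VERSION=1.24.0 cargo build --features sqlite    # explicit version wins
  BWRS_VERSION=1.24.0 cargo build --features sqlite  # legacy variable, still honored
  cargo build --features sqlite                      # falls back to git metadata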
@@ -33,7 +36,13 @@ fn run(args: &[&str]) -> Result<String, std::io::Error> {
 }
 
 /// This method reads info from Git, namely tags, branch, and revision
-fn read_git_info() -> Result<(), std::io::Error> {
+/// To access these values, use:
+///   - env!("GIT_EXACT_TAG")
+///   - env!("GIT_LAST_TAG")
+///   - env!("GIT_BRANCH")
+///   - env!("GIT_REV")
+///   - env!("VW_VERSION")
+fn version_from_git_info() -> Result<String, std::io::Error> {
     // The exact tag for the current commit, can be empty when
     // the current commit doesn't have an associated tag
     let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok();
@@ -56,23 +65,11 @@ fn read_git_info() -> Result<(), std::io::Error> {
     println!("cargo:rustc-env=GIT_REV={}", rev_short);
 
     // Combined version
-    let version = if let Some(exact) = exact_tag {
-        exact
+    if let Some(exact) = exact_tag {
+        Ok(exact)
     } else if &branch != "main" && &branch != "master" {
-        format!("{}-{} ({})", last_tag, rev_short, branch)
+        Ok(format!("{}-{} ({})", last_tag, rev_short, branch))
     } else {
-        format!("{}-{}", last_tag, rev_short)
-    };
-
-    println!("cargo:rustc-env=BWRS_VERSION={}", version);
-    println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
-
-    // To access these values, use:
-    //   env!("GIT_EXACT_TAG")
-    //   env!("GIT_LAST_TAG")
-    //   env!("GIT_BRANCH")
-    //   env!("GIT_REV")
-    //   env!("BWRS_VERSION")
-
-    Ok(())
+        Ok(format!("{}-{}", last_tag, rev_short))
+    }
 }
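Worked examples of the combined version string this now returns (the tag names, branch name, and abc1234 revision below are hypothetical):

  # commit exactly at tag 1.24.0                 -> "1.24.0"
  # past tag 1.23.0 on feature branch "fix-2fa"  -> "1.23.0-abc1234 (fix-2fa)"
  # past tag 1.23.0 on "main" or "master"        -> "1.23.0-abc1234"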
docker/Dockerfile.j2
@@ -3,16 +3,24 @@
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
 
-{% set build_stage_base_image = "rust:1.55-buster" %}
+{% set build_stage_base_image = "rust:1.58-buster" %}
 {% if "alpine" in target_file %}
 {% if "amd64" in target_file %}
-{% set build_stage_base_image = "clux/muslrust:nightly-2021-10-06" %}
-{% set runtime_stage_base_image = "alpine:3.14" %}
+{% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-nightly-2022-01-23" %}
+{% set runtime_stage_base_image = "alpine:3.15" %}
 {% set package_arch_target = "x86_64-unknown-linux-musl" %}
 {% elif "armv7" in target_file %}
-{% set build_stage_base_image = "messense/rust-musl-cross:armv7-musleabihf" %}
-{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.14" %}
+{% set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23" %}
+{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.15" %}
 {% set package_arch_target = "armv7-unknown-linux-musleabihf" %}
+{% elif "armv6" in target_file %}
+{% set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-nightly-2022-01-23" %}
+{% set runtime_stage_base_image = "balenalib/rpi-alpine:3.15" %}
+{% set package_arch_target = "arm-unknown-linux-musleabi" %}
+{% elif "arm64" in target_file %}
+{% set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-nightly-2022-01-23" %}
+{% set runtime_stage_base_image = "balenalib/aarch64-alpine:3.15" %}
+{% set package_arch_target = "aarch64-unknown-linux-musl" %}
 {% endif %}
 {% elif "amd64" in target_file %}
 {% set runtime_stage_base_image = "debian:buster-slim" %}
@@ -51,8 +59,8 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-{% set vault_version = "2.23.0c" %}
-{% set vault_image_digest = "sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459" %}
+{% set vault_version = "2.25.1b" %}
+{% set vault_image_digest = "sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba" %}
 # The web-vault digest specifies a particular web-vault build on Docker Hub.
 # Using the digest instead of the tag name provides better security,
 # as the digest of an image is immutable, whereas a tag name can later
@@ -75,22 +83,7 @@ FROM vaultwarden/web-vault@{{ vault_image_digest }} as vault
 ########################## BUILD IMAGE ##########################
 FROM {{ build_stage_base_image }} as build
 
-{% if "alpine" in target_file %}
-{% if "amd64" in target_file %}
-# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time.
-ARG DB=sqlite,postgresql
-{% set features = "sqlite,postgresql" %}
-{% else %}
-# Alpine-based ARM (musl) only supports sqlite during compile time.
-# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed.
-ARG DB=sqlite,vendored_openssl
-{% set features = "sqlite" %}
-{% endif %}
-{% else %}
-# Debian-based builds support multidb
-ARG DB=sqlite,mysql,postgresql
-{% set features = "sqlite,mysql,postgresql" %}
-{% endif %}
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -118,51 +111,32 @@ ENV RUSTFLAGS='-C link-arg=-s'
 ENV CFLAGS_armv7_unknown_linux_musleabihf="-mfpu=vfpv3-d16"
 {% endif %}
 {% elif "arm" in target_file %}
-# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
-# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
-# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
-# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the {{ package_arch_prefix }} version.
-# What we can do is a force install, because nothing important is overlapping each other.
-#
 # Install required build libs for {{ package_arch_name }} architecture.
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture {{ package_arch_name }} \
+# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
+# hadolint ignore=DL3059
+RUN dpkg --add-architecture {{ package_arch_name }} \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
        libssl-dev{{ package_arch_prefix }} \
        libc6-dev{{ package_arch_prefix }} \
        libpq5{{ package_arch_prefix }} \
-        libpq-dev \
-        libmariadb3:amd64 \
+        libpq-dev{{ package_arch_prefix }} \
+        libmariadb3{{ package_arch_prefix }} \
+        libmariadb-dev{{ package_arch_prefix }} \
        libmariadb-dev-compat{{ package_arch_prefix }} \
        gcc-{{ package_cross_compiler }} \
-    #
-    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
-    && apt-get download libmariadb-dev-compat:amd64 \
-    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
-    && rm -rvf ./libmariadb-dev-compat*.deb \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* \
-    #
-    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
-    # The libpq5{{ package_arch_prefix }} package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
-    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
-    # Without this specific file the ld command will fail and compilation fails with it.
-    && ln -sfnr /usr/lib/{{ package_cross_compiler }}/libpq.so.5 /usr/lib/{{ package_cross_compiler }}/libpq.so \
     #
     # Make sure cargo has the right target config
     && echo '[target.{{ package_arch_target }}]' >> "${CARGO_HOME}/config" \
     && echo 'linker = "{{ package_cross_compiler }}-gcc"' >> "${CARGO_HOME}/config" \
     && echo 'rustflags = ["-L/usr/lib/{{ package_cross_compiler }}"]' >> "${CARGO_HOME}/config"
 
 # Set arm specific environment values
-ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_compiler }}-gcc"
-ENV CROSS_COMPILE="1"
-ENV OPENSSL_INCLUDE_DIR="/usr/include/{{ package_cross_compiler }}"
-ENV OPENSSL_LIB_DIR="/usr/lib/{{ package_cross_compiler }}"
+ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_compiler }}-gcc" \
+    CROSS_COMPILE="1" \
+    OPENSSL_INCLUDE_DIR="/usr/include/{{ package_cross_compiler }}" \
+    OPENSSL_LIB_DIR="/usr/lib/{{ package_cross_compiler }}"
 
 {% elif "amd64" in target_file %}
 # Install DB packages
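After this RUN step, ${CARGO_HOME}/config ends up containing a cross-target section like the following (a sketch with the Jinja2 variables rendered for an armv7 Debian build):

  cat "${CARGO_HOME}/config"
  # [target.armv7-unknown-linux-gnueabihf]
  # linker = "arm-linux-gnueabihf-gcc"
  # rustflags = ["-L/usr/lib/arm-linux-gnueabihf"]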
@@ -188,6 +162,9 @@ COPY ./build.rs ./build.rs
 RUN {{ mount_rust_cache -}} rustup target add {{ package_arch_target }}
 {% endif %}
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
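Moving the ARG this late is a layer-caching play: changing a build arg invalidates every layer after the instruction that declares it, so declaring DB after the apt-get and rustup layers keeps those cached across builds with different feature sets, e.g. (sketch):

  docker build --build-arg DB=sqlite,postgresql -t vaultwarden .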
@@ -203,6 +180,7 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN {{ mount_rust_cache -}} cargo build --features ${DB} --release{{ package_arch_target_param }}
 {% if "alpine" in target_file %}
 {% if "armv7" in target_file %}
@@ -216,13 +194,14 @@ RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden
 # because we already have a binary built
 FROM {{ runtime_stage_base_image }}
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
-{% if "alpine" in runtime_stage_base_image %}
-ENV SSL_CERT_DIR=/etc/ssl/certs
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10
+{%- if "alpine" in runtime_stage_base_image %} \
+    SSL_CERT_DIR=/etc/ssl/certs
 {% endif %}
+
 
 {% if "amd64" not in target_file %}
 # hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
@@ -236,12 +215,6 @@ RUN mkdir /data \
         tzdata \
         curl \
         dumb-init \
-{% if "mysql" in features %}
-        mariadb-connector-c \
-{% endif %}
-{% if "postgresql" in features %}
-        postgresql-libs \
-{% endif %}
         ca-certificates
 {% else %}
     && apt-get update && apt-get install -y \
docker/amd64/Dockerfile
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.55-buster as build
+FROM rust:1.58-buster as build
 
-# Debian-based builds support multidb
-ARG DB=sqlite,mysql,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -64,6 +63,9 @@ COPY ./rust-toolchain ./rust-toolchain
 COPY ./build.rs ./build.rs
 
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -79,6 +81,7 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release
 
 ######################## RUNTIME IMAGE ########################
@@ -86,9 +89,9 @@ RUN cargo build --features ${DB} --release
 # because we already have a binary built
 FROM debian:buster-slim
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10
 
 
 # Create data folder and Install needed libraries
docker/amd64/Dockerfile.alpine
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM clux/muslrust:nightly-2021-10-06 as build
+FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build
 
-# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time.
-ARG DB=sqlite,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -58,6 +57,9 @@ COPY ./build.rs ./build.rs
 
 RUN rustup target add x86_64-unknown-linux-musl
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -73,17 +75,19 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.14
+FROM alpine:3.15
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
-ENV SSL_CERT_DIR=/etc/ssl/certs
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10 \
+    SSL_CERT_DIR=/etc/ssl/certs
+
 
 
 # Create data folder and Install needed libraries
@@ -93,7 +97,6 @@ RUN mkdir /data \
         tzdata \
         curl \
         dumb-init \
-        postgresql-libs \
         ca-certificates
 
 
docker/amd64/Dockerfile.buildx
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.55-buster as build
+FROM rust:1.58-buster as build
 
-# Debian-based builds support multidb
-ARG DB=sqlite,mysql,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -64,6 +63,9 @@ COPY ./rust-toolchain ./rust-toolchain
 COPY ./build.rs ./build.rs
 
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -79,6 +81,7 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release
 
 ######################## RUNTIME IMAGE ########################
@@ -86,9 +89,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
 # because we already have a binary built
 FROM debian:buster-slim
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10
 
 
 # Create data folder and Install needed libraries
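The --mount=type=cache flags appearing in this file are BuildKit syntax, so the image only builds with BuildKit enabled. A sketch of an invocation, assuming the repository's docker/<arch>/Dockerfile.buildx layout:

  DOCKER_BUILDKIT=1 docker build -t vaultwarden -f docker/amd64/Dockerfile.buildx .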
docker/amd64/Dockerfile.buildx.alpine
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM clux/muslrust:nightly-2021-10-06 as build
+FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build
 
-# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time.
-ARG DB=sqlite,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -58,6 +57,9 @@ COPY ./build.rs ./build.rs
 
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add x86_64-unknown-linux-musl
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -73,17 +75,19 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl
 
 ######################## RUNTIME IMAGE ########################
 # Create a new stage with a minimal image
 # because we already have a binary built
-FROM alpine:3.14
+FROM alpine:3.15
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
-ENV SSL_CERT_DIR=/etc/ssl/certs
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10 \
+    SSL_CERT_DIR=/etc/ssl/certs
+
 
 
 # Create data folder and Install needed libraries
@@ -93,7 +97,6 @@ RUN mkdir /data \
         tzdata \
         curl \
         dumb-init \
-        postgresql-libs \
         ca-certificates
 
 
docker/arm64/Dockerfile
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.55-buster as build
+FROM rust:1.58-buster as build
 
-# Debian-based builds support multidb
-ARG DB=sqlite,mysql,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
 RUN mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
-# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
-# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
-# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :arm64 version.
-# What we can do is a force install, because nothing important is overlapping each other.
-#
 # Install required build libs for arm64 architecture.
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture arm64 \
+# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
+# hadolint ignore=DL3059
+RUN dpkg --add-architecture arm64 \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64 \
        libpq5:arm64 \
-        libpq-dev \
-        libmariadb3:amd64 \
+        libpq-dev:arm64 \
+        libmariadb3:arm64 \
+        libmariadb-dev:arm64 \
        libmariadb-dev-compat:arm64 \
        gcc-aarch64-linux-gnu \
-    #
-    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
-    && apt-get download libmariadb-dev-compat:amd64 \
-    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
-    && rm -rvf ./libmariadb-dev-compat*.deb \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* \
-    #
-    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
-    # The libpq5:arm64 package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
-    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
-    # Without this specific file the ld command will fail and compilation fails with it.
-    && ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so \
     #
     # Make sure cargo has the right target config
     && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \
     && echo 'linker = "aarch64-linux-gnu-gcc"' >> "${CARGO_HOME}/config" \
     && echo 'rustflags = ["-L/usr/lib/aarch64-linux-gnu"]' >> "${CARGO_HOME}/config"
 
 # Set arm specific environment values
-ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
-ENV CROSS_COMPILE="1"
-ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
-ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
+ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \
+    CROSS_COMPILE="1" \
+    OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \
+    OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
 
 
 # Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
 
 RUN rustup target add aarch64-unknown-linux-gnu
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
 
 ######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
 # because we already have a binary built
 FROM balenalib/aarch64-debian:buster
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10
 
 # hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
125 docker/arm64/Dockerfile.alpine (new file)
@@ -0,0 +1,125 @@
# syntax=docker/dockerfile:1

# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

ENV RUSTFLAGS='-C link-arg=-s'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN rustup target add aarch64-unknown-linux-musl

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl \
    && find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

# Create data folder and Install needed libraries
RUN mkdir /data \
    && apk add --no-cache \
        openssl \
        tzdata \
        curl \
        dumb-init \
        ca-certificates

# hadolint ignore=DL3059
RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden .

COPY docker/healthcheck.sh /healthcheck.sh
COPY docker/start.sh /start.sh

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

# Configures the startup!
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["/start.sh"]
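A sketch of how this new file would be built and run; the buildx --platform flag assumes an arm64-capable builder (for example qemu/binfmt emulation on an amd64 host):

  docker buildx build --platform linux/arm64 \
      -f docker/arm64/Dockerfile.alpine -t vaultwarden:arm64-alpine .
  docker run -d -v vw-data:/data -p 80:80 vaultwarden:arm64-alpine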
docker/arm64/Dockerfile.buildx
@@ -16,21 +16,20 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 #   click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-#     $ docker pull vaultwarden/web-vault:v2.23.0c
-#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
-#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
+#     $ docker pull vaultwarden/web-vault:v2.25.1b
+#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
+#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
 #
 # - Conversely, to get the tag name from the digest:
-#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
-#     [vaultwarden/web-vault:v2.23.0c]
+#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
+#     [vaultwarden/web-vault:v2.25.1b]
 #
-FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
+FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault
 
 ########################## BUILD IMAGE ##########################
-FROM rust:1.55-buster as build
+FROM rust:1.58-buster as build
 
-# Debian-based builds support multidb
-ARG DB=sqlite,mysql,postgresql
+
 
 # Build time options to avoid dpkg warnings and help with reproducible builds.
 ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
     && rustup set profile minimal
 
-# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
-# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
-# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
-# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :arm64 version.
-# What we can do is a force install, because nothing important is overlapping each other.
-#
 # Install required build libs for arm64 architecture.
-RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
-    && dpkg --add-architecture arm64 \
+# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
+# hadolint ignore=DL3059
+RUN dpkg --add-architecture arm64 \
     && apt-get update \
     && apt-get install -y \
         --no-install-recommends \
        libssl-dev:arm64 \
        libc6-dev:arm64 \
        libpq5:arm64 \
-        libpq-dev \
-        libmariadb3:amd64 \
+        libpq-dev:arm64 \
+        libmariadb3:arm64 \
+        libmariadb-dev:arm64 \
        libmariadb-dev-compat:arm64 \
        gcc-aarch64-linux-gnu \
-    #
-    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
-    && apt-get download libmariadb-dev-compat:amd64 \
-    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
-    && rm -rvf ./libmariadb-dev-compat*.deb \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* \
-    #
-    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
-    # The libpq5:arm64 package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
-    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
-    # Without this specific file the ld command will fail and compilation fails with it.
-    && ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so \
     #
     # Make sure cargo has the right target config
     && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \
     && echo 'linker = "aarch64-linux-gnu-gcc"' >> "${CARGO_HOME}/config" \
     && echo 'rustflags = ["-L/usr/lib/aarch64-linux-gnu"]' >> "${CARGO_HOME}/config"
 
 # Set arm specific environment values
-ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc"
-ENV CROSS_COMPILE="1"
-ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu"
-ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
+ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \
+    CROSS_COMPILE="1" \
+    OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \
+    OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu"
 
 
 # Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
 
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add aarch64-unknown-linux-gnu
 
+# Configure the DB ARG as late as possible to not invalidate the cached layers above
+ARG DB=sqlite,mysql,postgresql
+
 # Builds your dependencies and removes the
 # dummy project, except the target folder
 # This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
 
 # Builds again, this time it'll just be
 # your actual source files being built
+# hadolint ignore=DL3059
 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu
 
 ######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
 # because we already have a binary built
 FROM balenalib/aarch64-debian:buster
 
-ENV ROCKET_ENV "staging"
-ENV ROCKET_PORT=80
-ENV ROCKET_WORKERS=10
+ENV ROCKET_ENV="staging" \
+    ROCKET_PORT=80 \
+    ROCKET_WORKERS=10
 
 # hadolint ignore=DL3059
 RUN [ "cross-build-start" ]
125 docker/arm64/Dockerfile.buildx.alpine (new file)
@@ -0,0 +1,125 @@
# syntax=docker/dockerfile:1

# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
#   https://docs.docker.com/develop/develop-images/multistage-build/
#   https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"

# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

ENV RUSTFLAGS='-C link-arg=-s'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add aarch64-unknown-linux-musl

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl \
    && find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/aarch64-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

# Create data folder and Install needed libraries
RUN mkdir /data \
    && apk add --no-cache \
        openssl \
        tzdata \
        curl \
        dumb-init \
        ca-certificates

# hadolint ignore=DL3059
RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden .

COPY docker/healthcheck.sh /healthcheck.sh
COPY docker/start.sh /start.sh

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

# Configures the startup!
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["/start.sh"]
@@ -16,21 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM rust:1.55-buster as build
FROM rust:1.58-buster as build

# Debian-based builds support multidb
ARG DB=sqlite,mysql,postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
RUN mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armel version.
# What we can do is a force install, because nothing important is overlapping each other.
#
# Install required build libs for armel architecture.
# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
# hadolint ignore=DL3059
RUN dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:armel \
        libc6-dev:armel \
        libpq5:armel \
        libpq-dev \
        libmariadb3:amd64 \
        libpq-dev:armel \
        libmariadb3:armel \
        libmariadb-dev:armel \
        libmariadb-dev-compat:armel \
        gcc-arm-linux-gnueabi \
    #
    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
    && apt-get download libmariadb-dev-compat:amd64 \
    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
    && rm -rvf ./libmariadb-dev-compat*.deb \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    #
    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
    # The libpq5:armel package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
    # Without this specific file the ld command will fail and compilation fails with it.
    && ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so \
    #
    # Make sure cargo has the right target config
    && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \
    && echo 'linker = "arm-linux-gnueabi-gcc"' >> "${CARGO_HOME}/config" \
    && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabi"]' >> "${CARGO_HOME}/config"

# Set arm specific environment values
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \
    CROSS_COMPILE="1" \
    OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \
    OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"


# Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
RUN rustup target add arm-unknown-linux-gnueabi

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi
# because we already have a binary built
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

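For reference, the three `echo` calls in the hunk above build up a small cargo config for the armel cross-compile target. A minimal sketch of the equivalent in one step, written as a here-doc (path and contents taken directly from the Dockerfile above):

# equivalent of the three `echo` lines: append the armel target config in one go
cat <<'EOF' >> "${CARGO_HOME}/config"
[target.arm-unknown-linux-gnueabi]
linker = "arm-linux-gnueabi-gcc"
rustflags = ["-L/usr/lib/arm-linux-gnueabi"]
EOF
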
docker/armv6/Dockerfile.alpine  (Normal file, 125 lines)
@@ -0,0 +1,125 @@
# syntax=docker/dockerfile:1

# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build


# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"


# Create CARGO_HOME folder and don't download rust docs
RUN mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

ENV RUSTFLAGS='-C link-arg=-s'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN rustup target add arm-unknown-linux-musleabi

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi \
    && find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs


# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

# Create data folder and Install needed libraries
RUN mkdir /data \
    && apk add --no-cache \
        openssl \
        tzdata \
        curl \
        dumb-init \
        ca-certificates

# hadolint ignore=DL3059
RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden .

COPY docker/healthcheck.sh /healthcheck.sh
COPY docker/start.sh /start.sh

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

# Configures the startup!
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["/start.sh"]

@@ -16,21 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM rust:1.55-buster as build
FROM rust:1.58-buster as build

# Debian-based builds support multidb
ARG DB=sqlite,mysql,postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armel version.
# What we can do is a force install, because nothing important is overlapping each other.
#
# Install required build libs for armel architecture.
# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armel \
# hadolint ignore=DL3059
RUN dpkg --add-architecture armel \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:armel \
        libc6-dev:armel \
        libpq5:armel \
        libpq-dev \
        libmariadb3:amd64 \
        libpq-dev:armel \
        libmariadb3:armel \
        libmariadb-dev:armel \
        libmariadb-dev-compat:armel \
        gcc-arm-linux-gnueabi \
    #
    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
    && apt-get download libmariadb-dev-compat:amd64 \
    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
    && rm -rvf ./libmariadb-dev-compat*.deb \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    #
    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
    # The libpq5:armel package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
    # Without this specific file the ld command will fail and compilation fails with it.
    && ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so \
    #
    # Make sure cargo has the right target config
    && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \
    && echo 'linker = "arm-linux-gnueabi-gcc"' >> "${CARGO_HOME}/config" \
    && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabi"]' >> "${CARGO_HOME}/config"

# Set arm specific environment values
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"
ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \
    CROSS_COMPILE="1" \
    OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \
    OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi"


# Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add arm-unknown-linux-gnueabi

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi

######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
# because we already have a binary built
FROM balenalib/rpi-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

docker/armv6/Dockerfile.buildx.alpine  (Normal file, 125 lines)
@@ -0,0 +1,125 @@
# syntax=docker/dockerfile:1

# This file was generated using a Jinja2 template.
# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.

# Using multistage build:
# https://docs.docker.com/develop/develop-images/multistage-build/
# https://whitfin.io/speeding-up-rust-docker-builds/
####################### VAULT BUILD IMAGE #######################
# The web-vault digest specifies a particular web-vault build on Docker Hub.
# Using the digest instead of the tag name provides better security,
# as the digest of an image is immutable, whereas a tag name can later
# be changed to point to a malicious image.
#
# To verify the current digest for a given tag name:
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build


# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
    LANG=C.UTF-8 \
    TZ=UTC \
    TERM=xterm-256color \
    CARGO_HOME="/root/.cargo" \
    USER="root"


# Create CARGO_HOME folder and don't download rust docs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

ENV RUSTFLAGS='-C link-arg=-s'

# Creates a dummy project used to grab dependencies
RUN USER=root cargo new --bin /app
WORKDIR /app

# Copies over *only* your manifests and build files
COPY ./Cargo.* ./
COPY ./rust-toolchain ./rust-toolchain
COPY ./build.rs ./build.rs

RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add arm-unknown-linux-musleabi

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi \
    && find . -not -path "./target*" -delete

# Copies the complete project
# To avoid copying unneeded files, use .dockerignore
COPY . .

# Make sure that we actually build the project
RUN touch src/main.rs

# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi

######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/rpi-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs


# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

# Create data folder and Install needed libraries
RUN mkdir /data \
    && apk add --no-cache \
        openssl \
        tzdata \
        curl \
        dumb-init \
        ca-certificates

# hadolint ignore=DL3059
RUN [ "cross-build-end" ]

VOLUME /data
EXPOSE 80
EXPOSE 3012

# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /
COPY Rocket.toml .
COPY --from=vault /web-vault ./web-vault
COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden .

COPY docker/healthcheck.sh /healthcheck.sh
COPY docker/start.sh /start.sh

HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"]

# Configures the startup!
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["/start.sh"]

@@ -16,21 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM rust:1.55-buster as build
FROM rust:1.58-buster as build

# Debian-based builds support multidb
ARG DB=sqlite,mysql,postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
RUN mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armhf version.
# What we can do is a force install, because nothing important is overlapping each other.
#
# Install required build libs for armhf architecture.
# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
# hadolint ignore=DL3059
RUN dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:armhf \
        libc6-dev:armhf \
        libpq5:armhf \
        libpq-dev \
        libmariadb3:amd64 \
        libpq-dev:armhf \
        libmariadb3:armhf \
        libmariadb-dev:armhf \
        libmariadb-dev-compat:armhf \
        gcc-arm-linux-gnueabihf \
    #
    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
    && apt-get download libmariadb-dev-compat:amd64 \
    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
    && rm -rvf ./libmariadb-dev-compat*.deb \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    #
    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
    # The libpq5:armhf package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
    # Without this specific file the ld command will fail and compilation fails with it.
    && ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so \
    #
    # Make sure cargo has the right target config
    && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \
    && echo 'linker = "arm-linux-gnueabihf-gcc"' >> "${CARGO_HOME}/config" \
    && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabihf"]' >> "${CARGO_HOME}/config"

# Set arm specific environment values
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \
    CROSS_COMPILE="1" \
    OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \
    OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"


# Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
RUN rustup target add armv7-unknown-linux-gnueabihf

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabih
# because we already have a binary built
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

@@ -16,22 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM messense/rust-musl-cross:armv7-musleabihf as build
FROM blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build

# Alpine-based ARM (musl) only supports sqlite during compile time.
# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed.
ARG DB=sqlite,vendored_openssl

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -60,6 +58,9 @@ COPY ./build.rs ./build.rs
RUN rustup target add armv7-unknown-linux-musleabihf

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -75,6 +76,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
# hadolint ignore=DL3059
RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden
@@ -82,12 +84,13 @@ RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-alpine:3.14
FROM balenalib/armv7hf-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

@@ -16,21 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM rust:1.55-buster as build
FROM rust:1.58-buster as build

# Debian-based builds support multidb
ARG DB=sqlite,mysql,postgresql

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \
    && rustup set profile minimal

# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies.
# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic.
# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client)
# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armhf version.
# What we can do is a force install, because nothing important is overlapping each other.
#
# Install required build libs for armhf architecture.
# To compile both mysql and postgresql we need some extra packages for both host arch and target arch
RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \
    && dpkg --add-architecture armhf \
# hadolint ignore=DL3059
RUN dpkg --add-architecture armhf \
    && apt-get update \
    && apt-get install -y \
        --no-install-recommends \
        libssl-dev:armhf \
        libc6-dev:armhf \
        libpq5:armhf \
        libpq-dev \
        libmariadb3:amd64 \
        libpq-dev:armhf \
        libmariadb3:armhf \
        libmariadb-dev:armhf \
        libmariadb-dev-compat:armhf \
        gcc-arm-linux-gnueabihf \
    #
    # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt )
    && apt-get download libmariadb-dev-compat:amd64 \
    && dpkg --force-all -i ./libmariadb-dev-compat*.deb \
    && rm -rvf ./libmariadb-dev-compat*.deb \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    #
    # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic.
    # The libpq5:armhf package seems to not provide a symlink to libpq.so.5 with the name libpq.so.
    # This is only provided by the libpq-dev package which can't be installed for both arch at the same time.
    # Without this specific file the ld command will fail and compilation fails with it.
    && ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so \
    #
    # Make sure cargo has the right target config
    && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \
    && echo 'linker = "arm-linux-gnueabihf-gcc"' >> "${CARGO_HOME}/config" \
    && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabihf"]' >> "${CARGO_HOME}/config"

# Set arm specific environment values
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc"
ENV CROSS_COMPILE="1"
ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf"
ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"
ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \
    CROSS_COMPILE="1" \
    OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \
    OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf"


# Creates a dummy project used to grab dependencies
@@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add armv7-unknown-linux-gnueabihf

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -118,6 +101,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf

######################## RUNTIME IMAGE ########################
@@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.
# because we already have a binary built
FROM balenalib/armv7hf-debian:buster

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

@@ -16,22 +16,20 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
#   click the tag name to view the digest of the image it currently points to.
# - From the command line:
#     $ docker pull vaultwarden/web-vault:v2.23.0c
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.23.0c
#     [vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459]
#     $ docker pull vaultwarden/web-vault:v2.25.1b
#     $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1b
#     [vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba]
#
# - Conversely, to get the tag name from the digest:
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459
#     [vaultwarden/web-vault:v2.23.0c]
#     $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba
#     [vaultwarden/web-vault:v2.25.1b]
#
FROM vaultwarden/web-vault@sha256:dc94d303def3583af08816e91803f1c42107645612440f474f553f0cb0f97459 as vault
FROM vaultwarden/web-vault@sha256:9b82318d553d72f091e8755f5aff80eed495f90bbe5b0703522953480f5c2fba as vault

########################## BUILD IMAGE ##########################
FROM messense/rust-musl-cross:armv7-musleabihf as build
FROM blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build

# Alpine-based ARM (musl) only supports sqlite during compile time.
# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed.
ARG DB=sqlite,vendored_openssl

# Build time options to avoid dpkg warnings and help with reproducible builds.
ENV DEBIAN_FRONTEND=noninteractive \
@@ -60,6 +58,9 @@ COPY ./build.rs ./build.rs
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add armv7-unknown-linux-musleabihf

# Configure the DB ARG as late as possible to not invalidate the cached layers above
ARG DB=sqlite,mysql,postgresql

# Builds your dependencies and removes the
# dummy project, except the target folder
# This folder contains the compiled dependencies
@@ -75,6 +76,7 @@ RUN touch src/main.rs
# Builds again, this time it'll just be
# your actual source files being built
# hadolint ignore=DL3059
RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf
# hadolint ignore=DL3059
RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden
@@ -82,12 +84,13 @@ RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden
######################## RUNTIME IMAGE ########################
# Create a new stage with a minimal image
# because we already have a binary built
FROM balenalib/armv7hf-alpine:3.14
FROM balenalib/armv7hf-alpine:3.15

ENV ROCKET_ENV="staging" \
    ROCKET_PORT=80 \
    ROCKET_WORKERS=10 \
    SSL_CERT_DIR=/etc/ssl/certs

ENV ROCKET_ENV "staging"
ENV ROCKET_PORT=80
ENV ROCKET_WORKERS=10
ENV SSL_CERT_DIR=/etc/ssl/certs

# hadolint ignore=DL3059
RUN [ "cross-build-start" ]

@@ -7,10 +7,5 @@ arches=(
)

if [[ "${DOCKER_TAG}" == *alpine ]]; then
    # The Alpine image build currently only works for certain arches.
    distro_suffix=.alpine
    arches=(
        amd64
        armv7
    )
fi

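The hook change above narrows the Alpine builds to amd64 and armv7 by matching on the image tag suffix. A quick standalone illustration of the same bash pattern match, using a made-up tag value:

# DOCKER_TAG value is a hypothetical example, not from this changeset
DOCKER_TAG="1.24.0-alpine"
if [[ "${DOCKER_TAG}" == *alpine ]]; then
    echo "alpine build: restricting arches to amd64 and armv7"
fi
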
@@ -0,0 +1 @@
DROP TABLE twofactor_incomplete;
@@ -0,0 +1,9 @@
CREATE TABLE twofactor_incomplete (
    user_uuid   CHAR(36) NOT NULL REFERENCES users(uuid),
    device_uuid CHAR(36) NOT NULL,
    device_name TEXT     NOT NULL,
    login_time  DATETIME NOT NULL,
    ip_address  TEXT     NOT NULL,

    PRIMARY KEY (user_uuid, device_uuid)
);
migrations/mysql/2022-01-17-234911_add_api_key/up.sql  (Normal file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE users
ADD COLUMN api_key VARCHAR(255);
@@ -0,0 +1 @@
DROP TABLE twofactor_incomplete;
@@ -0,0 +1,9 @@
CREATE TABLE twofactor_incomplete (
    user_uuid   VARCHAR(40) NOT NULL REFERENCES users(uuid),
    device_uuid VARCHAR(40) NOT NULL,
    device_name TEXT        NOT NULL,
    login_time  TIMESTAMP   NOT NULL,
    ip_address  TEXT        NOT NULL,

    PRIMARY KEY (user_uuid, device_uuid)
);
@@ -0,0 +1,2 @@
ALTER TABLE users
ADD COLUMN api_key TEXT;
@@ -0,0 +1 @@
DROP TABLE twofactor_incomplete;
@@ -0,0 +1,9 @@
CREATE TABLE twofactor_incomplete (
    user_uuid   TEXT     NOT NULL REFERENCES users(uuid),
    device_uuid TEXT     NOT NULL,
    device_name TEXT     NOT NULL,
    login_time  DATETIME NOT NULL,
    ip_address  TEXT     NOT NULL,

    PRIMARY KEY (user_uuid, device_uuid)
);
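The new twofactor_incomplete table records each login that has passed password validation but not yet completed its second factor, keyed by user and device. As a rough illustration only, an ad-hoc query like the following would list rows older than a few minutes on a SQLite instance; the database path and the 5-minute window are assumptions for demonstration, not values from this changeset:

# hypothetical spot check with the sqlite3 CLI; adjust the path and window
sqlite3 /data/db.sqlite3 \
  "SELECT user_uuid, device_name, ip_address
     FROM twofactor_incomplete
    WHERE login_time < datetime('now', '-5 minutes');"
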
migrations/sqlite/2022-01-17-234911_add_api_key/up.sql  (Normal file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE users
ADD COLUMN api_key TEXT;
@@ -1 +1 @@
nightly-2021-10-14
nightly-2022-01-23

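The pinned toolchain moves from nightly-2021-10-14 to nightly-2022-01-23. rustup normally picks this up automatically from the rust-toolchain file on the next build, but it can also be installed ahead of time; a minimal sketch, assuming rustup is on PATH:

# pre-install the pinned toolchain so the first build doesn't have to fetch it
rustup toolchain install nightly-2022-01-23
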
@@ -1,7 +1,7 @@
use once_cell::sync::Lazy;
use serde::de::DeserializeOwned;
use serde_json::Value;
use std::{env, time::Duration};
use std::env;

use rocket::{
    http::{Cookie, Cookies, SameSite, Status},
@@ -21,7 +21,7 @@ use crate::{
    util::{
        docker_base_image, format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker,
    },
    CONFIG,
    CONFIG, VERSION,
};

pub fn routes() -> Vec<Route> {
@@ -74,11 +74,10 @@ fn admin_disabled() -> &'static str {
    "The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it"
}

const COOKIE_NAME: &str = "BWRS_ADMIN";
const COOKIE_NAME: &str = "VW_ADMIN";
const ADMIN_PATH: &str = "/admin";

const BASE_TEMPLATE: &str = "admin/base";
const VERSION: Option<&str> = option_env!("BWRS_VERSION");

fn admin_path() -> String {
    format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
@@ -166,6 +165,10 @@ fn post_admin_login(
) -> Result<Redirect, Flash<Redirect>> {
    let data = data.into_inner();

    if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
        return Err(Flash::error(Redirect::to(admin_url(referer)), "Too many requests, try again later."));
    }

    // If the token is invalid, redirect to login page
    if !_validate_token(&data.token) {
        error!("Invalid admin token. IP: {}", ip.ip);
@@ -236,7 +239,7 @@ impl AdminTemplateData {
}

#[get("/", rank = 1)]
fn admin_page(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
fn admin_page(_token: AdminToken) -> ApiResult<Html<String>> {
    let text = AdminTemplateData::new().render()?;
    Ok(Html(text))
}
@@ -462,13 +465,13 @@ struct GitCommit {
fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
    let github_api = get_reqwest_client();

    Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::<T>()?)
    Ok(github_api.get(url).send()?.error_for_status()?.json::<T>()?)
}

fn has_http_access() -> bool {
    let http_access = get_reqwest_client();

    match http_access.head("https://github.com/dani-garcia/vaultwarden").timeout(Duration::from_secs(10)).send() {
    match http_access.head("https://github.com/dani-garcia/vaultwarden").send() {
        Ok(r) => r.status().is_success(),
        _ => false,
    }
@@ -482,7 +485,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
    // Get current running versions
    let web_vault_version: WebVaultVersion =
        match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) {
        match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
            Ok(s) => serde_json::from_str(&s)?,
            _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
                Ok(s) => serde_json::from_str(&s)?,
@@ -494,7 +497,6 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
    // Execute some environment checks
    let running_within_docker = is_running_in_docker();
    let docker_base_image = docker_base_image();
    let has_http_access = has_http_access();
    let uses_proxy = env::var_os("HTTP_PROXY").is_some()
        || env::var_os("http_proxy").is_some()
@@ -552,7 +554,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
        "web_vault_version": web_vault_version.version,
        "latest_web_build": latest_web_build,
        "running_within_docker": running_within_docker,
        "docker_base_image": docker_base_image,
        "docker_base_image": docker_base_image(),
        "has_http_access": has_http_access,
        "ip_header_exists": &ip_header.0.is_some(),
        "ip_header_match": ip_header_name == CONFIG.ip_header(),

@@ -34,6 +34,8 @@ pub fn routes() -> Vec<rocket::Route> {
        password_hint,
        prelogin,
        verify_password,
        api_key,
        rotate_api_key,
    ]
}

@@ -87,14 +89,11 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
            user_org.status = UserOrgStatus::Accepted as i32;
            user_org.save(&conn)?;
        }

        user
    } else if EmergencyAccess::find_invited_by_grantee_email(&email, &conn).is_some() {
        user
    } else if CONFIG.is_signup_allowed(&email) {
        // check if it's invited by emergency contact
        match EmergencyAccess::find_invited_by_grantee_email(&data.Email, &conn) {
            Some(_) => user,
            _ => err!("Account with this email already exists"),
        }
        err!("Account with this email already exists")
    } else {
        err!("Registration not allowed or user already exists")
    }
@@ -382,7 +381,7 @@ fn post_email_token(data: JsonUpcase<EmailTokenData>, headers: Headers, conn: Db
        err!("Email domain not allowed");
    }

    let token = crypto::generate_token(6)?;
    let token = crypto::generate_email_token(6);

    if CONFIG.mail_enabled() {
        if let Err(e) = mail::send_change_email(&data.NewEmail, &token) {
@@ -454,7 +453,7 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
}

#[post("/accounts/verify-email")]
fn post_verify_email(headers: Headers, _conn: DbConn) -> EmptyResult {
fn post_verify_email(headers: Headers) -> EmptyResult {
    let user = headers.user;

    if !CONFIG.mail_enabled() {
@@ -647,15 +646,17 @@ fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> Json<Value> {
        "KdfIterations": kdf_iter
    }))
}

// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs
#[derive(Deserialize)]
#[allow(non_snake_case)]
struct VerifyPasswordData {
struct SecretVerificationRequest {
    MasterPasswordHash: String,
}

#[post("/accounts/verify-password", data = "<data>")]
fn verify_password(data: JsonUpcase<VerifyPasswordData>, headers: Headers, _conn: DbConn) -> EmptyResult {
    let data: VerifyPasswordData = data.into_inner().data;
fn verify_password(data: JsonUpcase<SecretVerificationRequest>, headers: Headers) -> EmptyResult {
    let data: SecretVerificationRequest = data.into_inner().data;
    let user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
@@ -664,3 +665,32 @@ fn verify_password(data: JsonUpcase<VerifyPasswordData>, headers: Headers, _conn
    Ok(())
}

fn _api_key(data: JsonUpcase<SecretVerificationRequest>, rotate: bool, headers: Headers, conn: DbConn) -> JsonResult {
    let data: SecretVerificationRequest = data.into_inner().data;
    let mut user = headers.user;

    if !user.check_valid_password(&data.MasterPasswordHash) {
        err!("Invalid password")
    }

    if rotate || user.api_key.is_none() {
        user.api_key = Some(crypto::generate_api_key());
        user.save(&conn).expect("Error saving API key");
    }

    Ok(Json(json!({
        "ApiKey": user.api_key,
        "Object": "apiKey",
    })))
}

#[post("/accounts/api-key", data = "<data>")]
fn api_key(data: JsonUpcase<SecretVerificationRequest>, headers: Headers, conn: DbConn) -> JsonResult {
    _api_key(data, false, headers, conn)
}

#[post("/accounts/rotate-api-key", data = "<data>")]
fn rotate_api_key(data: JsonUpcase<SecretVerificationRequest>, headers: Headers, conn: DbConn) -> JsonResult {
    _api_key(data, true, headers, conn)
}

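The two new routes wrap _api_key: /accounts/api-key returns the existing key (generating one on first use), while /accounts/rotate-api-key always generates a fresh one, and both require the master password hash as proof. A hypothetical client call might look like the following; the host, bearer token, and the /api mount prefix are assumptions for illustration, and MasterPasswordHash is the client-side derived hash, never the plain password:

# hypothetical request to fetch (or lazily create) the account API key
curl -s -X POST "https://vault.example.com/api/accounts/api-key" \
  -H "Authorization: Bearer ${ACCESS_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"MasterPasswordHash": "<client-side password hash>"}'
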
@@ -182,7 +182,7 @@ fn send_invite(data: JsonUpcase<EmergencyAccessInviteData>, headers: Headers, co
    let grantee_user = match User::find_by_mail(&email, &conn) {
        None => {
            if !CONFIG.signups_allowed() {
            if !CONFIG.invitations_allowed() {
                err!(format!("Grantee user does not exist: {}", email))
            }

@@ -539,7 +539,6 @@ fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> J
    };

    emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
    emergency_access.key_encrypted = None;
    emergency_access.save(&conn)?;

    if CONFIG.mail_enabled() {

@@ -9,6 +9,7 @@ pub mod two_factor;
pub use ciphers::purge_trashed_ciphers;
pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
pub use sends::purge_sends;
pub use two_factor::send_incomplete_2fa_notifications;

pub fn routes() -> Vec<Route> {
    let mut mod_routes =
@@ -169,7 +170,7 @@ fn hibp_breach(username: String) -> JsonResult {
        "BreachDate": "2019-08-18T00:00:00Z",
        "AddedDate": "2019-08-18T00:00:00Z",
        "Description": format!("Go to: <a href=\"https://haveibeenpwned.com/account/{account}\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/account/{account}</a> for a manual check.<br/><br/>HaveIBeenPwned API key not set!<br/>Go to <a href=\"https://haveibeenpwned.com/API/Key\" target=\"_blank\" rel=\"noreferrer\">https://haveibeenpwned.com/API/Key</a> to purchase an API key from HaveIBeenPwned.<br/><br/>", account=username),
        "LogoPath": "bwrs_static/hibp.png",
        "LogoPath": "vw_static/hibp.png",
        "PwnCount": 0,
        "DataClasses": [
            "Error - No API key set!"


@@ -235,7 +235,7 @@ fn get_user_collections(headers: Headers, conn: DbConn) -> Json<Value> {
}

#[get("/organizations/<org_id>/collections")]
-fn get_org_collections(org_id: String, _headers: AdminHeaders, conn: DbConn) -> Json<Value> {
+fn get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json<Value> {
    Json(json!({
        "Data":
            Collection::find_by_organization(&org_id, &conn)
@@ -1294,71 +1294,43 @@ fn put_policy(

#[allow(unused_variables)]
#[get("/organizations/<org_id>/tax")]
-fn get_organization_tax(org_id: String, _headers: Headers, _conn: DbConn) -> EmptyResult {
+fn get_organization_tax(org_id: String, _headers: Headers) -> Json<Value> {
    // Prevent a 404 error, which also causes Javascript errors.
-   err!("Only allowed when not self hosted.")
+   // Upstream sends "Only allowed when not self hosted." as an error message.
+   // If we do the same it will also output this to the log, which is overkill.
+   // An empty list/data also works fine.
+   Json(_empty_data_json())
}

#[get("/plans")]
-fn get_plans(_headers: Headers, _conn: DbConn) -> Json<Value> {
+fn get_plans(_headers: Headers) -> Json<Value> {
    // Respond with a minimal json, just enough to allow the creation of a new organization.
    Json(json!({
        "Object": "list",
-       "Data": [
-           {
+       "Data": [{
            "Object": "plan",
            "Type": 0,
            "Product": 0,
            "Name": "Free",
            "IsAnnual": false,
            "NameLocalizationKey": "planNameFree",
            "DescriptionLocalizationKey": "planDescFree",
            "CanBeUsedByBusiness": false,
            "BaseSeats": 2,
            "BaseStorageGb": null,
            "MaxCollections": 2,
            "MaxUsers": 2,
            "HasAdditionalSeatsOption": false,
            "MaxAdditionalSeats": null,
            "HasAdditionalStorageOption": false,
            "MaxAdditionalStorage": null,
            "HasPremiumAccessOption": false,
            "TrialPeriodDays": null,
            "HasSelfHost": false,
            "HasPolicies": false,
            "HasGroups": false,
            "HasDirectory": false,
            "HasEvents": false,
            "HasTotp": false,
            "Has2fa": false,
            "HasApi": false,
            "HasSso": false,
            "UsersGetPremium": false,
            "UpgradeSortOrder": -1,
            "DisplaySortOrder": -1,
            "LegacyYear": null,
            "Disabled": false,
            "StripePlanId": null,
            "StripeSeatPlanId": null,
            "StripeStoragePlanId": null,
            "StripePremiumAccessPlanId": null,
            "BasePrice": 0.0,
            "SeatPrice": 0.0,
            "AdditionalStoragePricePerGb": 0.0,
            "PremiumAccessOptionPrice": 0.0
-           }
-       ],
-       "DescriptionLocalizationKey": "planDescFree"
+       }],
        "ContinuationToken": null
    }))
}

#[get("/plans/sales-tax-rates")]
-fn get_plans_tax_rates(_headers: Headers, _conn: DbConn) -> Json<Value> {
+fn get_plans_tax_rates(_headers: Headers) -> Json<Value> {
    // Prevent a 404 error, which also causes Javascript errors.
-   Json(json!({
+   Json(_empty_data_json())
+}
+
+fn _empty_data_json() -> Value {
+   json!({
        "Object": "list",
        "Data": [],
        "ContinuationToken": null
-   }))
+   })
}

#[derive(Deserialize, Debug)]

@@ -7,7 +7,7 @@ use rocket_contrib::json::Json;
use serde_json::Value;

use crate::{
-   api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType},
+   api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, UpdateType},
    auth::{Headers, Host},
    db::{models::*, DbConn, DbPool},
    util::SafeString,

@@ -42,21 +42,21 @@ pub fn purge_sends(pool: DbPool) {

#[derive(Deserialize)]
#[allow(non_snake_case)]
-pub struct SendData {
-   pub Type: i32,
-   pub Key: String,
-   pub Password: Option<String>,
-   pub MaxAccessCount: Option<i32>,
-   pub ExpirationDate: Option<DateTime<Utc>>,
-   pub DeletionDate: DateTime<Utc>,
-   pub Disabled: bool,
-   pub HideEmail: Option<bool>,
+struct SendData {
+   Type: i32,
+   Key: String,
+   Password: Option<String>,
+   MaxAccessCount: Option<NumberOrString>,
+   ExpirationDate: Option<DateTime<Utc>>,
+   DeletionDate: DateTime<Utc>,
+   Disabled: bool,
+   HideEmail: Option<bool>,

    // Data field
-   pub Name: String,
-   pub Notes: Option<String>,
-   pub Text: Option<Value>,
-   pub File: Option<Value>,
+   Name: String,
+   Notes: Option<String>,
+   Text: Option<Value>,
+   File: Option<Value>,
}

/// Enforces the `Disable Send` policy. A non-owner/admin user belonging to
@@ -119,7 +119,10 @@ fn create_send(data: SendData, user_uuid: String) -> ApiResult<Send> {
    let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc());
    send.user_uuid = Some(user_uuid);
    send.notes = data.Notes;
-   send.max_access_count = data.MaxAccessCount;
+   send.max_access_count = match data.MaxAccessCount {
+       Some(m) => Some(m.into_i32()?),
+       _ => None,
+   };
    send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
    send.disabled = data.Disabled;
    send.hide_email = data.HideEmail;

@@ -414,7 +417,10 @@ fn put_send(id: String, data: JsonUpcase<SendData>, headers: Headers, conn: DbCo
    send.akey = data.Key;
    send.deletion_date = data.DeletionDate.naive_utc();
    send.notes = data.Notes;
-   send.max_access_count = data.MaxAccessCount;
+   send.max_access_count = match data.MaxAccessCount {
+       Some(m) => Some(m.into_i32()?),
+       _ => None,
+   };
    send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc());
    send.hide_email = data.HideEmail;
    send.disabled = data.Disabled;
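Note: MaxAccessCount is now accepted as either a JSON number or a string, since some clients serialize it as a string. A minimal sketch of the untagged-enum pattern behind this (vaultwarden's own NumberOrString lives in src/api/mod.rs and returns its ApiResult type; the error type here is simplified):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum NumberOrString {
    Number(i32),
    String(String),
}

impl NumberOrString {
    fn into_i32(self) -> Result<i32, std::num::ParseIntError> {
        match self {
            NumberOrString::Number(n) => Ok(n),
            NumberOrString::String(s) => s.parse::<i32>(),
        }
    }
}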

@@ -58,7 +58,7 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult {
    let type_ = TwoFactorType::Email as i32;
    let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, conn).map_res("Two factor not found")?;

-   let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
+   let generated_token = crypto::generate_email_token(CONFIG.email_token_size());

    let mut twofactor_data = EmailTokenData::from_json(&twofactor.data)?;
    twofactor_data.set_token(generated_token);

@@ -123,7 +123,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) -
        tf.delete(&conn)?;
    }

-   let generated_token = crypto::generate_token(CONFIG.email_token_size())?;
+   let generated_token = crypto::generate_email_token(CONFIG.email_token_size());
    let twofactor_data = EmailTokenData::new(data.Email, generated_token);

    // Uses EmailVerificationChallenge as type to show that it's not verified yet.

@@ -309,18 +309,4 @@ mod tests {
        // If it's smaller than 3 characters it should only show asterisks.
        assert_eq!(result, "***@example.ext");
    }
-
-   #[test]
-   fn test_token() {
-       let result = crypto::generate_token(19).unwrap();
-
-       assert_eq!(result.chars().count(), 19);
-   }
-
-   #[test]
-   fn test_token_too_large() {
-       let result = crypto::generate_token(20);
-
-       assert!(result.is_err(), "too large token should give an error");
-   }
}

@@ -1,3 +1,4 @@
+use chrono::{Duration, Utc};
use data_encoding::BASE32;
use rocket::Route;
use rocket_contrib::json::Json;

@@ -7,7 +8,7 @@ use crate::{
    api::{JsonResult, JsonUpcase, NumberOrString, PasswordData},
    auth::Headers,
    crypto,
-   db::{models::*, DbConn},
+   db::{models::*, DbConn, DbPool},
    mail, CONFIG,
};

@@ -156,3 +157,33 @@ fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, c
fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
    disable_twofactor(data, headers, conn)
}
+
+pub fn send_incomplete_2fa_notifications(pool: DbPool) {
+    debug!("Sending notifications for incomplete 2FA logins");
+
+    if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() {
+        return;
+    }
+
+    let conn = match pool.get() {
+        Ok(conn) => conn,
+        _ => {
+            error!("Failed to get DB connection in send_incomplete_2fa_notifications()");
+            return;
+        }
+    };
+
+    let now = Utc::now().naive_utc();
+    let time_limit = Duration::minutes(CONFIG.incomplete_2fa_time_limit());
+    let incomplete_logins = TwoFactorIncomplete::find_logins_before(&(now - time_limit), &conn);
+    for login in incomplete_logins {
+        let user = User::find_by_uuid(&login.user_uuid, &conn).expect("User not found");
+        info!(
+            "User {} did not complete a 2FA login within the configured time limit. IP: {}",
+            user.email, login.ip_address
+        );
+        mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name)
+            .expect("Error sending incomplete 2FA email");
+        login.delete(&conn).expect("Error deleting incomplete 2FA record");
+    }
+}
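Note: this job is driven by the INCOMPLETE_2FA_SCHEDULE cron entry. A simplified sketch of the wiring (the real scheduler loop in vaultwarden's main.rs is more involved; names here are illustrative only):

// Illustrative wiring with the job_scheduler crate, under stated assumptions.
use job_scheduler::{Job, JobScheduler};
use std::time::Duration;

fn schedule_jobs(pool: crate::db::DbPool) {
    let mut sched = JobScheduler::new();
    // "30 * * * * *" = once a minute, at second 30 (the default schedule).
    sched.add(Job::new("30 * * * * *".parse().unwrap(), move || {
        crate::api::send_incomplete_2fa_notifications(pool.clone());
    }));
    loop {
        sched.tick();
        std::thread::sleep(Duration::from_millis(500));
    }
}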

@@ -1,6 +1,7 @@
use rocket::Route;
use rocket_contrib::json::Json;
use serde_json::Value;
+use url::Url;
use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState, RegistrationState, Webauthn};

use crate::{

@@ -22,7 +23,7 @@ pub fn routes() -> Vec<Route> {

struct WebauthnConfig {
    url: String,
-   origin: String,
+   origin: Url,
    rpid: String,
}

@@ -31,13 +32,9 @@ impl WebauthnConfig {
        let domain = CONFIG.domain();
        let domain_origin = CONFIG.domain_origin();
        Webauthn::new(Self {
-           rpid: reqwest::Url::parse(&domain)
-               .map(|u| u.domain().map(str::to_owned))
-               .ok()
-               .flatten()
-               .unwrap_or_default(),
+           rpid: Url::parse(&domain).map(|u| u.domain().map(str::to_owned)).ok().flatten().unwrap_or_default(),
            url: domain,
-           origin: domain_origin,
+           origin: Url::parse(&domain_origin).unwrap(),
        })
    }
}

@@ -47,7 +44,7 @@ impl webauthn_rs::WebauthnConfig for WebauthnConfig {
        &self.url
    }

-   fn get_origin(&self) -> &str {
+   fn get_origin(&self) -> &Url {
        &self.origin
    }

@@ -10,7 +10,11 @@ use std::{
use once_cell::sync::Lazy;
use regex::Regex;
use reqwest::{blocking::Client, blocking::Response, header};
-use rocket::{http::ContentType, response::Content, Route};
+use rocket::{
+    http::ContentType,
+    response::{Content, Redirect},
+    Route,
+};

use crate::{
    error::Error,

@@ -19,7 +23,13 @@ use crate::{
};

pub fn routes() -> Vec<Route> {
-   routes![icon]
+   match CONFIG.icon_service().as_str() {
+       "internal" => routes![icon_internal],
+       "bitwarden" => routes![icon_bitwarden],
+       "duckduckgo" => routes![icon_duckduckgo],
+       "google" => routes![icon_google],
+       _ => routes![icon_custom],
+   }
}

static CLIENT: Lazy<Client> = Lazy::new(|| {

@@ -50,8 +60,51 @@ static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+
// Special HashMap which holds the user defined Regex to speedup matching the regex.
static ICON_BLACKLIST_REGEX: Lazy<RwLock<HashMap<String, Regex>>> = Lazy::new(|| RwLock::new(HashMap::new()));

+fn icon_redirect(domain: &str, template: &str) -> Option<Redirect> {
+    if !is_valid_domain(domain) {
+        warn!("Invalid domain: {}", domain);
+        return None;
+    }
+
+    if is_domain_blacklisted(domain) {
+        return None;
+    }
+
+    let url = template.replace("{}", domain);
+    match CONFIG.icon_redirect_code() {
+        301 => Some(Redirect::moved(url)),  // legacy permanent redirect
+        302 => Some(Redirect::found(url)),  // legacy temporary redirect
+        307 => Some(Redirect::temporary(url)),
+        308 => Some(Redirect::permanent(url)),
+        _ => {
+            error!("Unexpected redirect code {}", CONFIG.icon_redirect_code());
+            None
+        }
+    }
+}
+
#[get("/<domain>/icon.png")]
-fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
+fn icon_custom(domain: String) -> Option<Redirect> {
+    icon_redirect(&domain, &CONFIG.icon_service())
+}
+
+#[get("/<domain>/icon.png")]
+fn icon_bitwarden(domain: String) -> Option<Redirect> {
+    icon_redirect(&domain, "https://icons.bitwarden.net/{}/icon.png")
+}
+
+#[get("/<domain>/icon.png")]
+fn icon_duckduckgo(domain: String) -> Option<Redirect> {
+    icon_redirect(&domain, "https://icons.duckduckgo.com/ip3/{}.ico")
+}
+
+#[get("/<domain>/icon.png")]
+fn icon_google(domain: String) -> Option<Redirect> {
+    icon_redirect(&domain, "https://www.google.com/s2/favicons?domain={}&sz=32")
+}
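Note: for a custom ICON_SERVICE, the `{}` placeholder in the template is substituted with the requested domain before redirecting. For example (the template URL here is a hypothetical value):

// Hypothetical custom template, as it would be set via ICON_SERVICE.
let template = "https://icon.example.com/domain/{}";
assert_eq!(template.replace("{}", "github.com"), "https://icon.example.com/domain/github.com");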

#[get("/<domain>/icon.png")]
+fn icon_internal(domain: String) -> Cached<Content<Vec<u8>>> {
    const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png");

    if !is_valid_domain(&domain) {

@@ -59,14 +112,19 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> {
        return Cached::ttl(
            Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
            CONFIG.icon_cache_negttl(),
            true,
        );
    }

    match get_icon(&domain) {
        Some((icon, icon_type)) => {
-           Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
+           Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl(), true)
        }
-       _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
+       _ => Cached::ttl(
+           Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
+           CONFIG.icon_cache_negttl(),
+           true,
+       ),
    }
}

@@ -250,7 +308,7 @@ fn is_domain_blacklisted(domain: &str) -> bool {

    // Use the pre-generated Regex stored in a Lazy HashMap.
    if regex.is_match(domain) {
-       warn!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain);
+       debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain);
        is_blacklisted = true;
    }
}

@@ -286,7 +344,7 @@ fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> {
        Some((icon, icon_type.unwrap_or("x-icon").to_string()))
    }
    Err(e) => {
-       error!("Error downloading icon: {:?}", e);
+       warn!("Unable to download icon: {:?}", e);
        let miss_indicator = path + ".miss";
        save_icon(&miss_indicator, &[]);
        None

@@ -555,7 +613,7 @@ fn get_page(url: &str) -> Result<Response, Error> {

fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Error> {
    if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()) {
-       err!("Favicon resolves to a blacklisted domain or IP!", url);
+       warn!("Favicon '{}' resolves to a blacklisted domain or IP!", url);
    }

    let mut client = CLIENT.get(url);

@@ -713,10 +771,10 @@ fn save_icon(path: &str, icon: &[u8]) {
        f.write_all(icon).expect("Error writing icon file");
    }
    Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => {
-       create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache");
+       create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache folder");
    }
    Err(e) => {
-       warn!("Icon save error: {:?}", e);
+       warn!("Unable to save icon: {:?}", e);
    }
}
}

@@ -1,4 +1,4 @@
-use chrono::Local;
+use chrono::Utc;
use num_traits::FromPrimitive;
use rocket::{
    request::{Form, FormItems, FromForm},

@@ -43,6 +43,13 @@ fn login(data: Form<ConnectData>, conn: DbConn, ip: ClientIp) -> JsonResult {

            _password_login(data, conn, &ip)
        }
+       "client_credentials" => {
+           _check_is_some(&data.client_id, "client_id cannot be blank")?;
+           _check_is_some(&data.client_secret, "client_secret cannot be blank")?;
+           _check_is_some(&data.scope, "scope cannot be blank")?;
+
+           _api_key_login(data, conn, &ip)
+       }
        t => err!("Invalid type", t),
    }
}

@@ -54,13 +61,15 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
    // Get device by refresh token
    let mut device = Device::find_by_refresh_token(&token, &conn).map_res("Invalid refresh token")?;

-   // COMMON
+   let scope = "api offline_access";
+   let scope_vec = vec!["api".into(), "offline_access".into()];

+   // Common
    let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap();
    let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn);

-   let (access_token, expires_in) = device.refresh_tokens(&user, orgs);
+   let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
    device.save(&conn)?;

    Ok(Json(json!({
        "access_token": access_token,
        "expires_in": expires_in,

@@ -72,7 +81,7 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
        "Kdf": user.client_kdf_type,
        "KdfIterations": user.client_kdf_iter,
        "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
-       "scope": "api offline_access",
+       "scope": scope,
        "unofficialServer": true,
    })))
}

@@ -83,6 +92,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
    if scope != "api offline_access" {
        err!("Scope not supported")
    }
+   let scope_vec = vec!["api".into(), "offline_access".into()];
+
+   // Ratelimit the login
+   crate::ratelimit::check_limit_login(&ip.ip)?;

    // Get the user
    let username = data.username.as_ref().unwrap();

@@ -102,10 +115,9 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
        err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username))
    }

-   let now = Local::now();
+   let now = Utc::now().naive_utc();

    if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() {
-       let now = now.naive_utc();
        if user.last_verifying_at.is_none()
            || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds()
                > CONFIG.signups_verify_resend_time() as i64

@@ -148,8 +160,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult

    // Common
    let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn);
-
-   let (access_token, expires_in) = device.refresh_tokens(&user, orgs);
+   let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
    device.save(&conn)?;

    let mut result = json!({

@@ -164,7 +175,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
        "Kdf": user.client_kdf_type,
        "KdfIterations": user.client_kdf_iter,
        "ResetMasterPassword": false, // TODO: Same as above
-       "scope": "api offline_access",
+       "scope": scope,
        "unofficialServer": true,
    });

@@ -176,6 +187,76 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult
    Ok(Json(result))
}

+fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult {
+    // Validate scope
+    let scope = data.scope.as_ref().unwrap();
+    if scope != "api" {
+        err!("Scope not supported")
+    }
+    let scope_vec = vec!["api".into()];
+
+    // Ratelimit the login
+    crate::ratelimit::check_limit_login(&ip.ip)?;
+
+    // Get the user via the client_id
+    let client_id = data.client_id.as_ref().unwrap();
+    let user_uuid = match client_id.strip_prefix("user.") {
+        Some(uuid) => uuid,
+        None => err!("Malformed client_id", format!("IP: {}.", ip.ip)),
+    };
+    let user = match User::find_by_uuid(user_uuid, &conn) {
+        Some(user) => user,
+        None => err!("Invalid client_id", format!("IP: {}.", ip.ip)),
+    };
+
+    // Check if the user is disabled
+    if !user.enabled {
+        err!("This user has been disabled (API key login)", format!("IP: {}. Username: {}.", ip.ip, user.email))
+    }
+
+    // Check API key. Note that API key logins bypass 2FA.
+    let client_secret = data.client_secret.as_ref().unwrap();
+    if !user.check_valid_api_key(client_secret) {
+        err!("Incorrect client_secret", format!("IP: {}. Username: {}.", ip.ip, user.email))
+    }
+
+    let (mut device, new_device) = get_device(&data, &conn, &user);
+
+    if CONFIG.mail_enabled() && new_device {
+        let now = Utc::now().naive_utc();
+        if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) {
+            error!("Error sending new device email: {:#?}", e);
+
+            if CONFIG.require_device_email() {
+                err!("Could not send login notification email. Please contact your administrator.")
+            }
+        }
+    }
+
+    // Common
+    let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn);
+    let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec);
+    device.save(&conn)?;
+
+    info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip);
+
+    // Note: No refresh_token is returned. The CLI just repeats the
+    // client_credentials login flow when the existing token expires.
+    Ok(Json(json!({
+        "access_token": access_token,
+        "expires_in": expires_in,
+        "token_type": "Bearer",
+        "Key": user.akey,
+        "PrivateKey": user.private_key,
+
+        "Kdf": user.client_kdf_type,
+        "KdfIterations": user.client_kdf_iter,
+        "ResetMasterPassword": false, // TODO: Same as above
+        "scope": scope,
+        "unofficialServer": true,
+    })))
+}
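Note: the CLI logs in with a form-encoded client_credentials request against the identity endpoint. The following sketch shows the expected shape of such a request; the host and all values are placeholders, not part of the change:

// Hypothetical request shape for an API-key login (all values are placeholders).
fn api_key_login() -> Result<(), Box<dyn std::error::Error>> {
    let params = [
        ("grant_type", "client_credentials"),
        ("scope", "api"),
        ("client_id", "user.00000000-0000-0000-0000-000000000000"),
        ("client_secret", "<personal API key>"),
        ("device_identifier", "<device uuid>"),
        ("device_name", "cli"),
        ("device_type", "8"),
    ];
    let resp = reqwest::blocking::Client::new()
        .post("https://vault.example.com/identity/connect/token") // assumed host
        .form(&params)
        .send()?;
    println!("{}", resp.text()?);
    Ok(())
}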

/// Retrieves an existing device or creates a new device from ConnectData and the User
fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) {
    // On iOS, device_type sends "iOS", on others it sends a number

@@ -219,6 +300,8 @@ fn twofactor_auth(
        return Ok(None);
    }

+   TwoFactorIncomplete::mark_incomplete(user_uuid, &device.uuid, &device.name, ip, conn)?;
+
    let twofactor_ids: Vec<_> = twofactors.iter().map(|tf| tf.atype).collect();
    let selected_id = data.two_factor_provider.unwrap_or(twofactor_ids[0]); // If we aren't given a two factor provider, assume the first one

@@ -262,6 +345,8 @@ fn twofactor_auth(
        _ => err!("Invalid two factor provider"),
    }

+   TwoFactorIncomplete::mark_complete(user_uuid, &device.uuid, conn)?;
+
    if !CONFIG.disable_2fa_remember() && remember == 1 {
        Ok(Some(device.refresh_twofactor_remember()))
    } else {

@@ -368,17 +453,20 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
    Ok(result)
}

// https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts
// https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs
#[derive(Debug, Clone, Default)]
#[allow(non_snake_case)]
struct ConnectData {
-   grant_type: String, // refresh_token, password
+   // refresh_token, password, client_credentials (API key)
+   grant_type: String,

    // Needed for grant_type="refresh_token"
    refresh_token: Option<String>,

-   // Needed for grant_type="password"
-   client_id: Option<String>, // web, cli, desktop, browser, mobile
+   // Needed for grant_type = "password" | "client_credentials"
+   client_id: Option<String>,     // web, cli, desktop, browser, mobile
+   client_secret: Option<String>, // API key login (cli only)
    password: Option<String>,
    scope: Option<String>,
    username: Option<String>,

@@ -408,6 +496,7 @@ impl<'f> FromForm<'f> for ConnectData {
            "granttype" => form.grant_type = value,
            "refreshtoken" => form.refresh_token = Some(value),
            "clientid" => form.client_id = Some(value),
+           "clientsecret" => form.client_secret = Some(value),
            "password" => form.password = Some(value),
            "scope" => form.scope = Some(value),
            "username" => form.username = Some(value),

@@ -13,6 +13,7 @@ pub use crate::api::{
    core::purge_sends,
    core::purge_trashed_ciphers,
    core::routes as core_routes,
+   core::two_factor::send_incomplete_2fa_notifications,
    core::{emergency_notification_reminder_job, emergency_request_timeout_job},
    icons::routes as icons_routes,
    identity::routes as identity_routes,

@@ -4,7 +4,7 @@ use rocket::Route;
use rocket_contrib::json::Json;
use serde_json::Value as JsonValue;

-use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG};
+use crate::{api::EmptyResult, auth::Headers, Error, CONFIG};

pub fn routes() -> Vec<Route> {
    routes![negotiate, websockets_err]

@@ -30,7 +30,7 @@ fn websockets_err() -> EmptyResult {
}

#[post("/hub/negotiate")]
-fn negotiate(_headers: Headers, _conn: DbConn) -> Json<JsonValue> {
+fn negotiate(_headers: Headers) -> Json<JsonValue> {
    use crate::crypto;
    use data_encoding::BASE64URL;

@@ -22,41 +22,44 @@ pub fn routes() -> Vec<Route> {

#[get("/")]
fn web_index() -> Cached<Option<NamedFile>> {
-   Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok())
+   Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok(), false)
}

#[get("/app-id.json")]
fn app_id() -> Cached<Content<Json<Value>>> {
    let content_type = ContentType::new("application", "fido.trusted-apps+json");

-   Cached::long(Content(
-       content_type,
-       Json(json!({
-           "trustedFacets": [
-               {
-                   "version": { "major": 1, "minor": 0 },
-                   "ids": [
-                       // Per <https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-appid-and-facets-v2.0-id-20180227.html#determining-the-facetid-of-a-calling-application>:
-                       //
-                       // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
-                       // of the web page triggering the FIDO operation, written as
-                       // a URI with an empty path. Default ports are omitted and any
-                       // path component is ignored."
-                       //
-                       // This leaves it unclear as to whether the path must be empty,
-                       // or whether it can be non-empty and will be ignored. To be on
-                       // the safe side, use a proper web origin (with empty path).
-                       &CONFIG.domain_origin(),
-                       "ios:bundle-id:com.8bit.bitwarden",
-                       "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
-               }]
-       })),
-   ))
+   Cached::long(
+       Content(
+           content_type,
+           Json(json!({
+               "trustedFacets": [
+                   {
+                       "version": { "major": 1, "minor": 0 },
+                       "ids": [
+                           // Per <https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-appid-and-facets-v2.0-id-20180227.html#determining-the-facetid-of-a-calling-application>:
+                           //
+                           // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
+                           // of the web page triggering the FIDO operation, written as
+                           // a URI with an empty path. Default ports are omitted and any
+                           // path component is ignored."
+                           //
+                           // This leaves it unclear as to whether the path must be empty,
+                           // or whether it can be non-empty and will be ignored. To be on
+                           // the safe side, use a proper web origin (with empty path).
+                           &CONFIG.domain_origin(),
+                           "ios:bundle-id:com.8bit.bitwarden",
+                           "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
+                   }]
+           })),
+       ),
+       true,
+   )
}

#[get("/<p..>", rank = 10)] // Only match this if the other routes don't match
fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
-   Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok())
+   Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok(), true)
}

#[get("/attachments/<uuid>/<file_id>")]

@@ -74,7 +77,7 @@ fn alive(_conn: DbConn) -> Json<String> {
    Json(format_date(&Utc::now().naive_utc()))
}

-#[get("/bwrs_static/<filename>")]
+#[get("/vw_static/<filename>")]
fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
    match filename.as_ref() {
        "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),

@@ -165,7 +165,6 @@ pub fn generate_invite_claims(
    }
}

-// var token = _dataProtector.Protect($"EmergencyAccessInvite {emergencyAccess.Id} {emergencyAccess.Email} {nowMillis}");
#[derive(Debug, Serialize, Deserialize)]
pub struct EmergencyAccessInviteJwtClaims {
    // Not before

src/config.rs (238 lines changed)
@@ -2,7 +2,6 @@ use std::process::exit;
use std::sync::RwLock;

use once_cell::sync::Lazy;
-use regex::Regex;
use reqwest::Url;

use crate::{

@@ -23,21 +22,6 @@ pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    })
});

-static PRIVACY_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[\w]").unwrap());
-const PRIVACY_CONFIG: &[&str] = &[
-    "allowed_iframe_ancestors",
-    "database_url",
-    "domain_origin",
-    "domain_path",
-    "domain",
-    "helo_name",
-    "org_creation_users",
-    "signups_domains_whitelist",
-    "smtp_from",
-    "smtp_host",
-    "smtp_username",
-];
-
pub type Pass = String;

macro_rules! make_config {

@@ -61,7 +45,7 @@ macro_rules! make_config {
        _overrides: Vec<String>,
    }

-   #[derive(Debug, Clone, Default, Deserialize, Serialize)]
+   #[derive(Clone, Default, Deserialize, Serialize)]
    pub struct ConfigBuilder {
        $($(
            #[serde(skip_serializing_if = "Option::is_none")]

@@ -133,19 +117,6 @@ macro_rules! make_config {
            builder
        }

-       /// Returns a new builder with all the elements from self,
-       /// except those that are equal in both sides
-       fn _remove(&self, other: &Self) -> Self {
-           let mut builder = ConfigBuilder::default();
-           $($(
-               if &self.$name != &other.$name {
-                   builder.$name = self.$name.clone();
-               }
-           )+)+
-           builder
-       }
-
        fn build(&self) -> ConfigItems {
            let mut config = ConfigItems::default();
            let _domain_set = self.domain.is_some();

@@ -161,12 +132,13 @@ macro_rules! make_config {
        }
    }

-   #[derive(Debug, Clone, Default)]
-   pub struct ConfigItems { $($(pub $name: make_config!{@type $ty, $none_action}, )+)+ }
+   #[derive(Clone, Default)]
+   struct ConfigItems { $($( $name: make_config!{@type $ty, $none_action}, )+)+ }

    #[allow(unused)]
    impl Config {
        $($(
            $(#[doc = $doc])+
            pub fn $name(&self) -> make_config!{@type $ty, $none_action} {
                self.inner.read().unwrap().config.$name.clone()
            }

@@ -189,38 +161,91 @@ macro_rules! make_config {

        fn _get_doc(doc: &str) -> serde_json::Value {
            let mut split = doc.split("|>").map(str::trim);
-           json!({
-               "name": split.next(),
-               "description": split.next()
+
+           // We do not use the json!() macro here since that causes a lot of macro recursion.
+           // This slows down compile time and it also causes issues with rust-analyzer
+           serde_json::Value::Object({
+               let mut doc_json = serde_json::Map::new();
+               doc_json.insert("name".into(), serde_json::to_value(split.next()).unwrap());
+               doc_json.insert("description".into(), serde_json::to_value(split.next()).unwrap());
+               doc_json
            })
        }

-       json!([ $({
-           "group": stringify!($group),
-           "grouptoggle": stringify!($($group_enabled)?),
-           "groupdoc": make_config!{ @show $($groupdoc)? },
-           "elements": [
-               $( {
-                   "editable": $editable,
-                   "name": stringify!($name),
-                   "value": cfg.$name,
-                   "default": def.$name,
-                   "type": _get_form_type(stringify!($ty)),
-                   "doc": _get_doc(concat!($($doc),+)),
-                   "overridden": overriden.contains(&stringify!($name).to_uppercase()),
-               }, )+
-           ]}, )+ ])
+       // We do not use the json!() macro here since that causes a lot of macro recursion.
+       // This slows down compile time and it also causes issues with rust-analyzer
+       serde_json::Value::Array(<[_]>::into_vec(Box::new([
+           $(
+               serde_json::Value::Object({
+                   let mut group = serde_json::Map::new();
+                   group.insert("group".into(), (stringify!($group)).into());
+                   group.insert("grouptoggle".into(), (stringify!($($group_enabled)?)).into());
+                   group.insert("groupdoc".into(), (make_config!{ @show $($groupdoc)? }).into());
+
+                   group.insert("elements".into(), serde_json::Value::Array(<[_]>::into_vec(Box::new([
+                       $(
+                           serde_json::Value::Object({
+                               let mut element = serde_json::Map::new();
+                               element.insert("editable".into(), ($editable).into());
+                               element.insert("name".into(), (stringify!($name)).into());
+                               element.insert("value".into(), serde_json::to_value(cfg.$name).unwrap());
+                               element.insert("default".into(), serde_json::to_value(def.$name).unwrap());
+                               element.insert("type".into(), (_get_form_type(stringify!($ty))).into());
+                               element.insert("doc".into(), (_get_doc(concat!($($doc),+))).into());
+                               element.insert("overridden".into(), (overriden.contains(&stringify!($name).to_uppercase())).into());
+                               element
+                           }),
+                       )+
+                   ]))));
+                   group
+               }),
+           )+
+       ])))
    }

    pub fn get_support_json(&self) -> serde_json::Value {
+       // Define which config keys need to be masked.
+       // Pass types will always be masked and no need to put them in the list.
+       // Besides Pass, only String types will be masked via _privacy_mask.
+       const PRIVACY_CONFIG: &[&str] = &[
+           "allowed_iframe_ancestors",
+           "database_url",
+           "domain_origin",
+           "domain_path",
+           "domain",
+           "helo_name",
+           "org_creation_users",
+           "signups_domains_whitelist",
+           "smtp_from",
+           "smtp_host",
+           "smtp_username",
+       ];
+
        let cfg = {
            let inner = &self.inner.read().unwrap();
            inner.config.clone()
        };

-       json!({ $($(
-           stringify!($name): make_config!{ @supportstr $name, cfg.$name, $ty, $none_action },
-       )+)+ })
+       /// We map over the string and replace all alphanumeric, _ and - characters.
+       /// This is the fastest way (within microseconds) instead of using a regex (which takes milliseconds)
+       fn _privacy_mask(value: &str) -> String {
+           value.chars().map(|c|
+               match c {
+                   c if c.is_alphanumeric() => '*',
+                   '_' => '*',
+                   '-' => '*',
+                   _ => c
+               }
+           ).collect::<String>()
+       }
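Note: a quick illustration of the masking behavior (each alphanumeric, `_`, or `-` character becomes `*`; everything else is kept):

assert_eq!(_privacy_mask("smtp.example.com"), "****.*******.***");
assert_eq!(_privacy_mask("https://vault.example.com"), "*****://*****.*******.***");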

+       serde_json::Value::Object({
+           let mut json = serde_json::Map::new();
+           $($(
+               json.insert(stringify!($name).into(), make_config!{ @supportstr $name, cfg.$name, $ty, $none_action });
+           )+)+;
+           json
+       })
    }

    pub fn get_overrides(&self) -> Vec<String> {

@@ -228,29 +253,30 @@ macro_rules! make_config {
            let inner = &self.inner.read().unwrap();
            inner._overrides.clone()
        };

        overrides
    }
}
};

// Support string print
-( @supportstr $name:ident, $value:expr, Pass, option ) => { $value.as_ref().map(|_| String::from("***")) }; // Optional pass, we map to an Option<String> with "***"
-( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { String::from("***") }; // Required pass, we return "***"
-( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { // Optional other value, we return as is or convert to string to apply the privacy config
+( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value($value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
+( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
+( @supportstr $name:ident, $value:expr, String, option ) => { // Optional other value, we return as is or convert to string to apply the privacy config
    if PRIVACY_CONFIG.contains(&stringify!($name)) {
-       json!($value.as_ref().map(|x| PRIVACY_REGEX.replace_all(&x.to_string(), "${1}*").to_string()))
+       serde_json::to_value($value.as_ref().map(|x| _privacy_mask(x) )).unwrap()
    } else {
-       json!($value)
+       serde_json::to_value($value).unwrap()
    }
};
-( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { // Required other value, we return as is or convert to string to apply the privacy config
+( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { // Required other value, we return as is or convert to string to apply the privacy config
    if PRIVACY_CONFIG.contains(&stringify!($name)) {
-       json!(PRIVACY_REGEX.replace_all(&$value.to_string(), "${1}*").to_string())
-   } else {
-       json!($value)
-   }
+       _privacy_mask(&$value).into()
+   } else {
+       ($value).into()
+   }
};
+( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value($value).unwrap() }; // Optional other value, we return as is or convert to string to apply the privacy config
+( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to string to apply the privacy config

// Group or empty string
( @show ) => { "" };

@@ -300,8 +326,6 @@ make_config! {
    data_folder: String, false, def, "data".to_string();
    /// Database URL
    database_url: String, false, auto, |c| format!("{}/{}", c.data_folder, "db.sqlite3");
-   /// Database connection pool size
-   database_max_conns: u32, false, def, 10;
    /// Icon cache folder
    icon_cache_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "icon_cache");
    /// Attachments folder

@@ -333,6 +357,9 @@ make_config! {
    /// Trash purge schedule |> Cron schedule of the job that checks for trashed items to delete permanently.
    /// Defaults to daily. Set blank to disable this job.
    trash_purge_schedule: String, false, def, "0 5 0 * * *".to_string();
+   /// Incomplete 2FA login schedule |> Cron schedule of the job that checks for incomplete 2FA logins.
+   /// Defaults to once every minute. Set blank to disable this job.
+   incomplete_2fa_schedule: String, false, def, "30 * * * * *".to_string();
    /// Emergency notification reminder schedule |> Cron schedule of the job that sends expiration reminders to emergency access grantors.
    /// Defaults to hourly. Set blank to disable this job.
    emergency_notification_reminder_schedule: String, false, def, "0 5 * * * *".to_string();

@@ -372,9 +399,17 @@ make_config! {
    /// sure to inform all users of any changes to this setting.
    trash_auto_delete_days: i64, true, option;

+   /// Incomplete 2FA time limit |> Number of minutes to wait before a 2FA-enabled login is
+   /// considered incomplete, resulting in an email notification. An incomplete 2FA login is one
+   /// where the correct master password was provided but the required 2FA step was not completed,
+   /// which potentially indicates a master password compromise. Set to 0 to disable this check.
+   /// This setting applies globally to all users.
+   incomplete_2fa_time_limit: i64, true, def, 3;
+
-   /// Disable icon downloads |> Set to true to disable icon downloading, this would still serve icons from
-   /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0,
-   /// otherwise it will delete them and they won't be downloaded again.
+   /// Disable icon downloads |> Set to true to disable icon downloading in the internal icon service.
+   /// This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external
+   /// network requests. $ICON_CACHE_TTL must also be set to 0; otherwise, the existing icons
+   /// will be deleted eventually, but won't be downloaded again.
    disable_icon_download: bool, true, def, false;
    /// Allow new signups |> Controls whether new users can register. Users can be invited by the vaultwarden admin even if this is disabled
    signups_allowed: bool, true, def, true;

@@ -415,6 +450,19 @@ make_config! {
    ip_header: String, true, def, "X-Real-IP".to_string();
    /// Internal IP header property, used to avoid recomputing each time
    _ip_header_enabled: bool, false, gen, |c| &c.ip_header.trim().to_lowercase() != "none";
+   /// Icon service |> The predefined icon services are: internal, bitwarden, duckduckgo, google.
+   /// To specify a custom icon service, set a URL template with exactly one instance of `{}`,
+   /// which is replaced with the domain. For example: `https://icon.example.com/domain/{}`.
+   /// `internal` refers to Vaultwarden's built-in icon fetching implementation. If an external
+   /// service is set, an icon request to Vaultwarden will return an HTTP redirect to the
+   /// corresponding icon at the external service.
+   icon_service: String, false, def, "internal".to_string();
+   /// Icon redirect code |> The HTTP status code to use for redirects to an external icon service.
+   /// The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent).
+   /// Temporary redirects are useful while testing different icon services, but once a service
+   /// has been decided on, consider using permanent redirects for cacheability. The legacy codes
+   /// are currently better supported by the Bitwarden clients.
+   icon_redirect_code: u32, true, def, 302;
    /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded
    icon_cache_ttl: u64, true, def, 2_592_000;
    /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again.

@@ -461,11 +509,24 @@ make_config! {
    /// Max database connection retries |> Number of times to retry the database connection during startup, with 1 second between each retry, set to 0 to retry indefinitely
    db_connection_retries: u32, false, def, 15;

+   /// Database connection pool size
+   database_max_conns: u32, false, def, 10;
+
    /// Bypass admin page security (Know the risks!) |> Disables the Admin Token for the admin page so you may use your own auth in-front
    disable_admin_token: bool, true, def, false;

    /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets
    allowed_iframe_ancestors: String, true, def, String::new();
+
+   /// Seconds between login requests |> Number of seconds, on average, between login and 2FA requests from the same IP address before rate limiting kicks in
+   login_ratelimit_seconds: u64, false, def, 60;
+   /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `login_ratelimit_seconds`. Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2
+   login_ratelimit_max_burst: u32, false, def, 10;
+
+   /// Seconds between admin requests |> Number of seconds, on average, between admin requests from the same IP address before rate limiting kicks in
+   admin_ratelimit_seconds: u64, false, def, 300;
+   /// Max burst size for admin requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `admin_ratelimit_seconds`
+   admin_ratelimit_max_burst: u32, false, def, 3;
},

/// Yubikey settings
@@ -532,8 +593,8 @@ make_config! {
email_2fa: _enable_email_2fa {
    /// Enabled |> Disabling will prevent users from setting up new email 2FA and using existing email 2FA configured
    _enable_email_2fa: bool, true, auto, |c| c._enable_smtp && c.smtp_host.is_some();
-   /// Email token size |> Number of digits in an email token (min: 6, max: 19). Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting.
-   email_token_size: u32, true, def, 6;
+   /// Email token size |> Number of digits in an email 2FA token (min: 6, max: 255). Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting.
+   email_token_size: u8, true, def, 6;
    /// Token expiration time |> Maximum time in seconds a token is valid. The time the user has to open the email client and copy the token.
    email_expiration_time: u64, true, def, 600;
    /// Maximum attempts |> Maximum attempts before an email token is reset and a new email will need to be sent

@@ -607,21 +668,39 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
        if cfg._enable_email_2fa && cfg.email_token_size < 6 {
            err!("`EMAIL_TOKEN_SIZE` has a minimum size of 6")
        }
-
-       if cfg._enable_email_2fa && cfg.email_token_size > 19 {
-           err!("`EMAIL_TOKEN_SIZE` has a maximum size of 19")
-       }
    }

    // Check if the icon blacklist regex is valid
    if let Some(ref r) = cfg.icon_blacklist_regex {
-       let validate_regex = Regex::new(r);
+       let validate_regex = regex::Regex::new(r);
        match validate_regex {
            Ok(_) => (),
            Err(e) => err!(format!("`ICON_BLACKLIST_REGEX` is invalid: {:#?}", e)),
        }
    }

+   // Check if the icon service is valid
+   let icon_service = cfg.icon_service.as_str();
+   match icon_service {
+       "internal" | "bitwarden" | "duckduckgo" | "google" => (),
+       _ => {
+           if !icon_service.starts_with("http") {
+               err!(format!("Icon service URL `{}` must start with \"http\"", icon_service))
+           }
+           match icon_service.matches("{}").count() {
+               1 => (), // nominal
+               0 => err!(format!("Icon service URL `{}` has no placeholder \"{{}}\"", icon_service)),
+               _ => err!(format!("Icon service URL `{}` has more than one placeholder \"{{}}\"", icon_service)),
+           }
+       }
+   }
+
+   // Check if the icon redirect code is valid
+   match cfg.icon_redirect_code {
+       301 | 302 | 307 | 308 => (),
+       _ => err!("Only HTTP 301/302 and 307/308 redirects are supported"),
+   }
+
    Ok(())
}

@@ -707,7 +786,7 @@ impl Config {
        Ok(())
    }

-   pub fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
+   fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
        let builder = {
            let usr = &self.inner.read().unwrap()._usr;
            let mut _overrides = Vec::new();

@@ -861,8 +940,6 @@ where

    reg!("email/change_email", ".html");
    reg!("email/delete_account", ".html");
-   reg!("email/invite_accepted", ".html");
-   reg!("email/invite_confirmed", ".html");
    reg!("email/emergency_access_invite_accepted", ".html");
    reg!("email/emergency_access_invite_confirmed", ".html");
    reg!("email/emergency_access_recovery_approved", ".html");

@@ -870,6 +947,9 @@ where
    reg!("email/emergency_access_recovery_rejected", ".html");
    reg!("email/emergency_access_recovery_reminder", ".html");
    reg!("email/emergency_access_recovery_timed_out", ".html");
+   reg!("email/incomplete_2fa_login", ".html");
+   reg!("email/invite_accepted", ".html");
+   reg!("email/invite_confirmed", ".html");
    reg!("email/new_device_logged_in", ".html");
    reg!("email/pw_hint_none", ".html");
    reg!("email/pw_hint_some", ".html");

@@ -6,8 +6,6 @@ use std::num::NonZeroU32;
use data_encoding::HEXLOWER;
use ring::{digest, hmac, pbkdf2};

-use crate::error::Error;
-
static DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256;
const OUTPUT_LEN: usize = digest::SHA256_OUTPUT_LEN;

@@ -51,6 +49,34 @@ pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
    array
}

+/// Generates a random string over a specified alphabet.
+pub fn get_random_string(alphabet: &[u8], num_chars: usize) -> String {
+    // Ref: https://rust-lang-nursery.github.io/rust-cookbook/algorithms/randomness.html
+    use rand::Rng;
+    let mut rng = rand::thread_rng();
+
+    (0..num_chars)
+        .map(|_| {
+            let i = rng.gen_range(0..alphabet.len());
+            alphabet[i] as char
+        })
+        .collect()
+}
+
+/// Generates a random numeric string.
+pub fn get_random_string_numeric(num_chars: usize) -> String {
+    const ALPHABET: &[u8] = b"0123456789";
+    get_random_string(ALPHABET, num_chars)
+}
+
+/// Generates a random alphanumeric string.
+pub fn get_random_string_alphanum(num_chars: usize) -> String {
+    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
+                              abcdefghijklmnopqrstuvwxyz\
+                              0123456789";
+    get_random_string(ALPHABET, num_chars)
+}
+
pub fn generate_id(num_bytes: usize) -> String {
    HEXLOWER.encode(&get_random(vec![0; num_bytes]))
}

@@ -65,23 +91,15 @@ pub fn generate_attachment_id() -> String {
    generate_id(10) // 80 bits
}

-pub fn generate_token(token_size: u32) -> Result<String, Error> {
-   // A u64 can represent all whole numbers up to 19 digits long.
-   if token_size > 19 {
-       err!("Token size is limited to 19 digits")
-   }
+/// Generates a numeric token for email-based verifications.
+pub fn generate_email_token(token_size: u8) -> String {
+    get_random_string_numeric(token_size as usize)
+}

-   let low: u64 = 0;
-   let high: u64 = 10u64.pow(token_size);
-
-   // Generate a random number in the range [low, high), then format it as a
-   // token of fixed width, left-padding with 0 as needed.
-   use rand::{thread_rng, Rng};
-   let mut rng = thread_rng();
-   let number: u64 = rng.gen_range(low..high);
-   let token = format!("{:0size$}", number, size = token_size as usize);
-
-   Ok(token)
+/// Generates a personal API key.
+/// Upstream uses 30 chars, which is ~178 bits of entropy.
+pub fn generate_api_key() -> String {
+    get_random_string_alphanum(30)
}
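Note: both helpers produce fixed-length strings; a quick illustrative check (the example token value is made up):

let token = generate_email_token(6);   // always exactly 6 digits, e.g. "042917"
assert_eq!(token.len(), 6);
assert!(token.chars().all(|c| c.is_ascii_digit()));

let key = generate_api_key();          // 30 alphanumeric characters
assert_eq!(key.len(), 30);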

//

@@ -343,36 +343,39 @@ impl Cipher {
    db_run! {conn: {
        // Check whether this cipher is in any collections accessible to the
        // user. If so, retrieve the access flags for each collection.
-       let query = ciphers::table
+       let rows = ciphers::table
            .filter(ciphers::uuid.eq(&self.uuid))
            .inner_join(ciphers_collections::table.on(
                ciphers::uuid.eq(ciphers_collections::cipher_uuid)))
            .inner_join(users_collections::table.on(
                ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)
                .and(users_collections::user_uuid.eq(user_uuid))))
-           .select((users_collections::read_only, users_collections::hide_passwords));
+           .select((users_collections::read_only, users_collections::hide_passwords))
+           .load::<(bool, bool)>(conn)
+           .expect("Error getting access restrictions");

-       // There's an edge case where a cipher can be in multiple collections
-       // with inconsistent access flags. For example, a cipher could be in
-       // one collection where the user has read-only access, but also in
-       // another collection where the user has read/write access. To handle
-       // this, we do a boolean OR of all values in each of the `read_only`
-       // and `hide_passwords` columns. This could ideally be done as part
-       // of the query, but Diesel doesn't support a max() or bool_or()
-       // function on booleans and this behavior isn't portable anyway.
-       if let Ok(vec) = query.load::<(bool, bool)>(conn) {
-           let mut read_only = false;
-           let mut hide_passwords = false;
-           for (ro, hp) in vec.iter() {
-               read_only |= ro;
-               hide_passwords |= hp;
-           }
-
-           Some((read_only, hide_passwords))
-       } else {
+       if rows.is_empty() {
            // This cipher isn't in any collections accessible to the user.
-           None
+           return None;
        }

+       // A cipher can be in multiple collections with inconsistent access flags.
+       // For example, a cipher could be in one collection where the user has
+       // read-only access, but also in another collection where the user has
+       // read/write access. For a flag to be in effect for a cipher, upstream
+       // requires all collections the cipher is in to have that flag set.
+       // Therefore, we do a boolean AND of all values in each of the `read_only`
+       // and `hide_passwords` columns. This could ideally be done as part of the
+       // query, but Diesel doesn't support a min() or bool_and() function on
+       // booleans and this behavior isn't portable anyway.
+       let mut read_only = true;
+       let mut hide_passwords = true;
+       for (ro, hp) in rows.iter() {
+           read_only &= ro;
+           hide_passwords &= hp;
+       }
+
+       Some((read_only, hide_passwords))
    }}
}
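Note: a small illustration of the new AND semantics (a restriction only applies when every collection imposes it):

// E.g. the cipher is in two collections: one read-only, one read/write.
let rows = vec![(true, true), (false, true)];
let (read_only, hide_passwords) =
    rows.iter().fold((true, true), |acc, (ro, hp)| (acc.0 & ro, acc.1 & hp));
assert!(!read_only);      // not read-only everywhere, so the user may write
assert!(hide_passwords);  // hidden in every collection, so it stays hidden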
|
||||
|
||||
|
||||
@@ -17,8 +17,7 @@ db_object! {
|
||||
pub user_uuid: String,
|
||||
|
||||
pub name: String,
|
||||
// https://github.com/bitwarden/core/tree/master/src/Core/Enums
|
||||
pub atype: i32,
|
||||
pub atype: i32, // https://github.com/bitwarden/server/blob/master/src/Core/Enums/DeviceType.cs
|
||||
pub push_token: Option<String>,
|
||||
|
||||
pub refresh_token: String,
|
||||
@@ -61,7 +60,12 @@ impl Device {
|
||||
self.twofactor_remember = None;
|
||||
}
|
||||
|
||||
pub fn refresh_tokens(&mut self, user: &super::User, orgs: Vec<super::UserOrganization>) -> (String, i64) {
|
||||
pub fn refresh_tokens(
|
||||
&mut self,
|
||||
user: &super::User,
|
||||
orgs: Vec<super::UserOrganization>,
|
||||
scope: Vec<String>,
|
||||
) -> (String, i64) {
|
||||
// If there is no refresh token, we create one
|
||||
if self.refresh_token.is_empty() {
|
||||
use crate::crypto;
|
||||
@@ -99,7 +103,7 @@ impl Device {
|
||||
|
||||
sstamp: user.security_stamp.to_string(),
|
||||
device: self.uuid.to_string(),
|
||||
scope: vec!["api".into(), "offline_access".into()],
|
||||
scope,
|
||||
amr: vec!["Application".into()],
|
||||
};
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ mod org_policy;
mod organization;
mod send;
mod two_factor;
mod two_factor_incomplete;
mod user;

pub use self::attachment::Attachment;
@@ -22,4 +23,5 @@ pub use self::org_policy::{OrgPolicy, OrgPolicyType};
pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
pub use self::send::{Send, SendType};
pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::two_factor_incomplete::TwoFactorIncomplete;
pub use self::user::{Invitation, User, UserStampException};
@@ -1,8 +1,6 @@
use serde_json::Value;

use crate::api::EmptyResult;
use crate::db::DbConn;
use crate::error::MapResult;
use crate::{api::EmptyResult, db::DbConn, error::MapResult};

use super::User;

@@ -161,7 +159,6 @@ impl TwoFactor {

use crate::api::core::two_factor::u2f::U2FRegistration;
use crate::api::core::two_factor::webauthn::{get_webauthn_registrations, WebauthnRegistration};
use std::convert::TryInto;
use webauthn_rs::proto::*;

for mut u2f in u2f_factors {
108 src/db/models/two_factor_incomplete.rs (new file)
@@ -0,0 +1,108 @@
use chrono::{NaiveDateTime, Utc};

use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONFIG};

use super::User;

db_object! {
    #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)]
    #[table_name = "twofactor_incomplete"]
    #[belongs_to(User, foreign_key = "user_uuid")]
    #[primary_key(user_uuid, device_uuid)]
    pub struct TwoFactorIncomplete {
        pub user_uuid: String,
        // This device UUID is simply what's claimed by the device. It doesn't
        // necessarily correspond to any UUID in the devices table, since a device
        // must complete 2FA login before being added into the devices table.
        pub device_uuid: String,
        pub device_name: String,
        pub login_time: NaiveDateTime,
        pub ip_address: String,
    }
}

impl TwoFactorIncomplete {
    pub fn mark_incomplete(
        user_uuid: &str,
        device_uuid: &str,
        device_name: &str,
        ip: &ClientIp,
        conn: &DbConn,
    ) -> EmptyResult {
        if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() {
            return Ok(());
        }

        // Don't update the data for an existing user/device pair, since that
        // would allow an attacker to arbitrarily delay notifications by
        // sending repeated 2FA attempts to reset the timer.
        let existing = Self::find_by_user_and_device(user_uuid, device_uuid, conn);
        if existing.is_some() {
            return Ok(());
        }

        db_run! { conn: {
            diesel::insert_into(twofactor_incomplete::table)
                .values((
                    twofactor_incomplete::user_uuid.eq(user_uuid),
                    twofactor_incomplete::device_uuid.eq(device_uuid),
                    twofactor_incomplete::device_name.eq(device_name),
                    twofactor_incomplete::login_time.eq(Utc::now().naive_utc()),
                    twofactor_incomplete::ip_address.eq(ip.ip.to_string()),
                ))
                .execute(conn)
                .map_res("Error adding twofactor_incomplete record")
        }}
    }

    pub fn mark_complete(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult {
        if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() {
            return Ok(());
        }

        Self::delete_by_user_and_device(user_uuid, device_uuid, conn)
    }

    pub fn find_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> Option<Self> {
        db_run! { conn: {
            twofactor_incomplete::table
                .filter(twofactor_incomplete::user_uuid.eq(user_uuid))
                .filter(twofactor_incomplete::device_uuid.eq(device_uuid))
                .first::<TwoFactorIncompleteDb>(conn)
                .ok()
                .from_db()
        }}
    }

    pub fn find_logins_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec<Self> {
        db_run! {conn: {
            twofactor_incomplete::table
                .filter(twofactor_incomplete::login_time.lt(dt))
                .load::<TwoFactorIncompleteDb>(conn)
                .expect("Error loading twofactor_incomplete")
                .from_db()
        }}
    }

    pub fn delete(self, conn: &DbConn) -> EmptyResult {
        Self::delete_by_user_and_device(&self.user_uuid, &self.device_uuid, conn)
    }

    pub fn delete_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult {
        db_run! { conn: {
            diesel::delete(twofactor_incomplete::table
                .filter(twofactor_incomplete::user_uuid.eq(user_uuid))
                .filter(twofactor_incomplete::device_uuid.eq(device_uuid)))
                .execute(conn)
                .map_res("Error in twofactor_incomplete::delete_by_user_and_device()")
        }}
    }

    pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
        db_run! { conn: {
            diesel::delete(twofactor_incomplete::table.filter(twofactor_incomplete::user_uuid.eq(user_uuid)))
                .execute(conn)
                .map_res("Error in twofactor_incomplete::delete_all_by_user()")
        }}
    }
}
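The intended usage pattern is that the login flow brackets every 2FA challenge with these two calls; a hedged sketch, since the actual call sites live in the auth handlers rather than in this file:

// On issuing the 2FA challenge: record the pending login.
TwoFactorIncomplete::mark_incomplete(&user.uuid, device_uuid, device_name, &ip, &conn)?;
// On verifying the second factor: clear the record. Rows that are never
// cleared are later picked up by the scheduled notification job.
TwoFactorIncomplete::mark_complete(&user.uuid, device_uuid, &conn)?;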
@@ -44,8 +44,9 @@ db_object! {

pub client_kdf_type: i32,
pub client_kdf_iter: i32,
}

pub api_key: Option<String>,
}

#[derive(Identifiable, Queryable, Insertable)]
#[table_name = "invitations"]
@@ -110,6 +111,8 @@ impl User {

client_kdf_type: Self::CLIENT_KDF_TYPE_DEFAULT,
client_kdf_iter: Self::CLIENT_KDF_ITER_DEFAULT,

api_key: None,
}
}

@@ -130,6 +133,10 @@ impl User {
}
}

pub fn check_valid_api_key(&self, key: &str) -> bool {
matches!(self.api_key, Some(ref api_key) if crate::crypto::ct_eq(api_key, key))
}

/// Set the password hash generated
/// And resets the security_stamp. Based upon the allow_next_route the security_stamp will be different.
///
@@ -176,7 +183,10 @@ impl User {
}
}

use super::{Cipher, Device, EmergencyAccess, Favorite, Folder, Send, TwoFactor, UserOrgType, UserOrganization};
use super::{
Cipher, Device, EmergencyAccess, Favorite, Folder, Send, TwoFactor, TwoFactorIncomplete, UserOrgType,
UserOrganization,
};
use crate::db::DbConn;

use crate::api::EmptyResult;
@@ -273,6 +283,7 @@ impl User {
Folder::delete_all_by_user(&self.uuid, conn)?;
Device::delete_all_by_user(&self.uuid, conn)?;
TwoFactor::delete_all_by_user(&self.uuid, conn)?;
TwoFactorIncomplete::delete_all_by_user(&self.uuid, conn)?;
Invitation::take(&self.email, conn); // Delete invitation if any

db_run! {conn: {
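Note that check_valid_api_key() compares with crate::crypto::ct_eq rather than ==, so a caller cannot use response timing to learn how many leading bytes of a guessed key were correct. An illustrative constant-time comparison (the project uses its own crypto helper; this is not its actual implementation):

// OR-accumulate the XOR of every byte pair, so the loop always runs to
// the end instead of bailing out at the first mismatch.
fn ct_eq(a: &str, b: &str) -> bool {
    let (a, b) = (a.as_bytes(), b.as_bytes());
    a.len() == b.len() && a.iter().zip(b).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}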
@@ -140,6 +140,16 @@ table! {
}
}

table! {
twofactor_incomplete (user_uuid, device_uuid) {
user_uuid -> Text,
device_uuid -> Text,
device_name -> Text,
login_time -> Timestamp,
ip_address -> Text,
}
}

table! {
users (uuid) {
uuid -> Text,
@@ -168,6 +178,7 @@ table! {
excluded_globals -> Text,
client_kdf_type -> Integer,
client_kdf_iter -> Integer,
api_key -> Nullable<Text>,
}
}

@@ -140,6 +140,16 @@ table! {
}
}

table! {
twofactor_incomplete (user_uuid, device_uuid) {
user_uuid -> Text,
device_uuid -> Text,
device_name -> Text,
login_time -> Timestamp,
ip_address -> Text,
}
}

table! {
users (uuid) {
uuid -> Text,
@@ -168,6 +178,7 @@ table! {
excluded_globals -> Text,
client_kdf_type -> Integer,
client_kdf_iter -> Integer,
api_key -> Nullable<Text>,
}
}

@@ -140,6 +140,16 @@ table! {
}
}

table! {
twofactor_incomplete (user_uuid, device_uuid) {
user_uuid -> Text,
device_uuid -> Text,
device_name -> Text,
login_time -> Timestamp,
ip_address -> Text,
}
}

table! {
users (uuid) {
uuid -> Text,
@@ -168,6 +178,7 @@ table! {
excluded_globals -> Text,
client_kdf_type -> Integer,
client_kdf_iter -> Integer,
api_key -> Nullable<Text>,
}
}
@@ -73,7 +73,7 @@ make_error! {
Serde(SerdeErr): _has_source, _api_error,
JWt(JwtErr): _has_source, _api_error,
Handlebars(HbErr): _has_source, _api_error,
//WsError(ws::Error): _has_source, _api_error,

Io(IoErr): _has_source, _api_error,
Time(TimeErr): _has_source, _api_error,
Req(ReqErr): _has_source, _api_error,
@@ -119,11 +119,13 @@ impl Error {
Empty {}.into()
}

#[must_use]
pub fn with_msg<M: Into<String>>(mut self, msg: M) -> Self {
self.message = msg.into();
self
}

#[must_use]
pub const fn with_code(mut self, code: u16) -> Self {
self.error_code = code;
self
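The new #[must_use] attributes matter because with_msg() and with_code() are builder-style: each consumes self and returns the modified error, so calling one and discarding the result silently does nothing. A hypothetical illustration (Error::new(usr_msg, log_msg) assumed from the surrounding module); a bare `e.with_code(404);` would now produce an unused-return-value warning, while the chained form keeps the result:

// Correct usage: keep the value returned by the builder method.
let e = Error::new("Operation failed", "Detailed log message").with_code(404);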
35 src/mail.rs
@@ -1,6 +1,6 @@
use std::str::FromStr;

use chrono::{DateTime, Local};
use chrono::NaiveDateTime;
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};

use lettre::{
@@ -394,7 +394,7 @@ pub fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult {
send_email(address, &subject, body_html, body_text)
}

pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &DateTime<Local>, device: &str) -> EmptyResult {
pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult {
use crate::util::upcase_first;
let device = upcase_first(device);

@@ -405,7 +405,26 @@ pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &DateTime<Local>,
"url": CONFIG.domain(),
"ip": ip,
"device": device,
"datetime": crate::util::format_datetime_local(dt, fmt),
"datetime": crate::util::format_naive_datetime_local(dt, fmt),
}),
)?;

send_email(address, &subject, body_html, body_text)
}

pub fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult {
use crate::util::upcase_first;
let device = upcase_first(device);

let fmt = "%A, %B %_d, %Y at %r %Z";
let (subject, body_html, body_text) = get_text(
"email/incomplete_2fa_login",
json!({
"url": CONFIG.domain(),
"ip": ip,
"device": device,
"datetime": crate::util::format_naive_datetime_local(dt, fmt),
"time_limit": CONFIG.incomplete_2fa_time_limit(),
}),
)?;

@@ -486,10 +505,10 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String
Err(e) => {
if e.is_client() {
debug!("SMTP Client error: {:#?}", e);
err!(format!("SMTP Client error: {}", e.to_string()));
err!(format!("SMTP Client error: {}", e));
} else if e.is_transient() {
debug!("SMTP 4xx error: {:#?}", e);
err!(format!("SMTP 4xx error: {}", e.to_string()));
err!(format!("SMTP 4xx error: {}", e));
} else if e.is_permanent() {
debug!("SMTP 5xx error: {:#?}", e);
let mut msg = e.to_string();
@@ -500,13 +519,13 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String
err!(format!("SMTP 5xx error: {}", msg));
} else if e.is_timeout() {
debug!("SMTP timeout error: {:#?}", e);
err!(format!("SMTP timeout error: {}", e.to_string()));
err!(format!("SMTP timeout error: {}", e));
} else if e.is_tls() {
debug!("SMTP Encryption error: {:#?}", e);
err!(format!("SMTP Encryption error: {}", e.to_string()));
err!(format!("SMTP Encryption error: {}", e));
} else {
debug!("SMTP {:#?}", e);
err!(format!("SMTP {}", e.to_string()));
err!(format!("SMTP {}", e));
}
}
}
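The signature change from &DateTime<Local> to &NaiveDateTime matches the database, which stores naive UTC timestamps (see login_time above). A hedged sketch of what a format_naive_datetime_local helper can look like; the project's real implementation in crate::util may differ:

use chrono::{Local, NaiveDateTime, TimeZone};

// Interpret the stored naive timestamp as UTC, then render it in the
// server's local time zone using the given format string.
fn format_naive_datetime_local(dt: &NaiveDateTime, fmt: &str) -> String {
    Local.from_utc_datetime(dt).format(fmt).to_string()
}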
35 src/main.rs
@@ -1,6 +1,10 @@
#![forbid(unsafe_code)]
#![cfg_attr(feature = "unstable", feature(ip))]
#![recursion_limit = "512"]
// The recursion_limit is mainly triggered by the json!() macro.
// The more key/value pairs there are the more recursion occurs.
// We want to keep this as low as possible, but not higher then 128.
// If you go above 128 it will cause rust-analyzer to fail,
#![recursion_limit = "87"]

extern crate openssl;
#[macro_use]
@@ -28,6 +32,7 @@ mod crypto;
#[macro_use]
mod db;
mod mail;
mod ratelimit;
mod util;

pub use config::CONFIG;
@@ -71,16 +76,18 @@ const HELP: &str = "\
-v, --version Prints the app version
";

pub const VERSION: Option<&str> = option_env!("VW_VERSION");

fn parse_args() {
const NO_VERSION: &str = "(Version info from Git not present)";
let mut pargs = pico_args::Arguments::from_env();
let version = VERSION.unwrap_or("(Version info from Git not present)");

if pargs.contains(["-h", "--help"]) {
println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
println!("vaultwarden {}", version);
print!("{}", HELP);
exit(0);
} else if pargs.contains(["-v", "--version"]) {
println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
println!("vaultwarden {}", version);
exit(0);
}
}
@@ -89,7 +96,7 @@ fn launch_info() {
println!("/--------------------------------------------------------------------\\");
println!("| Starting Vaultwarden |");

if let Some(version) = option_env!("BWRS_VERSION") {
if let Some(version) = VERSION {
println!("|{:^68}|", format!("Version {}", version));
}

@@ -104,6 +111,14 @@ fn launch_info() {
}

fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
// Depending on the main log level we either want to disable or enable logging for trust-dns.
// Else if there are timeouts it will clutter the logs since trust-dns uses warn for this.
let trust_dns_level = if level >= log::LevelFilter::Debug {
level
} else {
log::LevelFilter::Off
};

let mut logger = fern::Dispatch::new()
.level(level)
// Hide unknown certificate errors if using self-signed
@@ -122,6 +137,8 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> {
.level_for("hyper::client", log::LevelFilter::Off)
// Prevent cookie_store logs
.level_for("cookie_store", log::LevelFilter::Off)
// Variable level for trust-dns used by reqwest
.level_for("trust_dns_proto", trust_dns_level)
.chain(std::io::stdout());

// Enable smtp debug logging only specifically for smtp when need.
@@ -345,6 +362,14 @@ fn schedule_jobs(pool: db::DbPool) {
}));
}

// Send email notifications about incomplete 2FA logins, which potentially
// indicates that a user's master password has been compromised.
if !CONFIG.incomplete_2fa_schedule().is_empty() {
sched.add(Job::new(CONFIG.incomplete_2fa_schedule().parse().unwrap(), || {
api::send_incomplete_2fa_notifications(pool.clone());
}));
}

// Grant emergency access requests that have met the required wait time.
// This job should run before the emergency access reminders job to avoid
// sending reminders for requests that are about to be granted anyway.
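api::send_incomplete_2fa_notifications() itself is outside this diff; given the model and mail helpers added above, a plausible shape for the job body is the following hedged sketch (User::find_by_uuid is assumed from the user model; this is not the actual implementation):

use chrono::{Duration, Utc};

fn send_incomplete_2fa_notifications(conn: &DbConn) {
    // Anything older than the configured limit counts as an incomplete login.
    let cutoff = Utc::now().naive_utc() - Duration::minutes(CONFIG.incomplete_2fa_time_limit());
    for login in TwoFactorIncomplete::find_logins_before(&cutoff, conn) {
        if let Some(user) = User::find_by_uuid(&login.user_uuid, conn) {
            mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name).ok();
        }
        // Delete the record so each incomplete login is reported only once.
        login.delete(conn).ok();
    }
}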
38 src/ratelimit.rs (new file)
@@ -0,0 +1,38 @@
use once_cell::sync::Lazy;
use std::{net::IpAddr, num::NonZeroU32, time::Duration};

use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter};

use crate::{Error, CONFIG};

type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultClock>;

static LIMITER_LOGIN: Lazy<Limiter> = Lazy::new(|| {
    let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds());
    let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst");
    RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst))
});

static LIMITER_ADMIN: Lazy<Limiter> = Lazy::new(|| {
    let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds());
    let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst");
    RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst))
});

pub fn check_limit_login(ip: &IpAddr) -> Result<(), Error> {
    match LIMITER_LOGIN.check_key(ip) {
        Ok(_) => Ok(()),
        Err(_e) => {
            err_code!("Too many login requests", 429);
        }
    }
}

pub fn check_limit_admin(ip: &IpAddr) -> Result<(), Error> {
    match LIMITER_ADMIN.check_key(ip) {
        Ok(_) => Ok(()),
        Err(_e) => {
            err_code!("Too many admin requests", 429);
        }
    }
}
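A hypothetical guard at the top of a login handler shows how these checks are meant to be used; the actual route wiring is not part of this file:

// Reject the request with HTTP 429 before doing any credential work.
fn login_guard(ip: &ClientIp) -> Result<(), Error> {
    crate::ratelimit::check_limit_login(&ip.ip)
}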
@@ -47,7 +47,8 @@
"Type": 5,
"Domains": [
"wellsfargo.com",
"wf.com"
"wf.com",
"wellsfargoadvisors.com"
],
"Excluded": false
},
@@ -905,5 +906,21 @@
"protonvpn.com"
],
"Excluded": false
},
{
"Type": 86,
"Domains": [
"ubisoft.com",
"ubi.com"
],
"Excluded": false
},
{
"Type": 87,
"Domains": [
"transferwise.com",
"wise.com"
],
"Excluded": false
}
]
391 src/static/scripts/bootstrap-native.js (vendored)
@@ -1,5 +1,5 @@
/*!
* Native JavaScript for Bootstrap v4.0.6 (https://thednp.github.io/bootstrap.native/)
* Native JavaScript for Bootstrap v4.0.8 (https://thednp.github.io/bootstrap.native/)
* Copyright 2015-2021 © dnp_theme
* Licensed under MIT (https://github.com/thednp/bootstrap.native/blob/master/LICENSE)
*/
@@ -7,7 +7,7 @@
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.BSN = factory());
}(this, (function () { 'use strict';
})(this, (function () { 'use strict';

const transitionEndEvent = 'webkitTransition' in document.head.style ? 'webkitTransitionEnd' : 'transitionend';

@@ -188,7 +188,7 @@
element.dispatchEvent(closedAlertEvent);

self.dispose();
element.parentNode.removeChild(element);
element.remove();
}

// ALERT PRIVATE METHOD
@@ -1022,9 +1022,9 @@
function isEmptyAnchor(elem) {
const parentAnchor = elem.closest('A');
// anchor href starts with #
return elem && ((elem.href && elem.href.slice(-1) === '#')
return elem && ((elem.hasAttribute('href') && elem.href.slice(-1) === '#')
// OR a child of an anchor with href starts with #
|| (parentAnchor && parentAnchor.href && parentAnchor.href.slice(-1) === '#'));
|| (parentAnchor && parentAnchor.hasAttribute('href') && parentAnchor.href.slice(-1) === '#'));
}

function setFocus(element) {
@@ -1487,7 +1487,7 @@

function appendOverlay(hasFade, isModal) {
toggleOverlayType(isModal);
document.body.appendChild(overlay);
document.body.append(overlay);
if (hasFade) addClass(overlay, fadeClass);
}

@@ -1501,12 +1501,11 @@
}

function removeOverlay() {
const bd = document.body;
const currentOpen = getCurrentOpen();

if (!currentOpen) {
removeClass(overlay, fadeClass);
bd.removeChild(overlay);
overlay.remove();
resetScrollbar();
}
}
@@ -1928,7 +1927,7 @@

if ((!element.contains(target) && options.backdrop
&& (!trigger || (trigger && !triggers.includes(trigger))))
|| offCanvasDismiss.contains(target)) {
|| (offCanvasDismiss && offCanvasDismiss.contains(target))) {
self.relatedTarget = target === offCanvasDismiss ? offCanvasDismiss : null;
self.hide();
}
@@ -2122,19 +2121,6 @@
.some((mediaType) => element instanceof mediaType);
}

function closestRelative(element) {
let retval = null;
let el = element;
while (el !== document.body) {
el = el.parentElement;
if (getComputedStyle(el).position === 'relative') {
retval = el;
break;
}
}
return retval;
}

// both popovers and tooltips (this, event)
function styleTip(self, e) {
const tipClasses = /\b(top|bottom|start|end)+/;
@@ -2148,32 +2134,32 @@
let tipDimensions = { w: tip.offsetWidth, h: tip.offsetHeight };
const windowWidth = (document.documentElement.clientWidth || document.body.clientWidth);
const windowHeight = (document.documentElement.clientHeight || document.body.clientHeight);
const { element, options, arrow } = self;
const {
element, options, arrow, positions,
} = self;
let { container, placement } = options;
let parentIsBody = container === document.body;
const targetPosition = getComputedStyle(element).position;
const parentPosition = getComputedStyle(container).position;
const staticParent = !parentIsBody && parentPosition === 'static';
let relativeParent = !parentIsBody && parentPosition === 'relative';
const relContainer = staticParent && closestRelative(container);

const { elementPosition, containerIsStatic, relContainer } = positions;
let { containerIsRelative } = positions;
// static containers should refer to another relative container or the body
container = relContainer || container;
relativeParent = staticParent && relContainer ? 1 : relativeParent;
containerIsRelative = containerIsStatic && relContainer ? 1 : containerIsRelative;
parentIsBody = container === document.body;
const parentRect = container.getBoundingClientRect();
const leftBoundry = relativeParent ? parentRect.left : 0;
const rightBoundry = relativeParent ? parentRect.right : windowWidth;
const leftBoundry = containerIsRelative ? parentRect.left : 0;
const rightBoundry = containerIsRelative ? parentRect.right : windowWidth;
// this case should not be possible
// absoluteParent = !parentIsBody && parentPosition === 'absolute',
// this case requires a container with placement: relative
const absoluteTarget = targetPosition === 'absolute';
// containerIsAbsolute = !parentIsBody && containerPosition === 'absolute',
// this case requires a container with position: relative
const absoluteTarget = elementPosition === 'absolute';
const targetRect = element.getBoundingClientRect();
const scroll = parentIsBody
? { x: window.pageXOffset, y: window.pageYOffset }
: { x: container.scrollLeft, y: container.scrollTop };
const elemDimensions = { w: element.offsetWidth, h: element.offsetHeight };
const top = relativeParent ? element.offsetTop : targetRect.top;
const left = relativeParent ? element.offsetLeft : targetRect.left;
const top = containerIsRelative ? element.offsetTop : targetRect.top;
const left = containerIsRelative ? element.offsetLeft : targetRect.left;
// reset arrow style
arrow.style.top = '';
arrow.style.left = '';
@@ -2245,8 +2231,12 @@
}
} else if (['top', 'bottom'].includes(placement)) {
if (e && isMedia(element)) {
const eX = !relativeParent ? e.pageX : e.layerX + (absoluteTarget ? element.offsetLeft : 0);
const eY = !relativeParent ? e.pageY : e.layerY + (absoluteTarget ? element.offsetTop : 0);
const eX = !containerIsRelative
? e.pageX
: e.layerX + (absoluteTarget ? element.offsetLeft : 0);
const eY = !containerIsRelative
? e.pageY
: e.layerY + (absoluteTarget ? element.offsetTop : 0);

if (placement === 'top') {
topPosition = eY - tipDimensions.h - (isPopover ? arrowWidth : arrowHeight);
@@ -2323,6 +2313,36 @@
return modal || navbarFixed || document.body;
}

function closestRelative(element) {
let retval = null;
let el = element;
while (el !== document.body) {
el = el.parentElement;
if (getComputedStyle(el).position === 'relative') {
retval = el;
break;
}
}
return retval;
}

function setHtml(element, content, sanitizeFn) {
if (typeof content === 'string' && !content.length) return;

if (typeof content === 'object') {
element.append(content);
} else {
let dirty = content.trim(); // fixing #233

if (typeof sanitizeFn === 'function') dirty = sanitizeFn(dirty);

const domParser = new DOMParser();
const tempDocument = domParser.parseFromString(dirty, 'text/html');
const method = tempDocument.children.length ? 'innerHTML' : 'innerText';
element[method] = tempDocument.body[method];
}
}

/* Native JavaScript for Bootstrap 5 | Popover
---------------------------------------------- */

@@ -2335,12 +2355,13 @@
template: '<div class="popover" role="tooltip"><div class="popover-arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>', // string
title: null, // string
content: null, // string
sanitizeFn: null, // function
customClass: null, // string
dismissible: false, // boolean
animation: true, // boolean
trigger: 'hover', // string
placement: 'top', // string
btnClose: '<button class="btn-close" aria-label="Close"></button>', // string
sanitizeFn: null, // function
dismissible: false, // boolean
animation: true, // boolean
delay: 200, // number
};

@@ -2350,11 +2371,8 @@
const isIphone = navigator.userAgentData
? navigator.userAgentData.brands.some((x) => appleBrands.test(x.brand))
: appleBrands.test(navigator.userAgent);
// popoverArrowClass = `${popoverString}-arrow`,
const popoverHeaderClass = `${popoverString}-header`;
const popoverBodyClass = `${popoverString}-body`;
// close btn for dissmissible popover
let popoverCloseButton = '<button type="button" class="btn-close"></button>';

// POPOVER CUSTOM EVENTS
// =====================
@@ -2387,51 +2405,59 @@
const {
animation, customClass, sanitizeFn, placement, dismissible,
} = options;
let { title, content, template } = options;
let {
title, content,
} = options;
const {
template, btnClose,
} = options;

// set initial popover class
const placementClass = `bs-${popoverString}-${tipClassPositions[placement]}`;

// fixing #233
title = title ? title.trim() : null;
content = content ? content.trim() : null;

// sanitize title && content
if (sanitizeFn) {
title = title ? sanitizeFn(title) : null;
content = content ? sanitizeFn(content) : null;
template = template ? sanitizeFn(template) : null;
popoverCloseButton = sanitizeFn(popoverCloseButton);
// load template
let popoverTemplate;
if (typeof template === 'object') {
popoverTemplate = template;
} else {
const htmlMarkup = document.createElement('div');
setHtml(htmlMarkup, template, sanitizeFn);
popoverTemplate = htmlMarkup.firstChild;
}
// set popover markup
self.popover = popoverTemplate.cloneNode(true);

self.popover = document.createElement('div');
const { popover } = self;

// set id and aria-describedby
// set id and role attributes
popover.setAttribute('id', id);
popover.setAttribute('role', 'tooltip');

// load template
const popoverTemplate = document.createElement('div');
popoverTemplate.innerHTML = template.trim();
popover.className = popoverTemplate.firstChild.className;
popover.innerHTML = popoverTemplate.firstChild.innerHTML;

const popoverHeader = queryElement(`.${popoverHeaderClass}`, popover);
const popoverBody = queryElement(`.${popoverBodyClass}`, popover);

// set arrow
// set arrow and enable access for styleTip
self.arrow = queryElement(`.${popoverString}-arrow`, popover);

// set dismissible button
if (dismissible) {
title = title ? title + popoverCloseButton : title;
content = title === null ? +popoverCloseButton : content;
if (title) {
if (title instanceof Element) setHtml(title, btnClose, sanitizeFn);
else title += btnClose;
} else {
if (popoverHeader) popoverHeader.remove();
if (content instanceof Element) setHtml(content, btnClose, sanitizeFn);
else content += btnClose;
}
}

// fill the template with content from data attributes
if (title && popoverHeader) popoverHeader.innerHTML = title.trim();
if (content && popoverBody) popoverBody.innerHTML = content.trim();
// fill the template with content from options / data attributes
// also sanitize title && content
if (title && popoverHeader) setHtml(popoverHeader, title, sanitizeFn);
if (content && popoverBody) setHtml(popoverBody, content, sanitizeFn);

// set btn and enable access for styleTip
[self.btn] = popover.getElementsByClassName('btn-close');

// set popover animation and placement
if (!hasClass(popover, popoverString)) addClass(popover, popoverString);
@@ -2443,9 +2469,9 @@
}

function removePopover(self) {
const { element, popover, options } = self;
const { element, popover } = self;
element.removeAttribute(ariaDescribedBy);
options.container.removeChild(popover);
popover.remove();
self.timer = null;
}

@@ -2470,12 +2496,11 @@

function dismissHandlerToggle(self, add) {
const action = add ? addEventListener : removeEventListener;
const { options, element, popover } = self;
const { options, element, btn } = self;
const { trigger, dismissible } = options;

if (dismissible) {
const [btnClose] = popover.getElementsByClassName('btn-close');
if (btnClose) btnClose[action]('click', self.hide);
if (btn) btn[action]('click', self.hide);
} else {
if (trigger === 'focus') element[action]('focusout', self.hide);
if (trigger === 'hover') document[action]('touchstart', popoverTouchHandler, passiveHandler);
@@ -2488,12 +2513,10 @@
}

function popoverShowTrigger(self) {
dismissHandlerToggle(self, 1);
self.element.dispatchEvent(shownPopoverEvent);
}

function popoverHideTrigger(self) {
dismissHandlerToggle(self);
removePopover(self);
self.element.dispatchEvent(hiddenPopoverEvent);
}
@@ -2514,6 +2537,7 @@
self.timer = null;
self.popover = null;
self.arrow = null;
self.btn = null;
self.enabled = false;
// set unique ID for aria-describedby
self.id = `${popoverString}-${getUID(element)}`;
@@ -2535,6 +2559,21 @@
// crate popover
createPopover(self);

// set positions
const { container } = self.options;
const elementPosition = getComputedStyle(element).position;
const containerPosition = getComputedStyle(container).position;
const parentIsBody = container === document.body;
const containerIsStatic = !parentIsBody && containerPosition === 'static';
const containerIsRelative = !parentIsBody && containerPosition === 'relative';
const relContainer = containerIsStatic && closestRelative(container);
self.positions = {
elementPosition,
containerIsRelative,
containerIsStatic,
relContainer,
};

// bind
self.update = self.update.bind(self);

@@ -2563,23 +2602,21 @@
const { container } = options;

clearTimeout(self.timer);
if (!isVisibleTip(popover, container)) {
element.dispatchEvent(showPopoverEvent);
if (showPopoverEvent.defaultPrevented) return;

self.timer = setTimeout(() => {
if (!isVisibleTip(popover, container)) {
element.dispatchEvent(showPopoverEvent);
if (showPopoverEvent.defaultPrevented) return;
// append to the container
container.append(popover);
element.setAttribute(ariaDescribedBy, id);

// append to the container
container.appendChild(popover);
element.setAttribute(ariaDescribedBy, id);
self.update(e);
if (!hasClass(popover, showClass)) addClass(popover, showClass);
dismissHandlerToggle(self, 1);

self.update(e);
if (!hasClass(popover, showClass)) addClass(popover, showClass);

if (options.animation) emulateTransitionEnd(popover, () => popoverShowTrigger(self));
else popoverShowTrigger(self);
}
}, 17);
if (options.animation) emulateTransitionEnd(popover, () => popoverShowTrigger(self));
else popoverShowTrigger(self);
}
}

hide(e) {
@@ -2596,13 +2633,13 @@
const { element, popover, options } = self;

clearTimeout(self.timer);

self.timer = setTimeout(() => {
if (isVisibleTip(popover, options.container)) {
element.dispatchEvent(hidePopoverEvent);
if (hidePopoverEvent.defaultPrevented) return;

removeClass(popover, showClass);
dismissHandlerToggle(self);

if (options.animation) emulateTransitionEnd(popover, () => popoverHideTrigger(self));
else popoverHideTrigger(self);
@@ -2648,7 +2685,7 @@
const { popover, options } = self;
const { container, animation } = options;
if (animation && isVisibleTip(popover, container)) {
options.delay = 0; // reset delay
self.options.delay = 0; // reset delay
self.hide();
emulateTransitionEnd(popover, () => togglePopoverHandlers(self));
} else {
@@ -3067,7 +3104,7 @@
const toastSelector = `.${toastString}`;
const toastDismissSelector = `[${dataBsDismiss}="${toastString}"]`;
const showingClass = 'showing';
const hideClass = 'hide';
const hideClass = 'hide'; // marked as deprecated
const toastDefaultOptions = {
animation: true,
autohide: true,
@@ -3085,10 +3122,7 @@
// =====================
function showToastComplete(self) {
const { element, options } = self;
if (!options.animation) {
removeClass(element, showingClass);
addClass(element, showClass);
}
removeClass(element, showingClass);

element.dispatchEvent(shownToastEvent);
if (options.autohide) self.hide();
@@ -3096,13 +3130,15 @@

function hideToastComplete(self) {
const { element } = self;
addClass(element, hideClass);
removeClass(element, showingClass);
removeClass(element, showClass);
addClass(element, hideClass); // B/C
element.dispatchEvent(hiddenToastEvent);
}

function closeToast(self) {
function hideToast(self) {
const { element, options } = self;
removeClass(element, showClass);
addClass(element, showingClass);

if (options.animation) {
reflow(element);
@@ -3112,15 +3148,14 @@
}
}

function openToast(self) {
function showToast(self) {
const { element, options } = self;
removeClass(element, hideClass);
removeClass(element, hideClass); // B/C
reflow(element);
addClass(element, showClass);
addClass(element, showingClass);

if (options.animation) {
reflow(element);
addClass(element, showingClass);
addClass(element, showClass);

emulateTransitionEnd(element, () => showToastComplete(self));
} else {
showToastComplete(self);
@@ -3148,9 +3183,13 @@
super(toastComponent, target, toastDefaultOptions, config);
// bind
const self = this;
const { element, options } = self;

// set fadeClass, the options.animation will override the markup
if (options.animation && !hasClass(element, fadeClass)) addClass(element, fadeClass);
else if (!options.animation && hasClass(element, fadeClass)) removeClass(element, fadeClass);
// dismiss button
self.dismiss = queryElement(toastDismissSelector, self.element);
self.dismiss = queryElement(toastDismissSelector, element);

// bind
self.show = self.show.bind(self);
@@ -3165,13 +3204,12 @@
show() {
const self = this;
const { element } = self;
if (element && hasClass(element, hideClass)) {
if (element && !hasClass(element, showClass)) {
element.dispatchEvent(showToastEvent);
if (showToastEvent.defaultPrevented) return;

addClass(element, fadeClass);
clearTimeout(self.timer);
self.timer = setTimeout(() => openToast(self), 10);
self.timer = setTimeout(() => showToast(self), 10);
}
}

@@ -3184,7 +3222,7 @@
if (hideToastEvent.defaultPrevented) return;

clearTimeout(self.timer);
self.timer = setTimeout(() => closeToast(self),
self.timer = setTimeout(() => hideToast(self),
noTimer ? 10 : options.delay);
}
}
@@ -3192,7 +3230,7 @@
dispose() {
const self = this;
const { element, options } = self;
self.hide();
self.hide(1);

if (options.animation) emulateTransitionEnd(element, () => completeDisposeToast(self));
else completeDisposeToast(self);
@@ -3221,13 +3259,14 @@
const titleAttr = 'title';
const tooltipInnerClass = `${tooltipString}-inner`;
const tooltipDefaultOptions = {
title: null,
template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
placement: 'top',
animation: true,
customClass: null,
delay: 200,
sanitizeFn: null,
title: null, // string
customClass: null, // string | null
placement: 'top', // string
sanitizeFn: null, // function
animation: true, // bool
html: false, // bool
delay: 200, // number
};

// TOOLTIP CUSTOM EVENTS
@@ -3241,51 +3280,48 @@
// =======================
function createTooltip(self) {
const { options, id } = self;
const placementClass = `bs-${tooltipString}-${tipClassPositions[options.placement]}`;
let titleString = options.title.trim();
const {
title, template, customClass, animation, placement, sanitizeFn,
} = options;
const placementClass = `bs-${tooltipString}-${tipClassPositions[placement]}`;

// sanitize stuff
if (options.sanitizeFn) {
titleString = options.sanitizeFn(titleString);
options.template = options.sanitizeFn(options.template);
if (!title) return;

// load template
let tooltipTemplate;
if (typeof template === 'object') {
tooltipTemplate = template;
} else {
const htmlMarkup = document.createElement('div');
setHtml(htmlMarkup, template, sanitizeFn);
tooltipTemplate = htmlMarkup.firstChild;
}

if (!titleString) return;

// create tooltip
self.tooltip = document.createElement('div');
self.tooltip = tooltipTemplate.cloneNode(true);
const { tooltip } = self;

// set aria
// set title
setHtml(queryElement(`.${tooltipInnerClass}`, tooltip), title, sanitizeFn);
// set id & role attribute
tooltip.setAttribute('id', id);

// set markup
const tooltipMarkup = document.createElement('div');
tooltipMarkup.innerHTML = options.template.trim();

tooltip.className = tooltipMarkup.firstChild.className;
tooltip.innerHTML = tooltipMarkup.firstChild.innerHTML;

queryElement(`.${tooltipInnerClass}`, tooltip).innerHTML = titleString;
tooltip.setAttribute('role', tooltipString);

// set arrow
self.arrow = queryElement(`.${tooltipString}-arrow`, tooltip);

// set class and role attribute
tooltip.setAttribute('role', tooltipString);
// set classes
if (!hasClass(tooltip, tooltipString)) addClass(tooltip, tooltipString);
if (options.animation && !hasClass(tooltip, fadeClass)) addClass(tooltip, fadeClass);
if (options.customClass && !hasClass(tooltip, options.customClass)) {
addClass(tooltip, options.customClass);
if (animation && !hasClass(tooltip, fadeClass)) addClass(tooltip, fadeClass);
if (customClass && !hasClass(tooltip, customClass)) {
addClass(tooltip, customClass);
}
if (!hasClass(tooltip, placementClass)) addClass(tooltip, placementClass);
}

function removeTooltip(self) {
const { element, options, tooltip } = self;
const { element, tooltip } = self;
element.removeAttribute(ariaDescribedBy);
options.container.removeChild(tooltip);
tooltip.remove();
self.timer = null;
}

@@ -3387,6 +3423,21 @@
self.id = `${tooltipString}-${getUID(element)}`;
createTooltip(self);

// set positions
const { container } = self.options;
const elementPosition = getComputedStyle(element).position;
const containerPosition = getComputedStyle(container).position;
const parentIsBody = container === document.body;
const containerIsStatic = !parentIsBody && containerPosition === 'static';
const containerIsRelative = !parentIsBody && containerPosition === 'relative';
const relContainer = containerIsStatic && closestRelative(container);
self.positions = {
elementPosition,
containerIsRelative,
containerIsStatic,
relContainer,
};

// attach events
toggleTooltipHandlers(self, 1);
}
@@ -3398,22 +3449,23 @@
const {
options, tooltip, element, id,
} = self;
const {
container, animation,
} = options;
clearTimeout(self.timer);
self.timer = setTimeout(() => {
if (!isVisibleTip(tooltip, options.container)) {
element.dispatchEvent(showTooltipEvent);
if (showTooltipEvent.defaultPrevented) return;
if (!isVisibleTip(tooltip, container)) {
element.dispatchEvent(showTooltipEvent);
if (showTooltipEvent.defaultPrevented) return;

// append to container
options.container.appendChild(tooltip);
element.setAttribute(ariaDescribedBy, id);
// append to container
container.append(tooltip);
element.setAttribute(ariaDescribedBy, id);

self.update(e);
if (!hasClass(tooltip, showClass)) addClass(tooltip, showClass);
if (options.animation) emulateTransitionEnd(tooltip, () => tooltipShownAction(self));
else tooltipShownAction(self);
}
}, 20);
self.update(e);
if (!hasClass(tooltip, showClass)) addClass(tooltip, showClass);
if (animation) emulateTransitionEnd(tooltip, () => tooltipShownAction(self));
else tooltipShownAction(self);
}
}

hide(e) {
@@ -3498,20 +3550,9 @@
constructor: Tooltip,
};

var version = "4.0.6";
var version = "4.0.8";

// import { alertInit } from '../components/alert-native.js';
// import { buttonInit } from '../components/button-native.js';
// import { carouselInit } from '../components/carousel-native.js';
// import { collapseInit } from '../components/collapse-native.js';
// import { dropdownInit } from '../components/dropdown-native.js';
// import { modalInit } from '../components/modal-native.js';
// import { offcanvasInit } from '../components/offcanvas-native.js';
// import { popoverInit } from '../components/popover-native.js';
// import { scrollSpyInit } from '../components/scrollspy-native.js';
// import { tabInit } from '../components/tab-native.js';
// import { toastInit } from '../components/toast-native.js';
// import { tooltipInit } from '../components/tooltip-native.js';
const Version = version;

const componentsInit = {
Alert: Alert.init,
@@ -3547,7 +3588,7 @@
document.addEventListener('DOMContentLoaded', () => initCallback(), { once: true });
}

var index = {
const BSN = {
Alert,
Button,
Carousel,
@@ -3562,9 +3603,9 @@
Tooltip,

initCallback,
Version: version,
Version,
};

return index;
return BSN;

})));
}));
937 src/static/scripts/bootstrap.css (vendored): file diff suppressed because it is too large
4 src/static/scripts/datatables.css (vendored)
@@ -4,10 +4,10 @@
*
* To rebuild or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#bs5/dt-1.11.2
* https://datatables.net/download/#bs5/dt-1.11.3
*
* Included libraries:
* DataTables 1.11.2
* DataTables 1.11.3
*/

@charset "UTF-8";
245 src/static/scripts/datatables.js (vendored)
@@ -4,20 +4,20 @@
|
||||
*
|
||||
* To rebuild or modify this file with the latest versions of the included
|
||||
* software please visit:
|
||||
* https://datatables.net/download/#bs5/dt-1.11.2
|
||||
* https://datatables.net/download/#bs5/dt-1.11.3
|
||||
*
|
||||
* Included libraries:
|
||||
* DataTables 1.11.2
|
||||
* DataTables 1.11.3
|
||||
*/
|
||||
|
||||
/*! DataTables 1.11.2
|
||||
/*! DataTables 1.11.3
|
||||
* ©2008-2021 SpryMedia Ltd - datatables.net/license
|
||||
*/
|
||||
|
||||
/**
|
||||
* @summary DataTables
|
||||
* @description Paginate, search and order HTML tables
|
||||
* @version 1.11.2
|
||||
* @version 1.11.3
|
||||
* @file jquery.dataTables.js
|
||||
* @author SpryMedia Ltd
|
||||
* @contact www.datatables.net
|
||||
@@ -1626,6 +1626,14 @@
|
||||
return out;
|
||||
}
|
||||
|
||||
var _includes = function (search, start) {
|
||||
if (start === undefined) {
|
||||
start = 0;
|
||||
}
|
||||
|
||||
return this.indexOf(search, start) !== -1;
|
||||
};
|
||||
|
||||
// Array.isArray polyfill.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray
|
||||
if (! Array.isArray) {
|
||||
@@ -1634,6 +1642,10 @@
|
||||
};
|
||||
}
|
||||
|
||||
if (! Array.prototype.includes) {
|
||||
Array.prototype.includes = _includes;
|
||||
}
|
||||
|
||||
// .trim() polyfill
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/trim
|
||||
if (!String.prototype.trim) {
|
||||
@@ -1642,6 +1654,10 @@
|
||||
};
|
||||
}
|
||||
|
||||
if (! String.prototype.includes) {
|
||||
String.prototype.includes = _includes;
|
||||
}
|
||||
|
||||
/**
|
||||
* DataTables utility methods
|
||||
*
|
||||
@@ -2808,9 +2824,18 @@
|
||||
return cellData.call( rowData );
|
||||
}
|
||||
|
||||
if ( cellData === null && type == 'display' ) {
|
||||
if ( cellData === null && type === 'display' ) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if ( type === 'filter' ) {
|
||||
var fomatters = DataTable.ext.type.search;
|
||||
|
||||
if ( fomatters[ col.sType ] ) {
|
||||
cellData = fomatters[ col.sType ]( cellData );
|
||||
}
|
||||
}
|
||||
|
||||
return cellData;
|
||||
}
|
||||
|
||||
@@ -4565,7 +4590,6 @@
|
||||
var columns = settings.aoColumns;
|
||||
var column;
|
||||
var i, j, ien, jen, filterData, cellData, row;
|
||||
var fomatters = DataTable.ext.type.search;
|
||||
var wasInvalidated = false;
|
||||
|
||||
for ( i=0, ien=settings.aoData.length ; i<ien ; i++ ) {
|
||||
@@ -4580,10 +4604,6 @@
|
||||
if ( column.bSearchable ) {
|
||||
cellData = _fnGetCellData( settings, i, j, 'filter' );
|
||||
|
||||
if ( fomatters[ column.sType ] ) {
|
||||
cellData = fomatters[ column.sType ]( cellData );
|
||||
}
|
||||
|
||||
// Search in DataTables 1.10 is string based. In 1.11 this
|
||||
// should be altered to also allow strict type checking.
|
||||
if ( cellData === null ) {
|
||||
@@ -6374,6 +6394,10 @@
|
||||
*/
|
||||
function _fnSaveState ( settings )
|
||||
{
|
||||
if (settings._bLoadingState) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* Store the interesting variables */
|
||||
var state = {
|
||||
time: +new Date(),
|
||||
@@ -6408,99 +6432,129 @@
|
||||
*/
|
||||
function _fnLoadState ( settings, oInit, callback )
|
||||
{
|
||||
var i, ien;
|
||||
var columns = settings.aoColumns;
|
||||
var loaded = function ( s ) {
|
||||
if ( ! s || ! s.time ) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
// Allow custom and plug-in manipulation functions to alter the saved data set and
|
||||
// cancelling of loading by returning false
|
||||
var abStateLoad = _fnCallbackFire( settings, 'aoStateLoadParams', 'stateLoadParams', [settings, s] );
|
||||
if ( $.inArray( false, abStateLoad ) !== -1 ) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
// Reject old data
|
||||
var duration = settings.iStateDuration;
|
||||
if ( duration > 0 && s.time < +new Date() - (duration*1000) ) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
// Number of columns have changed - all bets are off, no restore of settings
|
||||
if ( s.columns && columns.length !== s.columns.length ) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
// Store the saved state so it might be accessed at any time
|
||||
settings.oLoadedState = $.extend( true, {}, s );
|
||||
|
||||
// Restore key features - todo - for 1.11 this needs to be done by
|
||||
// subscribed events
|
||||
if ( s.start !== undefined ) {
|
||||
settings._iDisplayStart = s.start;
|
||||
settings.iInitDisplayStart = s.start;
|
||||
}
|
||||
if ( s.length !== undefined ) {
|
||||
settings._iDisplayLength = s.length;
|
||||
}
|
||||
|
||||
// Order
|
||||
if ( s.order !== undefined ) {
|
||||
settings.aaSorting = [];
|
||||
$.each( s.order, function ( i, col ) {
|
||||
settings.aaSorting.push( col[0] >= columns.length ?
|
||||
[ 0, col[1] ] :
|
||||
col
|
||||
);
|
||||
} );
|
||||
}
|
||||
|
||||
// Search
|
||||
if ( s.search !== undefined ) {
|
||||
$.extend( settings.oPreviousSearch, _fnSearchToHung( s.search ) );
|
||||
}
|
||||
|
||||
// Columns
|
||||
//
|
||||
if ( s.columns ) {
|
||||
for ( i=0, ien=s.columns.length ; i<ien ; i++ ) {
|
||||
var col = s.columns[i];
|
||||
|
||||
// Visibility
|
||||
if ( col.visible !== undefined ) {
|
||||
columns[i].bVisible = col.visible;
|
||||
}
|
||||
|
||||
// Search
|
||||
if ( col.search !== undefined ) {
|
||||
$.extend( settings.aoPreSearchCols[i], _fnSearchToHung( col.search ) );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_fnCallbackFire( settings, 'aoStateLoaded', 'stateLoaded', [settings, s] );
|
||||
callback();
|
||||
};
|
||||
|
||||
if ( ! settings.oFeatures.bStateSave ) {
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
|
||||
var loaded = function(state) {
|
||||
_fnImplementState(settings, state, callback);
|
||||
}
|
||||
|
||||
var state = settings.fnStateLoadCallback.call( settings.oInstance, settings, loaded );
|
||||
|
||||
if ( state !== undefined ) {
|
||||
			loaded( state );
			_fnImplementState( settings, state, callback );
		}
		// otherwise, wait for the loaded callback to be executed

		return true;
	}


	function _fnImplementState ( settings, s, callback) {
		var i, ien;
		var columns = settings.aoColumns;
		settings._bLoadingState = true;

		// When StateRestore was introduced the state could now be implemented at any time
		// Not just initialisation. To do this an api instance is required in some places
		var api = settings._bInitComplete ? new DataTable.Api(settings) : null;

		if ( ! s || ! s.time ) {
			settings._bLoadingState = false;
			callback();
			return;
		}

		// Allow custom and plug-in manipulation functions to alter the saved data set and
		// cancelling of loading by returning false
		var abStateLoad = _fnCallbackFire( settings, 'aoStateLoadParams', 'stateLoadParams', [settings, s] );
		if ( $.inArray( false, abStateLoad ) !== -1 ) {
			settings._bLoadingState = false;
			callback();
			return;
		}

		// Reject old data
		var duration = settings.iStateDuration;
		if ( duration > 0 && s.time < +new Date() - (duration*1000) ) {
			settings._bLoadingState = false;
			callback();
			return;
		}

		// Number of columns have changed - all bets are off, no restore of settings
		if ( s.columns && columns.length !== s.columns.length ) {
			settings._bLoadingState = false;
			callback();
			return;
		}

		// Store the saved state so it might be accessed at any time
		settings.oLoadedState = $.extend( true, {}, s );

		// Restore key features - todo - for 1.11 this needs to be done by
		// subscribed events
		if ( s.start !== undefined ) {
			settings._iDisplayStart = s.start;
			if(api === null) {
				settings.iInitDisplayStart = s.start;
			}
		}
		if ( s.length !== undefined ) {
			settings._iDisplayLength = s.length;
		}

		// Order
		if ( s.order !== undefined ) {
			settings.aaSorting = [];
			$.each( s.order, function ( i, col ) {
				settings.aaSorting.push( col[0] >= columns.length ?
					[ 0, col[1] ] :
					col
				);
			} );
		}

		// Search
		if ( s.search !== undefined ) {
			$.extend( settings.oPreviousSearch, _fnSearchToHung( s.search ) );
		}

		// Columns
		if ( s.columns ) {
			for ( i=0, ien=s.columns.length ; i<ien ; i++ ) {
				var col = s.columns[i];

				// Visibility
				if ( col.visible !== undefined ) {
					// If the api is defined, the table has been initialised so we need to use it rather than internal settings
					if (api) {
						// Don't redraw the columns on every iteration of this loop, we will do this at the end instead
						api.column(i).visible(col.visible, false);
					}
					else {
						columns[i].bVisible = col.visible;
					}
				}

				// Search
				if ( col.search !== undefined ) {
					$.extend( settings.aoPreSearchCols[i], _fnSearchToHung( col.search ) );
				}
			}

			// If the api is defined then we need to adjust the columns once the visibility has been changed
			if (api) {
				api.columns.adjust();
			}
		}

		settings._bLoadingState = false;
		_fnCallbackFire( settings, 'aoStateLoaded', 'stateLoaded', [settings, s] );
		callback();
	};

	/**
	 * Return the settings object for a particular table

@@ -9590,7 +9644,7 @@
	 * @type string
	 * @default Version number
	 */
	DataTable.version = "1.11.2";
	DataTable.version = "1.11.3";

	/**
	 * Private data store, containing all of the settings objects that are

@@ -14015,7 +14069,7 @@
		 *
		 *  @type string
		 */
		build:"bs5/dt-1.11.2",
		build:"bs5/dt-1.11.3",


	/**
@@ -15048,6 +15102,10 @@
	 */

	var __htmlEscapeEntities = function ( d ) {
		if (Array.isArray(d)) {
			d = d.join(',');
		}

		return typeof d === 'string' ?
			d
				.replace(/&/g, '&amp;')
@@ -15242,6 +15300,7 @@
		_fnSortData: _fnSortData,
		_fnSaveState: _fnSaveState,
		_fnLoadState: _fnLoadState,
		_fnImplementState: _fnImplementState,
		_fnSettingsFromNode: _fnSettingsFromNode,
		_fnLog: _fnLog,
		_fnMap: _fnMap,

@@ -4,9 +4,9 @@
    <meta http-equiv="content-type" content="text/html; charset=UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no" />
    <meta name="robots" content="noindex,nofollow" />
    <link rel="icon" type="image/png" href="{{urlpath}}/bwrs_static/vaultwarden-icon.png">
    <link rel="icon" type="image/png" href="{{urlpath}}/vw_static/vaultwarden-icon.png">
    <title>Vaultwarden Admin Panel</title>
    <link rel="stylesheet" href="{{urlpath}}/bwrs_static/bootstrap.css" />
    <link rel="stylesheet" href="{{urlpath}}/vw_static/bootstrap.css" />
    <style>
        body {
            padding-top: 75px;
@@ -21,7 +21,7 @@
            margin: -5px 0 0 0;
        }
    </style>
    <script src="{{urlpath}}/bwrs_static/identicon.js"></script>
    <script src="{{urlpath}}/vw_static/identicon.js"></script>
    <script>
        'use strict';

@@ -85,7 +85,7 @@
<body class="bg-light">
    <nav class="navbar navbar-expand-md navbar-dark bg-dark mb-4 shadow fixed-top">
        <div class="container-xl">
            <a class="navbar-brand" href="{{urlpath}}/admin"><img class="vaultwarden-icon" src="{{urlpath}}/bwrs_static/vaultwarden-icon.png" alt="V">aultwarden Admin</a>
            <a class="navbar-brand" href="{{urlpath}}/admin"><img class="vaultwarden-icon" src="{{urlpath}}/vw_static/vaultwarden-icon.png" alt="V">aultwarden Admin</a>
            <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarCollapse"
                aria-controls="navbarCollapse" aria-expanded="false" aria-label="Toggle navigation">
                <span class="navbar-toggler-icon"></span>
@@ -135,6 +135,6 @@
            }
        })();
    </script>
    <script src="{{urlpath}}/bwrs_static/bootstrap-native.js"></script>
    <script src="{{urlpath}}/vw_static/bootstrap-native.js"></script>
</body>
</html>

@@ -150,7 +150,7 @@

    <dt class="col-sm-5">Domain configuration
        <span class="badge bg-success d-none" id="domain-success" title="The domain variable matches the browser location and seems to be configured correctly.">Match</span>
        <span class="badge bg-danger d-none" id="domain-warning" title="The domain variable does not matches the browsers location.
The domain variable does not seem to be configured correctly.
Some features may not work as expected!">No Match</span>
        <span class="badge bg-danger d-none" id="domain-warning" title="The domain variable does not match the browser location.
The domain variable does not seem to be configured correctly.
Some features may not work as expected!">No Match</span>
        <span class="badge bg-success d-none" id="https-success" title="Configurued to use HTTPS">HTTPS</span>
        <span class="badge bg-danger d-none" id="https-warning" title="Not configured to use HTTPS.
Some features may not work as expected!">No HTTPS</span>
    </dt>

@@ -48,9 +48,9 @@
        </div>
    </main>

    <link rel="stylesheet" href="{{urlpath}}/bwrs_static/datatables.css" />
    <script src="{{urlpath}}/bwrs_static/jquery-3.6.0.slim.js"></script>
    <script src="{{urlpath}}/bwrs_static/datatables.js"></script>
    <link rel="stylesheet" href="{{urlpath}}/vw_static/datatables.css" />
    <script src="{{urlpath}}/vw_static/jquery-3.6.0.slim.js"></script>
    <script src="{{urlpath}}/vw_static/datatables.js"></script>
    <script>
        'use strict';

@@ -135,9 +135,9 @@
        </div>
    </main>

    <link rel="stylesheet" href="{{urlpath}}/bwrs_static/datatables.css" />
    <script src="{{urlpath}}/bwrs_static/jquery-3.6.0.slim.js"></script>
    <script src="{{urlpath}}/bwrs_static/datatables.js"></script>
    <link rel="stylesheet" href="{{urlpath}}/vw_static/datatables.css" />
    <script src="{{urlpath}}/vw_static/jquery-3.6.0.slim.js"></script>
    <script src="{{urlpath}}/vw_static/datatables.js"></script>
    <script>
        'use strict';

@@ -4,13 +4,13 @@
            </td>
        </tr>
    </table>

    <table class="footer" cellpadding="0" cellspacing="0" width="100%" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; clear: both; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; width: 100%;">
        <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
            <td class="aligncenter social-icons" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 15px 0 0 0;" valign="top">
                <table cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0 auto;">
                    <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
                        <td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{url}}/bwrs_static/mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>
                        <td style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; padding: 0 10px;" valign="top"><a href="https://github.com/dani-garcia/vaultwarden" target="_blank" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #999; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 12px; line-height: 20px; margin: 0; text-decoration: underline;"><img src="{{url}}/vw_static/mail-github.png" alt="GitHub" width="30" height="30" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" /></a></td>
                    </tr>
                </table>
            </td>
@@ -21,4 +21,4 @@
        </tr>
    </table>
</body>
</html>

@@ -81,7 +81,7 @@
    <table class="body-wrap" cellpadding="0" cellspacing="0" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; width: 100%;" bgcolor="#f6f6f6">
        <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
            <td valign="middle" class="aligncenter middle logo" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; padding: 20px 0 10px;" align="center">
                <img src="{{url}}/bwrs_static/logo-gray.png" alt="" width="190" height="39" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" />
                <img src="{{url}}/vw_static/logo-gray.png" alt="" width="190" height="39" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; border: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0; max-width: 100%;" />
            </td>
        </tr>
        <tr style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 16px; line-height: 25px; margin: 0;">
@@ -91,4 +91,4 @@
            <td class="content" align="center" style="-webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none; box-sizing: border-box; color: #333; display: block; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: 0; line-height: 0; margin: 0 auto; max-width: 600px; padding-bottom: 20px;" valign="top">
                <table class="main" width="100%" cellpadding="0" cellspacing="0" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; margin: 0; -webkit-text-size-adjust: none; border: 1px solid #e9e9e9; border-radius: 3px;" bgcolor="white">
                    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
                        <td class="content-wrap" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 20px; -webkit-text-size-adjust: none;" valign="top">

10
src/static/templates/email/incomplete_2fa_login.hbs
Normal file
@@ -0,0 +1,10 @@
Incomplete Two-Step Login From {{{device}}}
<!---------------->
Someone attempted to log into your account with the correct master password, but did not provide the correct token or action required to complete the two-step login process within {{time_limit}} minutes of the initial login attempt.

* Date: {{datetime}}
* IP Address: {{ip}}
* Device Type: {{device}}

If this was not you or someone you authorized, then you should change your master password as soon as possible, as it is likely to be compromised.
{{> email/email_footer_text }}
31
src/static/templates/email/incomplete_2fa_login.html.hbs
Normal file
@@ -0,0 +1,31 @@
Incomplete Two-Step Login From {{{device}}}
<!---------------->
{{> email/email_header }}
<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
        <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
            Someone attempted to log into your account with the correct master password, but did not provide the correct token or action required to complete the two-step login process within {{time_limit}} minutes of the initial login attempt.
        </td>
    </tr>
    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
        <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
            <b>Date</b>: {{datetime}}
        </td>
    </tr>
    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
        <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
            <b>IP Address:</b> {{ip}}
        </td>
    </tr>
    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
        <td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none;" valign="top">
            <b>Device Type:</b> {{device}}
        </td>
    </tr>
    <tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
        <td class="content-block last" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0; -webkit-text-size-adjust: none;" valign="top">
            If this was not you or someone you authorized, then you should change your master password as soon as possible, as it is likely to be compromised.
        </td>
    </tr>
</table>
{{> email/email_footer }}
81
src/util.rs
@@ -11,6 +11,9 @@ use rocket::{
    Data, Request, Response, Rocket,
};

use std::thread::sleep;
use std::time::Duration;

use crate::CONFIG;

pub struct AppHeaders();
@@ -24,7 +27,7 @@ impl Fairing for AppHeaders {
    }

    fn on_response(&self, _req: &Request, res: &mut Response) {
        res.set_raw_header("Feature-Policy", "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; encrypted-media 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture 'none'; sync-xhr 'self' https://haveibeenpwned.com https://2fa.directory; usb 'none'; vr 'none'");
        res.set_raw_header("Permissions-Policy", "accelerometer=(), ambient-light-sensor=(), autoplay=(), camera=(), encrypted-media=(), fullscreen=(), geolocation=(), gyroscope=(), magnetometer=(), microphone=(), midi=(), payment=(), picture-in-picture=(), sync-xhr=(self \"https://haveibeenpwned.com\" \"https://2fa.directory\"), usb=(), vr=()");
        res.set_raw_header("Referrer-Policy", "same-origin");
        res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
        res.set_raw_header("X-Content-Type-Options", "nosniff");
@@ -99,29 +102,53 @@ impl Fairing for Cors {
    }
}

pub struct Cached<R>(R, String);
pub struct Cached<R> {
    response: R,
    is_immutable: bool,
    ttl: u64,
}

impl<R> Cached<R> {
    pub fn long(r: R) -> Cached<R> {
        // 7 days
        Self::ttl(r, 604800)
    pub fn long(response: R, is_immutable: bool) -> Cached<R> {
        Self {
            response,
            is_immutable,
            ttl: 604800, // 7 days
        }
    }

    pub fn short(r: R) -> Cached<R> {
        // 10 minutes
        Self(r, String::from("public, max-age=600"))
    pub fn short(response: R, is_immutable: bool) -> Cached<R> {
        Self {
            response,
            is_immutable,
            ttl: 600, // 10 minutes
        }
    }

    pub fn ttl(r: R, ttl: u64) -> Cached<R> {
        Self(r, format!("public, immutable, max-age={}", ttl))
    pub fn ttl(response: R, ttl: u64, is_immutable: bool) -> Cached<R> {
        Self {
            response,
            is_immutable,
            ttl,
        }
    }
}

impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
    fn respond_to(self, req: &Request) -> response::Result<'r> {
        match self.0.respond_to(req) {
        let cache_control_header = if self.is_immutable {
            format!("public, immutable, max-age={}", self.ttl)
        } else {
            format!("public, max-age={}", self.ttl)
        };

        let time_now = chrono::Local::now();

        match self.response.respond_to(req) {
            Ok(mut res) => {
                res.set_raw_header("Cache-Control", self.1);
                res.set_raw_header("Cache-Control", cache_control_header);
                let expiry_time = time_now + chrono::Duration::seconds(self.ttl.try_into().unwrap());
                res.set_raw_header("Expires", format_datetime_http(&expiry_time));
                Ok(res)
            }
            e @ Err(_) => e,
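
As a usage sketch (not part of this diff), here is how a caller might drive the reworked `Cached` API; the route path and `NamedFile` payload are illustrative assumptions:

use rocket::response::NamedFile;

// Hypothetical handler: serve a static asset with long-lived caching.
// With is_immutable = true, the responder above emits
// "Cache-Control: public, immutable, max-age=604800" plus a matching
// "Expires" header; passing false would drop the "immutable" directive.
#[get("/vw_static/<filename>")]
fn static_file(filename: String) -> Option<Cached<NamedFile>> {
    let file = NamedFile::open(format!("static/{}", filename)).ok()?;
    Some(Cached::long(file, true))
}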
@@ -282,9 +309,9 @@ pub fn delete_file(path: &str) -> IOResult<()> {
    res
}

const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

pub fn get_display_size(size: i32) -> String {
    const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"];

    let mut size: f64 = size.into();
    let mut unit_counter = 0;

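The hunk is truncated before the conversion loop; a plausible continuation, assuming the usual divide-by-1024 walk over the `UNITS` table shown above:

// Sketch of the rest of get_display_size (elided from this hunk):
// scale the value down by 1024 until it fits the current unit, then format.
while size >= 1024.0 && unit_counter < UNITS.len() - 1 {
    size /= 1024.0;
    unit_counter += 1;
}
format!("{:.2} {}", size, UNITS[unit_counter])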
@@ -359,10 +386,10 @@ where
    try_parse_string(get_env_str_value(key))
}

const TRUE_VALUES: &[&str] = &["true", "t", "yes", "y", "1"];
const FALSE_VALUES: &[&str] = &["false", "f", "no", "n", "0"];

pub fn get_env_bool(key: &str) -> Option<bool> {
    const TRUE_VALUES: &[&str] = &["true", "t", "yes", "y", "1"];
    const FALSE_VALUES: &[&str] = &["false", "f", "no", "n", "0"];

    match get_env_str_value(key) {
        Some(val) if TRUE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(true),
        Some(val) if FALSE_VALUES.contains(&val.to_lowercase().as_ref()) => Some(false),
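A quick illustration of the parsing rules above; the variable name is invented for the example:

// Values are matched case-insensitively against TRUE_VALUES/FALSE_VALUES;
// anything else yields None.
std::env::set_var("EXAMPLE_FLAG", "Yes");
assert_eq!(get_env_bool("EXAMPLE_FLAG"), Some(true));
std::env::set_var("EXAMPLE_FLAG", "0");
assert_eq!(get_env_bool("EXAMPLE_FLAG"), Some(false));
std::env::set_var("EXAMPLE_FLAG", "maybe");
assert_eq!(get_env_bool("EXAMPLE_FLAG"), None);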
@@ -375,7 +402,6 @@ pub fn get_env_bool(key: &str) -> Option<bool> {
//

use chrono::{DateTime, Local, NaiveDateTime, TimeZone};
use chrono_tz::Tz;

/// Formats a UTC-offset `NaiveDateTime` in the format used by Bitwarden API
/// responses with "date" fields (`CreationDate`, `RevisionDate`, etc.).
@@ -393,7 +419,7 @@ pub fn format_datetime_local(dt: &DateTime<Local>, fmt: &str) -> String {
    // Try parsing the `TZ` environment variable to enable formatting `%Z` as
    // a time zone abbreviation.
    if let Ok(tz) = env::var("TZ") {
        if let Ok(tz) = tz.parse::<Tz>() {
        if let Ok(tz) = tz.parse::<chrono_tz::Tz>() {
            return dt.with_timezone(&tz).format(fmt).to_string();
        }
    }
@@ -410,6 +436,17 @@ pub fn format_naive_datetime_local(dt: &NaiveDateTime, fmt: &str) -> String {
    format_datetime_local(&Local.from_utc_datetime(dt), fmt)
}

/// Formats a `DateTime<Local>` as required for HTTP
///
/// https://httpwg.org/specs/rfc7231.html#http.date
pub fn format_datetime_http(dt: &DateTime<Local>) -> String {
    let expiry_time: chrono::DateTime<chrono::Utc> = chrono::DateTime::from_utc(dt.naive_utc(), chrono::Utc);

    // HACK: HTTP expects the date to always be GMT (UTC) rather than giving an
    // offset (which would always be 0 in UTC anyway)
    expiry_time.to_rfc2822().replace("+0000", "GMT")
}

//
// Deployment environment methods
//
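
To make the RFC 7231 date shape concrete, a small sketch of what the new helper emits (the timestamp is chosen for illustration):

// The local time is normalised to UTC, rendered via to_rfc2822(), and the
// "+0000" offset is swapped for the literal "GMT" that HTTP requires,
// e.g. "Sat, 9 Oct 2021 12:00:00 GMT" (for a machine whose local offset is UTC).
let dt = chrono::Local.ymd(2021, 10, 9).and_hms(12, 0, 0);
println!("{}", format_datetime_http(&dt));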
@@ -442,7 +479,7 @@ use serde_json::{self, Value};

pub type JsonMap = serde_json::Map<String, Value>;

#[derive(PartialEq, Serialize, Deserialize)]
#[derive(Serialize, Deserialize)]
pub struct UpCase<T: DeserializeOwned> {
    #[serde(deserialize_with = "upcase_deserialize")]
    #[serde(flatten)]
@@ -517,6 +554,8 @@ fn upcase_value(value: Value) -> Value {
    }
}

// Inner function to handle some speciale case for the 'ssn' key.
// This key is part of the Identity Cipher (Social Security Number)
fn _process_key(key: &str) -> String {
    match key.to_lowercase().as_ref() {
        "ssn" => "SSN".into(),
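For context, a sketch of what this key normalisation does; the default branch is an assumption, since the hunk cuts off after the "ssn" arm:

// "ssn" would otherwise be upcased letter-by-first-letter to "Ssn", so it is
// special-cased to the "SSN" the Identity cipher expects.
assert_eq!(_process_key("ssn"), "SSN");
// Assumed default branch: upper-case the first letter of any other key.
assert_eq!(_process_key("revisionDate"), "RevisionDate");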
@@ -550,8 +589,6 @@ where
    }
}

use std::{thread::sleep, time::Duration};

pub fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
where
    F: Fn() -> Result<T, E>,
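
The diff ends at the signature; a hedged sketch of a typical call site, consistent with the sleep/Duration imports moved to the top of the file (`pool` and the config accessor are hypothetical names for illustration):

// Sketch only: retry a fallible database operation up to max_tries times,
// sleeping between attempts inside retry_db.
let conn = retry_db(|| pool.get_connection(), CONFIG.db_connection_retries())
    .expect("Failed to connect to the database");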