Compare commits


No commits in common. "main" and "1.34.2" have entirely different histories.
main ... 1.34.2

174 changed files with 5646 additions and 13680 deletions

View File

@@ -80,16 +80,8 @@
## Timeout when acquiring database connection
# DATABASE_TIMEOUT=30
## Database idle timeout
## Timeout in seconds before idle connections to the database are closed.
# DATABASE_IDLE_TIMEOUT=600
## Database min connections
## Define the minimum size of the connection pool used for connecting to the database.
# DATABASE_MIN_CONNS=2
## Database max connections
## Define the maximum size of the connection pool used for connecting to the database.
## Define the size of the connection pool used for connecting to the database.
# DATABASE_MAX_CONNS=10
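As a rough sketch of how these pool knobs fit together, here is one way they could be applied through diesel's bundled r2d2 builder (illustrative only, assuming the sqlite backend; `build_pool` is a hypothetical helper, not Vaultwarden's actual wiring):

```rust
// Sketch only: mapping the settings above onto diesel's r2d2 re-export.
use std::time::Duration;

use diesel::r2d2::{ConnectionManager, Pool};
use diesel::sqlite::SqliteConnection;

fn build_pool(database_url: &str) -> Pool<ConnectionManager<SqliteConnection>> {
    let manager = ConnectionManager::<SqliteConnection>::new(database_url);
    Pool::builder()
        .connection_timeout(Duration::from_secs(30))  // DATABASE_TIMEOUT
        .idle_timeout(Some(Duration::from_secs(600))) // DATABASE_IDLE_TIMEOUT
        .min_idle(Some(2))                            // DATABASE_MIN_CONNS
        .max_size(10)                                 // DATABASE_MAX_CONNS
        .build(manager)
        .expect("failed to build the database connection pool")
}
```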
## Database connection initialization
@@ -182,10 +174,6 @@
## Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
## Defaults to every minute. Set blank to disable this job.
# DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
#
## Cron schedule of the job that cleans up SSO auth data from incomplete flows
## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
# PURGE_INCOMPLETE_SSO_AUTH="0 20 0 * * *"
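These schedules use six-field cron syntax (seconds first). A minimal sketch with the job_scheduler_ng crate the project depends on; the closure body is a placeholder:

```rust
// Sketch only: six-field cron ("sec min hour day month weekday") as consumed
// by job_scheduler_ng; the job body below is hypothetical.
use std::time::Duration;

use job_scheduler_ng::{Job, JobScheduler};

fn main() {
    let mut sched = JobScheduler::new();
    // "0 20 0 * * *": second 0, minute 20, hour 0 -> daily at 00:20.
    sched.add(Job::new("0 20 0 * * *".parse().unwrap(), || {
        println!("purging incomplete SSO auth flows");
    }));
    loop {
        sched.tick();
        std::thread::sleep(Duration::from_millis(500));
    }
}
```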
########################
### General settings ###
@@ -348,7 +336,7 @@
## Default: 2592000 (30 days)
# ICON_CACHE_TTL=2592000
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
## Default: 259200 (3 days)
## Default: 2592000 (3 days)
# ICON_CACHE_NEGTTL=259200
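Hits and misses are thus cached with different lifetimes. A minimal sketch of this negative-caching idea (illustrative types, not the project's implementation):

```rust
// Sketch only: successful icon fetches live for ICON_CACHE_TTL, failed
// lookups for the shorter ICON_CACHE_NEGTTL so they are retried sooner.
use std::time::{Duration, Instant};

const ICON_CACHE_TTL: Duration = Duration::from_secs(2_592_000); // 30 days
const ICON_CACHE_NEGTTL: Duration = Duration::from_secs(259_200); // 3 days

enum CachedIcon {
    Found { bytes: Vec<u8>, fetched: Instant },
    Missing { fetched: Instant },
}

impl CachedIcon {
    fn is_expired(&self) -> bool {
        match self {
            CachedIcon::Found { fetched, .. } => fetched.elapsed() > ICON_CACHE_TTL,
            CachedIcon::Missing { fetched } => fetched.elapsed() > ICON_CACHE_NEGTTL,
        }
    }
}
```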
## Icon download timeout
@@ -376,7 +364,6 @@
## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
## - "pm-25373-windows-biometrics-v2": Enable the new implementation of biometrics on Windows. (Needs desktop >= 2025.11.0)
## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
@@ -472,60 +459,6 @@
## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
# ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false
## Prefer IPv6 (AAAA) resolving
## This setting configures the DNS resolver to resolve IPv6 first and, if not available, try IPv4
## This could be useful in IPv6 only environments.
# DNS_PREFER_IPV6=false
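Conceptually this swaps the resolver's lookup order. A hedged sketch of what that could look like with the hickory-resolver crate from Cargo.toml, assuming its `ResolverOpts`/`LookupIpStrategy` API (the helper is ours, not Vaultwarden's code):

```rust
// Sketch only: DNS_PREFER_IPV6 expressed as a hickory-resolver lookup strategy.
use hickory_resolver::config::{LookupIpStrategy, ResolverOpts};

fn resolver_opts(prefer_ipv6: bool) -> ResolverOpts {
    let mut opts = ResolverOpts::default();
    opts.ip_strategy = if prefer_ipv6 {
        LookupIpStrategy::Ipv6thenIpv4 // try AAAA records first, fall back to A
    } else {
        LookupIpStrategy::Ipv4thenIpv6
    };
    opts
}
```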
#####################################
### SSO settings (OpenID Connect) ###
#####################################
## Controls whether users can login using an OpenID Connect identity provider
# SSO_ENABLED=false
## Prevent users from logging in directly without going through SSO
# SSO_ONLY=false
## On SSO signup, if a user with a matching email already exists, make the association
# SSO_SIGNUPS_MATCH_EMAIL=true
## Allow unknown email verification status. Allowing this with `SSO_SIGNUPS_MATCH_EMAIL=true` opens up potential account takeover.
# SSO_ALLOW_UNKNOWN_EMAIL_VERIFICATION=false
## Base URL of the OIDC server (auto-discovery is used)
## - Should not include the `/.well-known/openid-configuration` part and no trailing `/`
## - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse
# SSO_AUTHORITY=https://auth.example.com
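Auto-discovery boils down to fetching and parsing that metadata document. A hedged sketch (the helper name is ours; the real flow goes through the openidconnect crate listed in Cargo.toml):

```rust
// Sketch only: the discovery step amounts to retrieving this JSON document.
// `fetch_provider_metadata` is a hypothetical helper, not project code.
async fn fetch_provider_metadata(authority: &str) -> Result<serde_json::Value, reqwest::Error> {
    let url = format!("{}/.well-known/openid-configuration", authority.trim_end_matches('/'));
    reqwest::get(&url).await?.json::<serde_json::Value>().await
}
```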
## Authorization request scopes. Optional SSO scopes, override if email and profile are not enough (`openid` is implicit).
# SSO_SCOPES="email profile"
## Additional authorization url parameters (ex: to obtain a `refresh_token` with Google Auth).
# SSO_AUTHORIZE_EXTRA_PARAMS="access_type=offline&prompt=consent"
## Activate PKCE for the Auth Code flow.
# SSO_PKCE=true
## Regex for additional trusted Id token audience (by default only the client_id is trusted).
# SSO_AUDIENCE_TRUSTED='^$'
## Set your Client ID and Client Key
# SSO_CLIENT_ID=11111
# SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA
## Optional Master password policy (minComplexity=[0-4]), `enforceOnLogin` is not supported at the moment.
# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}'
## Use SSO only for authentication, not the session lifecycle
# SSO_AUTH_ONLY_NOT_SESSION=false
## Client cache for discovery endpoint. Duration in seconds (0 to disable).
# SSO_CLIENT_CACHE_EXPIRATION=0
## Log all the tokens, LOG_LEVEL=debug is required
# SSO_DEBUG_TOKENS=false
########################
### MFA/2FA settings ###
########################
@@ -585,7 +518,7 @@
##
## According to the RFC6238 (https://tools.ietf.org/html/rfc6238),
## we allow by default the TOTP code which was valid one step back and one in the future.
## This can however allow attackers to be a bit more lucky with their attempts because there are 3 valid codes.
## This can however allow attackers to be a bit more lucky with there attempts because there are 3 valid codes.
## You can disable this, so that only the current TOTP Code is allowed.
## Keep in mind that when a server drifts out of time, valid codes could be marked as invalid.
## In any case, once a code has been used it cannot be used again, and codes which predate it will be invalid.
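A minimal sketch of that one-step window, assuming the totp-lite crate from Cargo.toml (the validation helper itself is illustrative, and replay protection is omitted):

```rust
// Sketch only: accept the previous, current and next 30-second step,
// i.e. three codes are valid at any instant unless the window is disabled.
use totp_lite::{totp_custom, Sha1};

const STEP: u64 = 30; // RFC 6238 default time step

fn code_is_valid(secret: &[u8], submitted: &str, now_secs: u64) -> bool {
    let current = now_secs / STEP;
    (current.saturating_sub(1)..=current + 1)
        .any(|step| totp_custom::<Sha1>(STEP, 6, secret, step * STEP) == submitted)
}
```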
@@ -625,7 +558,7 @@
# SMTP_AUTH_MECHANISM=
## Server name sent during the SMTP HELO
## By default this value should be the machine's hostname,
## By default this value should be is on the machine's hostname,
## but might need to be changed in case it trips some anti-spam filters
# HELO_NAME=

View File

@@ -14,7 +14,6 @@ on:
- "diesel.toml"
- "docker/Dockerfile.j2"
- "docker/DockerSettings.yaml"
- "macros/**"
pull_request:
paths:
@@ -28,12 +27,15 @@ on:
- "diesel.toml"
- "docker/Dockerfile.j2"
- "docker/DockerSettings.yaml"
- "macros/**"
jobs:
build:
name: Build and Test ${{ matrix.channel }}
runs-on: ubuntu-24.04
permissions:
actions: write
contents: read
# We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
runs-on: ubuntu-22.04
timeout-minutes: 120
# Make warnings errors, this is to prevent warnings slipping through.
# This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
@@ -54,7 +56,7 @@ jobs:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
fetch-depth: 0
@@ -68,9 +70,9 @@ jobs:
CHANNEL: ${{ matrix.channel }}
run: |
if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then
RUST_TOOLCHAIN="$(grep -m1 -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
elif [[ "${CHANNEL}" == 'msrv' ]]; then
RUST_TOOLCHAIN="$(grep -m1 -oP 'rust-version\s.*"(\K.*?)(?=")' Cargo.toml)"
RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)"
else
RUST_TOOLCHAIN="${CHANNEL}"
fi
@@ -80,7 +82,7 @@ jobs:
# Only install the clippy and rustfmt components on the default rust-toolchain
- name: "Install rust-toolchain version"
uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
if: ${{ matrix.channel == 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -90,7 +92,7 @@ jobs:
# Install any other channel to be used, for which we do not execute clippy and rustfmt
- name: "Install MSRV version"
uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master @ Apr 29, 2025, 9:22 PM GMT+2
if: ${{ matrix.channel != 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -115,60 +117,60 @@ jobs:
# Enable Rust Caching
- name: Rust Caching
uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
# Like changing the build host from Ubuntu 20.04 to 22.04 for example.
# Only update when really needed! Use a <year>.<month>[.<inc>] format.
prefix-key: "v2025.09-rust"
prefix-key: "v2023.07-rust"
# End Enable Rust Caching
# Run cargo tests
# First test all features together, afterwards test them separately.
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
id: test_sqlite_mysql_postgresql_mimalloc_s3
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger"
id: test_sqlite_mysql_postgresql_mimalloc_logger
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
id: test_sqlite_mysql_postgresql_mimalloc
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc
cargo test --features sqlite,mysql,postgresql,enable_mimalloc
- name: "test features: sqlite,mysql,postgresql"
id: test_sqlite_mysql_postgresql
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features sqlite,mysql,postgresql
cargo test --features sqlite,mysql,postgresql
- name: "test features: sqlite"
id: test_sqlite
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features sqlite
cargo test --features sqlite
- name: "test features: mysql"
id: test_mysql
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features mysql
cargo test --features mysql
- name: "test features: postgresql"
id: test_postgresql
if: ${{ !cancelled() }}
run: |
cargo test --profile ci --features postgresql
cargo test --features postgresql
# End Run cargo tests
# Run cargo clippy, and fail on warnings
- name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
- name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
id: clippy
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
run: |
cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
# End Run cargo clippy
@@ -186,7 +188,7 @@ jobs:
- name: "Some checks failed"
if: ${{ failure() }}
env:
TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
@@ -199,13 +201,13 @@ jobs:
echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"

View File

@@ -6,13 +6,15 @@ on: [ push, pull_request ]
jobs:
docker-templates:
name: Validate docker templates
permissions:
contents: read
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
# End Checkout the repo

View File

@@ -1,19 +1,20 @@
name: Hadolint
on: [ push, pull_request ]
permissions: {}
on: [ push, pull_request ]
jobs:
hadolint:
name: Validate Dockerfile syntax
permissions:
contents: read
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Start Docker Buildx
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
# https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with:
@@ -30,11 +31,11 @@ jobs:
sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
sudo chmod +x /usr/local/bin/hadolint
env:
HADOLINT_VERSION: 2.14.0
HADOLINT_VERSION: 2.12.0
# End Download hadolint
# Checkout the repo
- name: Checkout
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
# End Checkout the repo

View File

@@ -10,39 +10,39 @@ on:
# https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
- '[1-2].[0-9]+.[0-9]+'
concurrency:
# Apply concurrency control only on the upstream repo
group: ${{ github.repository == 'dani-garcia/vaultwarden' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }}
# Don't cancel other runs when creating a tag
cancel-in-progress: ${{ github.ref_type == 'branch' }}
defaults:
run:
shell: bash
env:
# The *_REPO variables need to be configured as repository variables
# Append `/settings/variables/actions` to your repo url
# DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
# Check for Docker hub credentials in secrets
HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
# GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
# Check for Github credentials in secrets
HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
# QUAY_REPO needs to be 'quay.io/<user>/<repo>'
# Check for Quay.io credentials in secrets
HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
jobs:
docker-build:
name: Build Vaultwarden containers
# https://github.com/marketplace/actions/skip-duplicate-actions
# Some checks to determine if we need to continue with building a new docker.
# We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
skip_check:
# Only run this in the upstream repo and not on forks
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Cancel older jobs when running
permissions:
packages: write # Needed to upload packages and artifacts
actions: write
runs-on: ubuntu-24.04
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- name: Skip Duplicates Actions
id: skip_check
uses: fkirc/skip-duplicate-actions@f75f66ce1886f00957d99748a42c724f4330bdcf # v5.3.1
with:
cancel_others: 'true'
# Only run this when not creating a tag
if: ${{ github.ref_type == 'branch' }}
docker-build:
needs: skip_check
if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
name: Build Vaultwarden containers
permissions:
packages: write
contents: read
attestations: write # Needed to generate an artifact attestation for a build
id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
attestations: write
id-token: write
runs-on: ubuntu-24.04
timeout-minutes: 120
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
services:
@@ -53,22 +53,30 @@ jobs:
env:
SOURCE_COMMIT: ${{ github.sha }}
SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
# The *_REPO variables need to be configured as repository variables
# Append `/settings/variables/actions` to your repo url
# DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
# Check for Docker hub credentials in secrets
HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
# GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
# Check for Github credentials in secrets
HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
# QUAY_REPO needs to be 'quay.io/<user>/<repo>'
# Check for Quay.io credentials in secrets
HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
strategy:
matrix:
arch: ["amd64", "arm64", "arm/v7", "arm/v6"]
base_image: ["debian","alpine"]
outputs:
base-tags: ${{ steps.determine-version.outputs.BASE_TAGS }}
steps:
- name: Initialize QEMU binfmt support
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
platforms: "arm64,arm"
# Start Docker Buildx
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
# https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with:
@@ -81,32 +89,23 @@ jobs:
# Checkout the repo
- name: Checkout
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# We need fetch-depth of 0 so we also get all the tag metadata
with:
persist-credentials: false
fetch-depth: 0
# Normalize the architecture string for use in paths and cache keys
- name: Normalize architecture string
env:
MATRIX_ARCH: ${{ matrix.arch }}
run: |
# Replace slashes with nothing to create a safe string for paths/cache keys
NORMALIZED_ARCH="${MATRIX_ARCH//\/}"
echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
# Determine Base Tags and Source Version
- name: Determine Base Tags and Source Version
id: determine-version
shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
run: |
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_OUTPUT}"
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
elif [[ "${REF_TYPE}" == "branch" ]]; then
echo "BASE_TAGS=testing" | tee -a "${GITHUB_OUTPUT}"
echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
fi
# Get the Source Version for this release
@@ -121,7 +120,7 @@ jobs:
# Login to Docker Hub
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -129,6 +128,7 @@ jobs:
- name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
shell: bash
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: |
@@ -136,7 +136,7 @@ jobs:
# Login to GitHub Container Registry
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -145,6 +145,7 @@ jobs:
- name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: |
@@ -152,7 +153,7 @@ jobs:
# Login to Quay.io
- name: Login to Quay.io
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
@@ -161,22 +162,23 @@ jobs:
- name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
shell: bash
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
- name: Configure build cache from/to
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
BASE_IMAGE: ${{ matrix.base_image }}
NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
run: |
#
# Check if there is a GitHub Container Registry Login and use it for caching
if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
else
echo "BAKE_CACHE_FROM="
echo "BAKE_CACHE_TO="
@@ -184,45 +186,31 @@ jobs:
#
- name: Add localhost registry
shell: bash
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"
- name: Generate tags
id: tags
env:
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
run: |
# Convert comma-separated list to newline-separated set commands
TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|")
# Output for use in next step
{
echo "TAGS<<EOF"
echo "$TAGS"
echo "EOF"
} >> "$GITHUB_ENV"
- name: Bake ${{ matrix.base_image }} containers
id: bake_vw
uses: docker/bake-action@5be5f02ff8819ecd3092ea6b2e6261c31774f2b4 # v6.10.0
uses: docker/bake-action@37816e747588cb137173af99ab33873600c46ea8 # v6.8.0
env:
BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}"
BASE_TAGS: "${{ env.BASE_TAGS }}"
SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
with:
pull: true
push: true
source: .
files: docker/docker-bake.hcl
targets: "${{ matrix.base_image }}-multi"
set: |
*.cache-from=${{ env.BAKE_CACHE_FROM }}
*.cache-to=${{ env.BAKE_CACHE_TO }}
*.platform=linux/${{ matrix.arch }}
${{ env.TAGS }}
*.output=type=image,push-by-digest=true,name-canonical=true,push=true
- name: Extract digest SHA
shell: bash
env:
BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
BASE_IMAGE: ${{ matrix.base_image }}
@@ -230,30 +218,38 @@ jobs:
GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
- name: Export digest
env:
DIGEST_SHA: ${{ env.DIGEST_SHA }}
RUNNER_TEMP: ${{ runner.temp }}
run: |
mkdir -p "${RUNNER_TEMP}"/digests
digest="${DIGEST_SHA}"
touch "${RUNNER_TEMP}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
# Extract the Alpine binaries from the containers
- name: Extract binaries
shell: bash
env:
REF_TYPE: ${{ github.ref_type }}
BASE_IMAGE: ${{ matrix.base_image }}
DIGEST_SHA: ${{ env.DIGEST_SHA }}
NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
run: |
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
@@ -267,154 +263,60 @@ jobs:
EXTRACT_TAG="${EXTRACT_TAG}-alpine"
fi
CONTAINER_ID="$(docker create "localhost:5000/vaultwarden/server:${EXTRACT_TAG}@${DIGEST_SHA}")"
# After each extraction the image is removed.
# This is needed because using different platforms doesn't trigger a new pull/download
# Copy the binary
docker cp "$CONTAINER_ID":/vaultwarden vaultwarden-"${NORMALIZED_ARCH}"
# Extract amd64 binary
docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE}
docker rm --force amd64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Clean up
docker rm "$CONTAINER_ID"
# Extract arm64 binary
docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE}
docker rm --force arm64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Extract armv7 binary
docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE}
docker rm --force armv7
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Extract armv6 binary
docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE}
docker rm --force armv6
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Upload artifacts to Github Actions and Attest the binaries
- name: Attest binaries
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
- name: "Upload amd64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
subject-path: vaultwarden-${{ env.NORMALIZED_ARCH }}
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
path: vaultwarden-amd64-${{ matrix.base_image }}
- name: Upload binaries as artifacts
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
- name: "Upload arm64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
path: vaultwarden-${{ env.NORMALIZED_ARCH }}
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
path: vaultwarden-arm64-${{ matrix.base_image }}
merge-manifests:
name: Merge manifests
runs-on: ubuntu-latest
needs: docker-build
env:
BASE_TAGS: ${{ needs.docker-build.outputs.base-tags }}
permissions:
packages: write # Needed to upload packages and artifacts
attestations: write # Needed to generate an artifact attestation for a build
id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
strategy:
matrix:
base_image: ["debian","alpine"]
steps:
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- name: "Upload armv7 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
path: ${{ runner.temp }}/digests
pattern: digests-*-${{ matrix.base_image }}
merge-multiple: true
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
path: vaultwarden-armv7-${{ matrix.base_image }}
# Login to Docker Hub
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
- name: "Upload armv6 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
path: vaultwarden-armv6-${{ matrix.base_image }}
- name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
# Login to GitHub Container Registry
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
- name: "Attest artifacts ${{ matrix.base_image }}"
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
- name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
# Login to Quay.io
- name: Login to Quay.io
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_TOKEN }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
- name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
- name: Create manifest list, push it and extract digest SHA
working-directory: ${{ runner.temp }}/digests
env:
BASE_IMAGE_TAG: "${{ matrix.base_image != 'debian' && format('-{0}', matrix.base_image) || '' }}"
BASE_TAGS: "${{ env.BASE_TAGS }}"
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
run: |
set +e
IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}"
IFS=',' read -ra TAGS <<< "${BASE_TAGS}"
for img in "${IMAGES[@]}"; do
for tag in "${TAGS[@]}"; do
echo "Creating manifest for ${img}:${tag}${BASE_IMAGE_TAG}"
OUTPUT=$(docker buildx imagetools create \
-t "${img}:${tag}${BASE_IMAGE_TAG}" \
$(printf "${img}@sha256:%s " *) 2>&1)
STATUS=$?
if [ ${STATUS} -ne 0 ]; then
echo "Manifest creation failed for ${img}:${tag}${BASE_IMAGE_TAG}"
echo "${OUTPUT}"
exit ${STATUS}
fi
echo "Manifest created for ${img}:${tag}${BASE_IMAGE_TAG}"
echo "${OUTPUT}"
done
done
set -e
# Extract digest SHA for subsequent steps
GET_DIGEST_SHA="$(echo "${OUTPUT}" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
subject-path: vaultwarden-*
# End Upload artifacts to Github Actions

View File

@@ -16,7 +16,7 @@ jobs:
releasecache-cleanup:
name: Releasecache Cleanup
permissions:
packages: write # To be able to cleanup old caches
packages: write
runs-on: ubuntu-24.04
continue-on-error: true
timeout-minutes: 30

View File

@@ -23,18 +23,20 @@ jobs:
if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Trivy Scan
permissions:
security-events: write # To write the security report
contents: read
actions: read
security-events: write
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
- name: Checkout code
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 # v0.32.0
env:
TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
@@ -46,6 +48,6 @@ jobs:
severity: CRITICAL,HIGH
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
with:
sarif_file: 'trivy-results.sarif'

View File

@@ -1,22 +0,0 @@
name: Code Spell Checking
on: [ push, pull_request ]
permissions: {}
jobs:
typos:
name: Run typos spell checking
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
# End Checkout the repo
# When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
- name: Spell Check Repo
uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 # v1.40.0

View File

@@ -13,15 +13,15 @@ jobs:
name: Run zizmor
runs-on: ubuntu-latest
permissions:
security-events: write # To write the security report
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Run zizmor
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
uses: zizmorcore/zizmor-action@f52a838cfabf134edcbaa7c8b3677dde20045018 # v0.1.1
with:
# intentionally not scanning the entire repository,
# since it contains integration tests.

View File

@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
rev: v5.0.0
hooks:
- id: check-yaml
- id: check-json
@@ -22,15 +22,14 @@ repos:
description: Format files with cargo fmt.
entry: cargo fmt
language: system
always_run: true
pass_filenames: false
types: [rust]
args: ["--", "--check"]
- id: cargo-test
name: cargo test
description: Test the package for errors.
entry: cargo test
language: system
args: ["--features", "sqlite,mysql,postgresql", "--"]
args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
types_or: [rust, file]
files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
pass_filenames: false
@@ -39,7 +38,7 @@ repos:
description: Lint Rust sources
entry: cargo clippy
language: system
args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
types_or: [rust, file]
files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
pass_filenames: false
@@ -51,8 +50,3 @@ repos:
args:
- "-c"
- "cd docker && make"
# When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
- repo: https://github.com/crate-ci/typos
rev: 2d0ce569feab1f8752f1dde43cc2f2aa53236e06 # v1.40.0
hooks:
- id: typos

View File

@@ -1,26 +0,0 @@
[files]
extend-exclude = [
".git/",
"playwright/",
"*.js", # Ignore all JavaScript files
"!admin*.js", # Except our own JavaScript files
]
ignore-hidden = false
[default]
extend-ignore-re = [
# We use this in place of the reserved type identifier at some places
"typ",
# In SMTP it's called HELO, so ignore it
"(?i)helo_name",
"Server name sent during.+HELO",
# COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
"COSEKey",
"COSEAlgorithm",
# Ignore this specific string as it's valid
"Ensure they are valid OTPs",
# This word is misspelled upstream
# https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
# https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
"AuthRequestResponseRecieved",
]

Cargo.lock (generated): 2348 changed lines

File diff suppressed because it is too large

View File

@@ -1,10 +1,3 @@
[workspace.package]
edition = "2021"
rust-version = "1.90.0"
license = "AGPL-3.0-only"
repository = "https://github.com/dani-garcia/vaultwarden"
publish = false
[workspace]
members = ["macros"]
@@ -12,21 +5,18 @@ members = ["macros"]
name = "vaultwarden"
version = "1.0.0"
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
readme = "README.md"
build = "build.rs"
edition = "2021"
rust-version = "1.86.0"
resolver = "2"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true
repository = "https://github.com/dani-garcia/vaultwarden"
readme = "README.md"
license = "AGPL-3.0-only"
publish = false
build = "build.rs"
[features]
default = [
# "sqlite",
# "mysql",
# "postgresql",
]
# default = ["sqlite"]
# Empty to keep compatibility, prefer to set USE_SYSLOG=true
enable_syslog = []
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
@@ -37,12 +27,13 @@ vendored_openssl = ["openssl/vendored"]
# Enable MiMalloc memory allocator to replace the default malloc
# This can improve performance for Alpine builds
enable_mimalloc = ["dep:mimalloc"]
# This is a development dependency, and should only be used during development!
# It enables the usage of the diesel_logger crate, which is able to output the generated queries.
# You also need to set an env variable `QUERY_LOGGER=1` to fully activate this so you do not have to re-compile
# if you want to turn off the logging for a specific run.
query_logger = ["dep:diesel_logger"]
s3 = ["opendal/services-s3", "dep:aws-config", "dep:aws-credential-types", "dep:aws-smithy-runtime-api", "dep:anyhow", "dep:http", "dep:reqsign"]
# OIDC specific features
oidc-accept-rfc3339-timestamps = ["openidconnect/accept-rfc3339-timestamps"]
oidc-accept-string-booleans = ["openidconnect/accept-string-booleans"]
# Enable unstable features, requires nightly
# Currently only used to enable Rust's official ip support
unstable = []
@@ -55,17 +46,20 @@ syslog = "7.0.0"
macros = { path = "./macros" }
# Logging
log = "0.4.29"
log = "0.4.27"
fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
tracing = { version = "0.1.44", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
# A `dotenv` implementation for Rust
dotenvy = { version = "0.15.7", default-features = false }
# Lazy initialization
once_cell = "1.21.3"
# Numerical libraries
num-traits = "0.2.19"
num-derive = "0.4.2"
bigdecimal = "0.4.9"
bigdecimal = "0.4.8"
# Web framework
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@@ -79,19 +73,19 @@ dashmap = "6.1.0"
# Async futures
futures = "0.3.31"
tokio = { version = "1.48.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
tokio-util = { version = "0.7.17", features = ["compat"]}
tokio = { version = "1.46.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
tokio-util = { version = "0.7.15", features = ["compat"]}
# A generic serialization/deserialization framework
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.141"
# A safe, extensible ORM and Query builder
# Currently pinned diesel to v2.3.3 as newer versions break MySQL/MariaDB compatibility
diesel = { version = "2.3.5", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.3.1"
diesel = { version = "2.2.12", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.2.0"
diesel_logger = { version = "0.4.0", optional = true }
derive_more = { version = "2.1.0", features = ["from", "into", "as_ref", "deref", "display"] }
derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
diesel-derive-newtype = "2.1.2"
# Bundled/Static SQLite
@@ -103,55 +97,51 @@ ring = "0.17.14"
subtle = "2.6.1"
# UUID generation
uuid = { version = "1.19.0", features = ["v4"] }
uuid = { version = "1.17.0", features = ["v4"] }
# Date and time libraries
chrono = { version = "0.4.42", features = ["clock", "serde"], default-features = false }
chrono = { version = "0.4.41", features = ["clock", "serde"], default-features = false }
chrono-tz = "0.10.4"
time = "0.3.44"
time = "0.3.41"
# Job scheduler
job_scheduler_ng = "2.4.0"
job_scheduler_ng = "2.2.0"
# Data encoding library Hex/Base32/Base64
data-encoding = "2.9.0"
# JWT library
jsonwebtoken = { version = "10.2.0", features = ["use_pem", "rust_crypto"], default-features = false }
jsonwebtoken = "9.3.1"
# TOTP library
totp-lite = "2.0.1"
# Yubico Library
yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"], default-features = false }
yubico = { package = "yubico_ng", version = "0.13.0", features = ["online-tokio"], default-features = false }
# WebAuthn libraries
# danger-allow-state-serialisation is needed to save the state in the db
# danger-credential-internals is needed to support U2F to Webauthn migration
webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-credential-internals"] }
webauthn-rs-proto = "0.5.4"
webauthn-rs-core = "0.5.4"
webauthn-rs = "0.3.2"
# Handling of URLs for WebAuthn and favicons
url = "2.5.7"
url = "2.5.4"
# Email libraries
lettre = { version = "0.11.19", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
percent-encoding = "2.3.2" # URL encoding library used for URL's in the emails
lettre = { version = "0.11.17", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "hostname", "tracing", "tokio1-rustls", "ring", "rustls-native-certs"], default-features = false }
percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
email_address = "0.2.9"
# HTML Template library
handlebars = { version = "6.3.2", features = ["dir_source"] }
# HTTP client (Used for favicons, version check, DUO and HIBP API)
reqwest = { version = "0.12.26", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
reqwest = { version = "0.12.22", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
hickory-resolver = "0.25.2"
# Favicon extraction libraries
html5gum = "0.8.3"
regex = { version = "1.12.2", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.2"
bytes = "1.11.0"
html5gum = "0.7.0"
regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.1"
bytes = "1.10.1"
svg-hush = "0.9.5"
# Cache function results (Used for version check and favicon fetching)
@@ -159,28 +149,24 @@ cached = { version = "0.56.0", features = ["async"] }
# Used for custom short lived cookie jar during favicon extraction
cookie = "0.18.1"
cookie_store = "0.22.0"
cookie_store = "0.21.1"
# Used by U2F, JWT and PostgreSQL
openssl = "0.10.75"
openssl = "0.10.73"
# CLI argument parsing
pico-args = "0.5.0"
# Macro ident concatenation
pastey = "0.2.1"
governor = "0.10.4"
# OIDC for SSO
openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] }
mini-moka = "0.10.3"
pastey = "0.1.0"
governor = "0.10.0"
# Check client versions for specific features.
semver = "1.0.27"
semver = "1.0.26"
# Allow overriding the default memory allocator
# Mainly used for the musl builds, since the default musl malloc is very slow
mimalloc = { version = "0.1.48", features = ["secure"], default-features = false, optional = true }
mimalloc = { version = "0.1.47", features = ["secure"], default-features = false, optional = true }
which = "8.0.0"
@@ -194,14 +180,14 @@ rpassword = "7.4.0"
grass_compiler = { version = "0.13.4", default-features = false }
# File are accessed through Apache OpenDAL
opendal = { version = "0.55.0", features = ["services-fs"], default-features = false }
opendal = { version = "0.54.0", features = ["services-fs"], default-features = false }
# For retrieving AWS credentials, including temporary SSO credentials
anyhow = { version = "1.0.100", optional = true }
aws-config = { version = "1.8.12", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.11", optional = true }
aws-smithy-runtime-api = { version = "1.9.3", optional = true }
http = { version = "1.4.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
aws-config = { version = "1.8.3", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.4", optional = true }
aws-smithy-runtime-api = { version = "1.8.5", optional = true }
http = { version = "1.3.1", optional = true }
reqsign = { version = "0.16.5", optional = true }
# Strip debuginfo from the release builds
@@ -211,13 +197,23 @@ reqsign = { version = "0.16.5", optional = true }
strip = "debuginfo"
lto = "fat"
codegen-units = 1
debug = false
# A little bit of a speedup
[profile.dev]
split-debuginfo = "unpacked"
# Always build argon2 using opt-level 3
# This is a huge speed improvement during testing
[profile.dev.package.argon2]
opt-level = 3
# Optimize for size
[profile.release-micro]
inherits = "release"
strip = "symbols"
opt-level = "z"
strip = "symbols"
lto = "fat"
codegen-units = 1
panic = "abort"
# Profile for systems with low resources
@@ -228,32 +224,6 @@ strip = "symbols"
lto = "thin"
codegen-units = 16
# Used for profiling and debugging like valgrind or heaptrack
# Inherits release to be sure all optimizations have been done
[profile.dbg]
inherits = "release"
strip = "none"
split-debuginfo = "off"
debug = "full"
# A little bit of a speedup for generic building
[profile.dev]
split-debuginfo = "unpacked"
debug = "line-tables-only"
# Used for CI builds to improve compile time
[profile.ci]
inherits = "dev"
debug = false
debug-assertions = false
strip = "symbols"
panic = "abort"
# Always build argon2 using opt-level 3
# This is a huge speed improvement during testing
[profile.dev.package.argon2]
opt-level = 3
# Linting config
# https://doc.rust-lang.org/rustc/lints/groups.html
[workspace.lints.rust]
@@ -263,16 +233,15 @@ non_ascii_idents = "forbid"
# Deny
deprecated_in_future = "deny"
deprecated_safe = { level = "deny", priority = -1 }
future_incompatible = { level = "deny", priority = -1 }
keyword_idents = { level = "deny", priority = -1 }
let_underscore = { level = "deny", priority = -1 }
nonstandard_style = { level = "deny", priority = -1 }
noop_method_call = "deny"
refining_impl_trait = { level = "deny", priority = -1 }
rust_2018_idioms = { level = "deny", priority = -1 }
rust_2021_compatibility = { level = "deny", priority = -1 }
rust_2024_compatibility = { level = "deny", priority = -1 }
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again
single_use_lifetimes = "deny"
trivial_casts = "deny"
trivial_numeric_casts = "deny"
@@ -282,8 +251,7 @@ unused_lifetimes = "deny"
unused_qualifications = "deny"
variant_size_differences = "deny"
# Allow the following lints since these cause issues with Rust v1.84.0 or newer
# Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macros should be validated again
# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues
if_let_rescope = "allow"
tail_expr_drop_order = "allow"
@@ -297,15 +265,12 @@ todo = "warn"
result_large_err = "allow"
# Deny
branches_sharing_code = "deny"
case_sensitive_file_extension_comparisons = "deny"
cast_lossless = "deny"
clone_on_ref_ptr = "deny"
equatable_if_let = "deny"
excessive_precision = "deny"
filter_map_next = "deny"
float_cmp_const = "deny"
implicit_clone = "deny"
inefficient_to_string = "deny"
iter_on_empty_collections = "deny"
iter_on_single_items = "deny"
@@ -316,19 +281,16 @@ manual_instant_elapsed = "deny"
manual_string_new = "deny"
match_wildcard_for_single_variants = "deny"
mem_forget = "deny"
needless_borrow = "deny"
needless_collect = "deny"
needless_continue = "deny"
needless_lifetimes = "deny"
option_option = "deny"
redundant_clone = "deny"
string_add_assign = "deny"
string_to_string = "deny"
unnecessary_join = "deny"
unnecessary_self_imports = "deny"
unnested_or_patterns = "deny"
unused_async = "deny"
unused_self = "deny"
useless_let_if_seq = "deny"
verbose_file_reads = "deny"
zero_sized_map_values = "deny"

View File

@@ -9,6 +9,8 @@ fn main() {
println!("cargo:rustc-cfg=mysql");
#[cfg(feature = "postgresql")]
println!("cargo:rustc-cfg=postgresql");
#[cfg(feature = "query_logger")]
println!("cargo:rustc-cfg=query_logger");
#[cfg(feature = "s3")]
println!("cargo:rustc-cfg=s3");
@@ -22,6 +24,7 @@ fn main() {
println!("cargo::rustc-check-cfg=cfg(sqlite)");
println!("cargo::rustc-check-cfg=cfg(mysql)");
println!("cargo::rustc-check-cfg=cfg(postgresql)");
println!("cargo::rustc-check-cfg=cfg(query_logger)");
println!("cargo::rustc-check-cfg=cfg(s3)");
// Rerun when these paths are changed.
@@ -31,6 +34,9 @@ fn main() {
println!("cargo:rerun-if-changed=.git/index");
println!("cargo:rerun-if-changed=.git/refs/tags");
#[cfg(all(not(debug_assertions), feature = "query_logger"))]
compile_error!("Query Logging is only allowed during development, it is not intended for production usage!");
// Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION.
// If neither exist, read from git.
let maybe_vaultwarden_version =
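For context, a flag emitted via `cargo:rustc-cfg` in this build script can then be consumed anywhere in the crate like a built-in cfg; a small illustrative consumer (not taken from the sources):

```rust
// Sketch only: the flags emitted above behave like ordinary cfg values.
#[cfg(query_logger)]
const QUERY_LOGGER_COMPILED_IN: bool = true;
#[cfg(not(query_logger))]
const QUERY_LOGGER_COMPILED_IN: bool = false;

fn main() {
    println!("query_logger compiled in: {QUERY_LOGGER_COMPILED_IN}");
}
```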

View File

@@ -1,13 +1,13 @@
---
vault_version: "v2025.12.0"
vault_image_digest: "sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613"
# Cross Compile Docker Helper Scripts v1.9.0
vault_version: "v2025.7.0"
vault_image_digest: "sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e"
# Cross Compile Docker Helper Scripts v1.6.1
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707"
rust_version: 1.92.0 # Rust version to be used
debian_version: trixie # Debian release name to be used
alpine_version: "3.23" # Alpine version to be used
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.88.0 # Rust version to be used
debian_version: bookworm # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
@@ -17,6 +17,7 @@ build_stage_image:
platform: "$BUILDPLATFORM"
alpine:
image: "build_${TARGETARCH}${TARGETVARIANT}"
platform: "linux/amd64" # The Alpine build images only have linux/amd64 images
arch_image:
amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}"
arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}"

View File

@@ -19,27 +19,27 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.0
# [docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
# [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613
# [docker.io/vaultwarden/web-vault:v2025.12.0]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
# [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613 AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.92.0 AS build_amd64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.92.0 AS build_arm64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.92.0 AS build_armv7
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.92.0 AS build_armv6
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.88.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.88.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.88.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.88.0 AS build_armv6
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build
FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM
@@ -53,9 +53,9 @@ ENV DEBIAN_FRONTEND=noninteractive \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
USER="root" \
# Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
# Debian Trixie uses libpq v17
PQ_LIB_DIR="/usr/local/musl/pq17/lib"
# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
PQ_LIB_DIR="/usr/local/musl/pq15/lib"
# Create CARGO_HOME folder and don't download rust docs
@ -127,7 +127,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.23
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22
ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \

View File

@ -19,24 +19,24 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.0
# [docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.7.0
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.7.0
# [docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613
# [docker.io/vaultwarden/web-vault:v2025.12.0]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e
# [docker.io/vaultwarden/web-vault:v2025.7.0]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bb7303efafdb7e2b41bee2c772e14f67676ae2c9047bd7bba80d3544d4162613 AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:f6ac819a2cd9e226f2cd2ec26196ede94a41e672e9672a11b5f307a19278b15e AS vault
########################## Cross Compile Docker Helper Scripts ##########################
## We use linux/amd64 no matter which build platform, since these are all bash scripts
## And these bash scripts do not differ significantly, if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707 AS xx
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.92.0-slim-trixie AS build
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.88.0-slim-bookworm AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
@ -51,6 +51,7 @@ ENV DEBIAN_FRONTEND=noninteractive \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
USER="root"
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@ -67,11 +68,15 @@ RUN apt-get update && \
xx-apt-get install -y \
--no-install-recommends \
gcc \
libmariadb3 \
libpq-dev \
libpq5 \
libssl-dev \
libmariadb-dev \
zlib1g-dev && \
# Force install arch-dependent mariadb dev packages
# Installing them the normal way breaks several other packages (again)
apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
dpkg --force-all -i ./libmariadb-dev*.deb && \
# Run xx-cargo early, since it sometimes seems to break when run at a later stage
echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
@ -161,7 +166,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/debian:trixie-slim
FROM --platform=$TARGETPLATFORM docker.io/library/debian:bookworm-slim
ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \
@ -174,7 +179,7 @@ RUN mkdir /data && \
--no-install-recommends \
ca-certificates \
curl \
libmariadb3 \
libmariadb-dev-compat \
libpq5 \
openssl && \
apt-get clean && \

View File

@ -36,16 +36,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_diges
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx
{% elif base == "alpine" %}
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used
{% for arch in build_stage_image[base].arch_image %}
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
{% endfor %}
{% endif %}
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build
{% if base == "debian" %}
COPY --from=xx / /
{% endif %}
@ -63,12 +63,13 @@ ENV DEBIAN_FRONTEND=noninteractive \
CARGO_HOME="/root/.cargo" \
USER="root"
{%- if base == "alpine" %} \
# Use PostgreSQL v17 during Alpine/MUSL builds instead of the default v16
# Debian Trixie uses libpq v17
PQ_LIB_DIR="/usr/local/musl/pq17/lib"
# Use PostgreSQL v15 during Alpine/MUSL builds instead of the default v11
# Debian Bookworm already contains libpq v15
PQ_LIB_DIR="/usr/local/musl/pq15/lib"
{% endif %}
{% if base == "debian" %}
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@ -85,11 +86,15 @@ RUN apt-get update && \
xx-apt-get install -y \
--no-install-recommends \
gcc \
libmariadb3 \
libpq-dev \
libpq5 \
libssl-dev \
libmariadb-dev \
zlib1g-dev && \
# Force install arch-dependent mariadb dev packages
# Installing them the normal way breaks several other packages (again)
apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \
dpkg --force-all -i ./libmariadb-dev*.deb && \
# Run xx-cargo early, since it sometimes seems to break when run at a later stage
echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo
{% endif %}
@ -211,7 +216,7 @@ RUN mkdir /data && \
--no-install-recommends \
ca-certificates \
curl \
libmariadb3 \
libmariadb-dev-compat \
libpq5 \
openssl && \
apt-get clean && \

View File

@ -116,7 +116,7 @@ docker/bake.sh
```
You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
This will also append those values to the tag so you can see the built container when running `docker images`.
This will also append those values to the tag so you can see the builded container when running `docker images`.
You can also append extra arguments after the target if you want. This can be useful, for example, to print what bake will use.
```bash
@ -162,7 +162,7 @@ You can append extra arguments after the target if you want. This can be useful
For the podman builds you can, just like with the `bake.sh` script, append the architecture to build for that specific platform.<br>
### Testing podman built images
### Testing podman builded images
The command to start a podman built container is almost the same as for the docker/bake built containers. The images are prefixed with `localhost/`, so you need to prepend that to the image name, as sketched below.
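A minimal sketch, assuming a locally built image (the tag is hypothetical; check `podman images` for the actual name on your system):

```bash
# Assumes the default in-container port 80 (ROCKET_ADDRESS=0.0.0.0); adjust if your build differs
podman run --rm -it -p 8080:80 localhost/vaultwarden/server:testing
```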

View File

@ -1,11 +1,7 @@
[package]
name = "macros"
version = "0.1.0"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true
edition = "2021"
[lib]
name = "macros"
@ -13,8 +9,8 @@ path = "src/lib.rs"
proc-macro = true
[dependencies]
quote = "1.0.42"
syn = "2.0.111"
quote = "1.0.40"
syn = "2.0.104"
[lints]
workspace = true

View File

@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@ -1,6 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier VARCHAR(768) NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@ -1,15 +0,0 @@
-- Dynamically create DROP FOREIGN KEY
-- Some versions of MySQL or MariaDB might fail if the key doesn't exist
-- This checks if the key exists, and if so, will drop it.
SET @drop_sso_fk = IF((SELECT true FROM information_schema.TABLE_CONSTRAINTS WHERE
CONSTRAINT_SCHEMA = DATABASE() AND
TABLE_NAME = 'sso_users' AND
CONSTRAINT_NAME = 'sso_users_ibfk_1' AND
CONSTRAINT_TYPE = 'FOREIGN KEY') = true,
'ALTER TABLE sso_users DROP FOREIGN KEY sso_users_ibfk_1',
'SELECT 1');
PREPARE stmt FROM @drop_sso_fk;
EXECUTE stmt;
DEALLOCATE PREPARE stmt;
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_auth;
CREATE TABLE sso_nonce (
state VARCHAR(512) NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,12 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_auth (
state VARCHAR(512) NOT NULL PRIMARY KEY,
client_challenge TEXT NOT NULL,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
code_response TEXT,
auth_response TEXT,
created_at TIMESTAMP NOT NULL DEFAULT now(),
updated_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@ -1,6 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,8 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT now(),
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@ -1,3 +0,0 @@
ALTER TABLE sso_users
DROP CONSTRAINT "sso_users_user_uuid_fkey",
ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_auth;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1,12 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_auth (
state TEXT NOT NULL PRIMARY KEY,
client_challenge TEXT NOT NULL,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
code_response TEXT,
auth_response TEXT,
created_at TIMESTAMP NOT NULL DEFAULT now(),
updated_at TIMESTAMP NOT NULL DEFAULT now()
);

View File

@ -1 +0,0 @@
DROP TABLE sso_nonce;

View File

@ -1,4 +0,0 @@
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations DROP COLUMN invited_by_email;

View File

@ -1 +0,0 @@
ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL;

View File

@ -1,6 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
nonce CHAR(36) NOT NULL PRIMARY KEY,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,8 +0,0 @@
DROP TABLE sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,8 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1 +0,0 @@
DROP TABLE IF EXISTS sso_users;

View File

@ -1,7 +0,0 @@
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(user_uuid) REFERENCES users(uuid)
);

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_users;
CREATE TABLE sso_users (
user_uuid CHAR(36) NOT NULL PRIMARY KEY,
identifier TEXT NOT NULL UNIQUE,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE
);

View File

@ -1,9 +0,0 @@
DROP TABLE IF EXISTS sso_auth;
CREATE TABLE sso_nonce (
state TEXT NOT NULL PRIMARY KEY,
nonce TEXT NOT NULL,
verifier TEXT,
redirect_uri TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,12 +0,0 @@
DROP TABLE IF EXISTS sso_nonce;
CREATE TABLE sso_auth (
state TEXT NOT NULL PRIMARY KEY,
client_challenge TEXT NOT NULL,
nonce TEXT NOT NULL,
redirect_uri TEXT NOT NULL,
code_response TEXT,
auth_response TEXT,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

View File

@ -1,64 +0,0 @@
#################################
### Conf to run dev instances ###
#################################
ENV=dev
DC_ENV_FILE=.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1
################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=${TEST_USER}
TEST_USER_MAIL=${TEST_USER}@yopmail.com
TEST_USER2=test2
TEST_USER2_PASSWORD=${TEST_USER2}
TEST_USER2_MAIL=${TEST_USER2}@yopmail.com
TEST_USER3=test3
TEST_USER3_PASSWORD=${TEST_USER3}
TEST_USER3_MAIL=${TEST_USER3}@yopmail.com
###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8080
# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}
######################
# Vaultwarden Config #
######################
ROCKET_ADDRESS=0.0.0.0
ROCKET_PORT=8000
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
I_REALLY_WANT_VOLATILE_STORAGE=true
SSO_ENABLED=true
SSO_ONLY=false
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SMTP_HOST=127.0.0.1
SMTP_PORT=1025
SMTP_SECURITY=off
SMTP_TIMEOUT=5
SMTP_FROM=vaultwarden@test
SMTP_FROM_NAME=Vaultwarden
########################################################
# DUMMY values for docker-compose to stop bothering us #
########################################################
MARIADB_PORT=3305
MYSQL_PORT=3307
POSTGRES_PORT=5432

View File

@ -1,6 +0,0 @@
logs
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/
temp

View File

@ -1,179 +0,0 @@
# Integration tests
This allows running integration tests using [Playwright](https://playwright.dev/).
It uses its own `test.env` with different ports to not collide with a running dev instance.
## Install
This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
Databases (`Mariadb`, `Mysql` and `Postgres`) and `Playwright` will run in containers.
### Running Playwright outside docker
It is possible to run `Playwright` outside of the container; this removes the need to rebuild the image for each change.
You will additionally need `nodejs`; then run:
```bash
npm ci --ignore-scripts
npx playwright install-deps
npx playwright install firefox
```
## Usage
To run all the tests:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright
```
To force a rebuild of the Playwright image:
```bash
DOCKER_BUILDKIT=1 docker compose --env-file test.env build Playwright
```
To access the UI, which makes it easy to run tests individually and debug when needed (this will not work in docker):
```bash
npx playwright test --ui
```
### DB
Projects are configured to allow running the tests against a specific database.
You can use:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mariadb
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=mysql
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=postgres
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite
```
### SSO
To run the SSO tests:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project sso-sqlite
```
### Keep services running
If you want, you can keep the DB and Keycloak running (their state is not impacted by the tests):
```bash
PW_KEEP_SERVICE_RUNNNING=true npx playwright test
```
### Running specific tests
To run a whole file you can:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite login
```
To run only a specific test (it might fail if it has dependencies):
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite -g "Account creation"
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env run Playwright test --project=sqlite tests/login.spec.ts:16
```
## Writing scenarios
When creating a new scenario, use the recorder to more easily identify elements
(in general, try to rely on visible hints to identify elements rather than hidden IDs).
This does not start the server; you will need to start it manually.
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden
npx playwright codegen "http://127.0.0.1:8003"
```
## Override web-vault
It is possible to change the `web-vault` used by referencing a different `vw_web_builds` commit.
The simplest way is to uncomment and set `PW_VW_REPO_URL` and `PW_VW_COMMIT_HASH` in `test.env`.
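For example, matching the commented defaults already present in `test.env` (the commit hash is illustrative; point it at the build you want):

```bash
# In test.env — uncomment and adjust as needed
PW_VW_REPO_URL=https://github.com/vaultwarden/vw_web_builds.git
PW_VW_COMMIT_HASH=b5f5b2157b9b64b5813bc334a75a277d0377b5d3
```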
Ensure that the image is built with:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env build Vaultwarden
```
You can check the result by running:
```bash
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden
```
Then check `http://127.0.0.1:8003/admin/diagnostics`, logging in with the admin token `admin`.
# OpenID Connect test setup
Additionally, this `docker-compose` template allows running Vaultwarden,
[Keycloak](https://www.keycloak.org/) and [Maildev](https://github.com/timshel/maildev) locally to test OIDC.
## Setup
This relies on `docker` and the `compose` [plugin](https://docs.docker.com/compose/install/).
First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_*`).
## Usage
Then start the stack (the `profile` is required to run `Vaultwarden`):
```bash
> docker compose --profile vaultwarden --env-file .env up
....
keycloakSetup_1 | Logging into http://127.0.0.1:8080 as user admin of realm master
keycloakSetup_1 | Created new realm with id 'test'
keycloakSetup_1 | 74af4933-e386-4e64-ba15-a7b61212c45e
oidc_keycloakSetup_1 exited with code 0
```
Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates that the Keycloak realm, client and user were set up correctly
(it is normal for this container to stop once the configuration is done).
Then you can access:
- `Vaultwarden` on http://0.0.0.0:8000 with the default user `test@yopmail.com/test`.
- `Keycloak` on http://0.0.0.0:8080/admin/master/console/ with the default user `admin/admin`
- `Maildev` on http://0.0.0.0:1080
To proceed with an SSO login, enter the email; the SSO button should then be visible on the screen prompting for the `Master Password`.
To use your computer's external IP (for example when testing with a phone), you will have to configure `KC_HTTP_HOST` and `DOMAIN`, as sketched below.
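A minimal sketch, assuming a LAN IP of `192.168.1.50` (replace with your machine's actual address):

```bash
# In .env — both Keycloak and Vaultwarden must be reachable from the other device
KC_HTTP_HOST=192.168.1.50
DOMAIN=http://192.168.1.50:8000
```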
## Running only Keycloak
You can run just `Keycloak` with `--profile keycloak`:
```bash
> docker compose --profile keycloak --env-file .env up
```
When running with a local Vaultwarden, you can use a front-end build from [dani-garcia/bw_web_builds](https://github.com/dani-garcia/bw_web_builds/releases).
## Rebuilding Vaultwarden
To force rebuilding the Vaultwarden image, run:
```bash
docker compose --profile vaultwarden --env-file .env build VaultwardenPrebuild Vaultwarden
```
## Configuration
All configuration for `keycloak` / `Vaultwarden` / `keycloak_setup.sh` can be found in [.env](.env.template).
The content of the file will be loaded as environment variables in all containers.
- `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)).
- Any `Vaultwarden` configuration value can be set (e.g. `SMTP_*`); see the sketch below.
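As a minimal sketch (illustrative values only, assuming you want to tweak the Keycloak port and the mail sender), your `.env` could contain:

```bash
# Any KC_-prefixed variable is passed through to Keycloak
KC_HTTP_PORT=8080
# Regular Vaultwarden settings are picked up as-is
SMTP_FROM=vaultwarden@example.test
SSO_ONLY=false
```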
## Cleanup
Use `docker compose --profile vaultwarden down`.

View File

@ -1,19 +0,0 @@
FROM docker.io/library/debian:trixie-slim
ARG KEYCLOAK_VERSION
ENV DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update && apt-get install -y ca-certificates curl jq openjdk-21-jdk-headless wget
WORKDIR /
RUN wget -c https://github.com/keycloak/keycloak/releases/download/${KEYCLOAK_VERSION}/keycloak-${KEYCLOAK_VERSION}.tar.gz -O - | tar -xz \
&& mkdir -p /opt/keycloak \
&& mv /keycloak-${KEYCLOAK_VERSION}/bin /opt/keycloak/bin \
&& rm -rf /keycloak-${KEYCLOAK_VERSION}
COPY setup.sh /setup.sh
CMD "/setup.sh"

View File

@ -1,44 +0,0 @@
#!/bin/bash
export PATH=/opt/keycloak/bin:$PATH
STATUS_CODE=0
while [[ "$STATUS_CODE" != "404" ]] ; do
echo "Will retry in 2 seconds"
sleep 2
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" "$DUMMY_AUTHORITY")
if [[ "$STATUS_CODE" = "200" ]]; then
echo "Setup should already be done. Will not run."
exit 0
fi
done
set -e
kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli
kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600"
kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i
TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "firstName=$TEST_USER" -s "lastName=$TEST_USER" -s "email=$TEST_USER_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n
TEST_USER2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER2" -s "firstName=$TEST_USER2" -s "lastName=$TEST_USER2" -s "email=$TEST_USER2_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER2_PASSWORD" -n
TEST_USER3_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER3" -s "firstName=$TEST_USER3" -s "lastName=$TEST_USER3" -s "email=$TEST_USER3_MAIL" -s emailVerified=true -s enabled=true -i)
kcadm.sh update users/$TEST_USER3_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER3_PASSWORD" -n
# Dummy realm to mark end of setup
kcadm.sh create realms -s realm="$DUMMY_REALM" -s enabled=true -s "accessTokenLifespan=600"
# TO DEBUG uncomment the following line to keep the setup container running
# sleep 3600
# THEN in another terminal:
# docker exec -it keycloakSetup-dev /bin/bash
# export PATH=$PATH:/opt/keycloak/bin
# kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli
# ENJOY
# Doc: https://wjw465150.gitbooks.io/keycloak-documentation/content/server_admin/topics/admin-cli.html

View File

@ -1,40 +0,0 @@
FROM docker.io/library/debian:trixie-slim
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
&& apt-get install -y ca-certificates curl \
&& curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc \
&& chmod a+r /etc/apt/keyrings/docker.asc \
&& echo "deb [signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian trixie stable" | tee /etc/apt/sources.list.d/docker.list \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
containerd.io \
docker-buildx-plugin \
docker-ce \
docker-ce-cli \
docker-compose-plugin \
git \
libmariadb-dev-compat \
libpq5 \
nodejs \
npm \
openssl \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir /playwright
WORKDIR /playwright
COPY package.json package-lock.json ./
RUN npm ci --ignore-scripts && npx playwright install-deps && npx playwright install firefox
COPY docker-compose.yml test.env ./
COPY compose ./compose
COPY *.ts test.env ./
COPY tests ./tests
ENTRYPOINT ["/usr/bin/npx", "playwright"]
CMD ["test"]

View File

@ -1,40 +0,0 @@
FROM playwright_oidc_vaultwarden_prebuilt AS prebuilt
FROM node:22-trixie AS build
ARG REPO_URL
ARG COMMIT_HASH
ENV REPO_URL=$REPO_URL
ENV COMMIT_HASH=$COMMIT_HASH
COPY --from=prebuilt /web-vault /web-vault
COPY build.sh /build.sh
RUN /build.sh
######################## RUNTIME IMAGE ########################
FROM docker.io/library/debian:trixie-slim
ENV DEBIAN_FRONTEND=noninteractive
# Create data folder and Install needed libraries
RUN mkdir /data && \
apt-get update && apt-get install -y \
--no-install-recommends \
ca-certificates \
curl \
libmariadb-dev \
libpq5 \
openssl && \
rm -rf /var/lib/apt/lists/*
# Copies the files from the context (Rocket.toml file and web-vault)
# and the binary from the "build" stage to the current stage
WORKDIR /
COPY --from=prebuilt /start.sh .
COPY --from=prebuilt /vaultwarden .
COPY --from=build /web-vault ./web-vault
ENTRYPOINT ["/start.sh"]

View File

@ -1,24 +0,0 @@
#!/bin/bash
echo $REPO_URL
echo $COMMIT_HASH
if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then
rm -rf /web-vault
mkdir -p vw_web_builds;
cd vw_web_builds;
git -c init.defaultBranch=main init
git remote add origin "$REPO_URL"
git fetch --depth 1 origin "$COMMIT_HASH"
git -c advice.detachedHead=false checkout FETCH_HEAD
npm ci --ignore-scripts
cd apps/web
npm run dist:oss:selfhost
printf '{"version":"%s"}' "$COMMIT_HASH" > build/vw-version.json
mv build /web-vault
fi

View File

@ -1,124 +0,0 @@
services:
VaultwardenPrebuild:
profiles: ["playwright", "vaultwarden"]
container_name: playwright_oidc_vaultwarden_prebuilt
image: playwright_oidc_vaultwarden_prebuilt
build:
context: ..
dockerfile: Dockerfile
entrypoint: /bin/bash
restart: "no"
Vaultwarden:
profiles: ["playwright", "vaultwarden"]
container_name: playwright_oidc_vaultwarden-${ENV:-dev}
image: playwright_oidc_vaultwarden-${ENV:-dev}
network_mode: "host"
build:
context: compose/warden
dockerfile: Dockerfile
args:
REPO_URL: ${PW_VW_REPO_URL:-}
COMMIT_HASH: ${PW_VW_COMMIT_HASH:-}
env_file: ${DC_ENV_FILE:-.env}
environment:
- ADMIN_TOKEN
- DATABASE_URL
- I_REALLY_WANT_VOLATILE_STORAGE
- LOG_LEVEL
- LOGIN_RATELIMIT_MAX_BURST
- SMTP_HOST
- SMTP_FROM
- SMTP_DEBUG
- SSO_DEBUG_TOKENS
- SSO_ENABLED
- SSO_FRONTEND
- SSO_ONLY
- SSO_SCOPES
restart: "no"
depends_on:
- VaultwardenPrebuild
Playwright:
profiles: ["playwright"]
container_name: playwright_oidc_playwright
image: playwright_oidc_playwright
network_mode: "host"
build:
context: .
dockerfile: compose/playwright/Dockerfile
environment:
- PW_WV_REPO_URL
- PW_WV_COMMIT_HASH
restart: "no"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ..:/project
Mariadb:
profiles: ["playwright"]
container_name: playwright_mariadb
image: mariadb:11.2.4
env_file: test.env
healthcheck:
test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
start_period: 10s
interval: 10s
ports:
- ${MARIADB_PORT}:3306
Mysql:
profiles: ["playwright"]
container_name: playwright_mysql
image: mysql:8.4.1
env_file: test.env
healthcheck:
test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
start_period: 10s
interval: 10s
ports:
- ${MYSQL_PORT}:3306
Postgres:
profiles: ["playwright"]
container_name: playwright_postgres
image: postgres:16.3
env_file: test.env
healthcheck:
test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"]
start_period: 20s
interval: 30s
ports:
- ${POSTGRES_PORT}:5432
Maildev:
profiles: ["vaultwarden", "maildev"]
container_name: maildev
image: timshel/maildev:3.0.4
ports:
- ${SMTP_PORT}:1025
- 1080:1080
Keycloak:
profiles: ["keycloak", "vaultwarden"]
container_name: keycloak-${ENV:-dev}
image: quay.io/keycloak/keycloak:26.3.4
network_mode: "host"
command:
- start-dev
env_file: ${DC_ENV_FILE:-.env}
KeycloakSetup:
profiles: ["keycloak", "vaultwarden"]
container_name: keycloakSetup-${ENV:-dev}
image: keycloak_setup-${ENV:-dev}
build:
context: compose/keycloak
dockerfile: Dockerfile
args:
KEYCLOAK_VERSION: 26.3.4
network_mode: "host"
depends_on:
- Keycloak
restart: "no"
env_file: ${DC_ENV_FILE:-.env}

View File

@ -1,22 +0,0 @@
import { firefox, type FullConfig } from '@playwright/test';
import { execSync } from 'node:child_process';
import fs from 'fs';
const utils = require('./global-utils');
utils.loadEnv();
async function globalSetup(config: FullConfig) {
// Are we running in docker with the project mounted?
const path = (fs.existsSync("/project/playwright/playwright.config.ts") ? "/project/playwright" : ".");
execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build VaultwardenPrebuild`, {
env: { ...process.env },
stdio: "inherit"
});
execSync(`docker compose --project-directory ${path} --profile playwright --env-file test.env build Vaultwarden`, {
env: { ...process.env },
stdio: "inherit"
});
}
export default globalSetup;

View File

@ -1,262 +0,0 @@
import { expect, Test, type Browser, type Page, type TestInfo } from '@playwright/test';
import { EventEmitter } from "events";
import { type Mail, MailServer } from 'maildev';
import { execSync } from 'node:child_process';
import dotenv from 'dotenv';
import dotenvExpand from 'dotenv-expand';
const fs = require("fs");
const { spawn } = require('node:child_process');
export function loadEnv(){
var myEnv = dotenv.config({ path: 'test.env', quiet: true });
dotenvExpand.expand(myEnv);
return {
user1: {
email: process.env.TEST_USER_MAIL,
name: process.env.TEST_USER,
password: process.env.TEST_USER_PASSWORD,
},
user2: {
email: process.env.TEST_USER2_MAIL,
name: process.env.TEST_USER2,
password: process.env.TEST_USER2_PASSWORD,
},
user3: {
email: process.env.TEST_USER3_MAIL,
name: process.env.TEST_USER3,
password: process.env.TEST_USER3_PASSWORD,
},
}
}
export async function waitFor(url: String, browser: Browser) {
var ready = false;
var context;
do {
try {
context = await browser.newContext();
const page = await context.newPage();
await page.waitForTimeout(500);
const result = await page.goto(url);
ready = result.status() === 200;
} catch(e) {
if( !e.message.includes("CONNECTION_REFUSED") ){
throw e;
}
} finally {
await context.close();
}
} while(!ready);
}
export function startComposeService(serviceName: String){
console.log(`Starting ${serviceName}`);
execSync(`docker compose --profile playwright --env-file test.env up -d ${serviceName}`);
}
export function stopComposeService(serviceName: String){
console.log(`Stopping ${serviceName}`);
execSync(`docker compose --profile playwright --env-file test.env stop ${serviceName}`);
}
function wipeSqlite(){
console.log(`Delete Vaultwarden container to wipe sqlite`);
execSync(`docker compose --env-file test.env stop Vaultwarden`);
execSync(`docker compose --env-file test.env rm -f Vaultwarden`);
}
async function wipeMariaDB(){
var mysql = require('mysql2/promise');
var ready = false;
var connection;
do {
try {
connection = await mysql.createConnection({
user: process.env.MARIADB_USER,
host: "127.0.0.1",
database: process.env.MARIADB_DATABASE,
password: process.env.MARIADB_PASSWORD,
port: process.env.MARIADB_PORT,
});
await connection.execute(`DROP DATABASE ${process.env.MARIADB_DATABASE}`);
await connection.execute(`CREATE DATABASE ${process.env.MARIADB_DATABASE}`);
console.log('Successfully wiped mariadb');
ready = true;
} catch (err) {
console.log(`Error when wiping mariadb: ${err}`);
} finally {
if( connection ){
connection.end();
}
}
await new Promise(r => setTimeout(r, 1000));
} while(!ready);
}
async function wipeMysqlDB(){
var mysql = require('mysql2/promise');
var ready = false;
var connection;
do{
try {
connection = await mysql.createConnection({
user: process.env.MYSQL_USER,
host: "127.0.0.1",
database: process.env.MYSQL_DATABASE,
password: process.env.MYSQL_PASSWORD,
port: process.env.MYSQL_PORT,
});
await connection.execute(`DROP DATABASE ${process.env.MYSQL_DATABASE}`);
await connection.execute(`CREATE DATABASE ${process.env.MYSQL_DATABASE}`);
console.log('Successfully wiped mysql');
ready = true;
} catch (err) {
console.log(`Error when wiping mysql: ${err}`);
} finally {
if( connection ){
connection.end();
}
}
await new Promise(r => setTimeout(r, 1000));
} while(!ready);
}
async function wipePostgres(){
const { Client } = require('pg');
const client = new Client({
user: process.env.POSTGRES_USER,
host: "127.0.0.1",
database: "postgres",
password: process.env.POSTGRES_PASSWORD,
port: process.env.POSTGRES_PORT,
});
try {
await client.connect();
await client.query(`DROP DATABASE ${process.env.POSTGRES_DB}`);
await client.query(`CREATE DATABASE ${process.env.POSTGRES_DB}`);
console.log('Successfully wiped postgres');
} catch (err) {
console.log(`Error when wiping postgres: ${err}`);
} finally {
client.end();
}
}
function dbConfig(testInfo: TestInfo){
switch(testInfo.project.name) {
case "postgres":
case "sso-postgres":
return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` };
case "mariadb":
case "sso-mariadb":
return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PASSWORD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DATABASE}` };
case "mysql":
case "sso-mysql":
return { DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PASSWORD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DATABASE}`};
case "sqlite":
case "sso-sqlite":
return { I_REALLY_WANT_VOLATILE_STORAGE: true };
default:
throw new Error(`Unknown database name: ${testInfo.project.name}`);
}
}
/**
* All parameters passed in `env` need to be added to the docker-compose.yml
**/
export async function startVault(browser: Browser, testInfo: TestInfo, env = {}, resetDB: Boolean = true) {
if( resetDB ){
switch(testInfo.project.name) {
case "postgres":
case "sso-postgres":
await wipePostgres();
break;
case "mariadb":
case "sso-mariadb":
await wipeMariaDB();
break;
case "mysql":
case "sso-mysql":
await wipeMysqlDB();
break;
case "sqlite":
case "sso-sqlite":
wipeSqlite();
break;
default:
throw new Error(`Unknown database name: ${testInfo.project.name}`);
}
}
console.log(`Starting Vaultwarden`);
execSync(`docker compose --profile playwright --env-file test.env up -d Vaultwarden`, {
env: { ...env, ...dbConfig(testInfo) },
});
await waitFor("/", browser);
console.log(`Vaultwarden running on: ${process.env.DOMAIN}`);
}
export async function stopVault(force: boolean = false) {
if( force === false && process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) {
console.log(`Keep vaultwarden running on: ${process.env.DOMAIN}`);
} else {
console.log(`Vaultwarden stopping`);
execSync(`docker compose --profile playwright --env-file test.env stop Vaultwarden`);
}
}
export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: Boolean = true) {
stopVault(true);
return startVault(page.context().browser(), testInfo, env, resetDB);
}
export async function checkNotification(page: Page, hasText: string) {
await expect(page.locator('bit-toast', { hasText })).toBeVisible();
try {
await page.locator('bit-toast', { hasText }).getByRole('button', { name: 'Close' }).click({force: true, timeout: 10_000});
} catch (error) {
console.log(`Closing notification failed but it should now be invisible (${error})`);
}
await expect(page.locator('bit-toast', { hasText })).toHaveCount(0);
}
export async function cleanLanding(page: Page) {
await page.goto('/', { waitUntil: 'domcontentloaded' });
await expect(page.getByRole('button').nth(0)).toBeVisible();
const logged = await page.getByRole('button', { name: 'Log out' }).count();
if( logged > 0 ){
await page.getByRole('button', { name: 'Log out' }).click();
await page.getByRole('button', { name: 'Log out' }).click();
}
}
export async function logout(test: Test, page: Page, user: { name: string }) {
await test.step('logout', async () => {
await page.getByRole('button', { name: user.name, exact: true }).click();
await page.getByRole('menuitem', { name: 'Log out' }).click();
await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible();
});
}
export async function ignoreExtension(page: Page) {
await page.waitForLoadState('domcontentloaded');
try {
await page.getByRole('button', { name: 'Add it later' }).click({timeout: 5_000});
await page.getByRole('link', { name: 'Skip to web app' }).click();
} catch (error) {
console.log('Extension setup not visible. Continuing');
}
}

File diff suppressed because it is too large

View File

@ -1,21 +0,0 @@
{
"name": "scenarios",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {},
"keywords": [],
"author": "",
"license": "ISC",
"devDependencies": {
"@playwright/test": "1.56.1",
"dotenv": "17.2.3",
"dotenv-expand": "12.0.3",
"maildev": "npm:@timshel_npm/maildev@3.2.5"
},
"dependencies": {
"mysql2": "3.15.3",
"otpauth": "9.4.1",
"pg": "8.16.3"
}
}

View File

@ -1,143 +0,0 @@
import { defineConfig, devices } from '@playwright/test';
import { exec } from 'node:child_process';
const utils = require('./global-utils');
utils.loadEnv();
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: './.',
/* Run tests in files in parallel */
fullyParallel: false,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: !!process.env.CI,
retries: 0,
workers: 1,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: 'html',
/* Long global timeout for complex tests
* But short action/nav/expect timeouts to fail on specific step (raise locally if not enough).
*/
timeout: 120 * 1000,
actionTimeout: 20 * 1000,
navigationTimeout: 20 * 1000,
expect: { timeout: 20 * 1000 },
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`. */
baseURL: process.env.DOMAIN,
browserName: 'firefox',
locale: 'en-GB',
timezoneId: 'Europe/London',
/* Always collect trace (other values add random test failures) See https://playwright.dev/docs/trace-viewer */
trace: 'on',
viewport: {
width: 1080,
height: 720,
},
video: "on",
},
/* Configure projects for major browsers */
projects: [
{
name: 'mariadb-setup',
testMatch: 'tests/setups/db-setup.ts',
use: { serviceName: "Mariadb" },
teardown: 'mariadb-teardown',
},
{
name: 'mysql-setup',
testMatch: 'tests/setups/db-setup.ts',
use: { serviceName: "Mysql" },
teardown: 'mysql-teardown',
},
{
name: 'postgres-setup',
testMatch: 'tests/setups/db-setup.ts',
use: { serviceName: "Postgres" },
teardown: 'postgres-teardown',
},
{
name: 'sso-setup',
testMatch: 'tests/setups/sso-setup.ts',
teardown: 'sso-teardown',
},
{
name: 'mariadb',
testMatch: 'tests/*.spec.ts',
testIgnore: 'tests/sso_*.spec.ts',
dependencies: ['mariadb-setup'],
},
{
name: 'mysql',
testMatch: 'tests/*.spec.ts',
testIgnore: 'tests/sso_*.spec.ts',
dependencies: ['mysql-setup'],
},
{
name: 'postgres',
testMatch: 'tests/*.spec.ts',
testIgnore: 'tests/sso_*.spec.ts',
dependencies: ['postgres-setup'],
},
{
name: 'sqlite',
testMatch: 'tests/*.spec.ts',
testIgnore: 'tests/sso_*.spec.ts',
},
{
name: 'sso-mariadb',
testMatch: 'tests/sso_*.spec.ts',
dependencies: ['sso-setup', 'mariadb-setup'],
},
{
name: 'sso-mysql',
testMatch: 'tests/sso_*.spec.ts',
dependencies: ['sso-setup', 'mysql-setup'],
},
{
name: 'sso-postgres',
testMatch: 'tests/sso_*.spec.ts',
dependencies: ['sso-setup', 'postgres-setup'],
},
{
name: 'sso-sqlite',
testMatch: 'tests/sso_*.spec.ts',
dependencies: ['sso-setup'],
},
{
name: 'mariadb-teardown',
testMatch: 'tests/setups/db-teardown.ts',
use: { serviceName: "Mariadb" },
},
{
name: 'mysql-teardown',
testMatch: 'tests/setups/db-teardown.ts',
use: { serviceName: "Mysql" },
},
{
name: 'postgres-teardown',
testMatch: 'tests/setups/db-teardown.ts',
use: { serviceName: "Postgres" },
},
{
name: 'sso-teardown',
testMatch: 'tests/setups/sso-teardown.ts',
},
],
globalSetup: require.resolve('./global-setup'),
});

View File

@ -1,98 +0,0 @@
##################################################################
### Shared Playwright conf test file Vaultwarden and Databases ###
##################################################################
ENV=test
DC_ENV_FILE=test.env
COMPOSE_IGNORE_ORPHANS=True
DOCKER_BUILDKIT=1
#####################
# Playwright Config #
#####################
PW_KEEP_SERVICE_RUNNNING=${PW_KEEP_SERVICE_RUNNNING:-false}
PW_SMTP_FROM=vaultwarden@playwright.test
#####################
# Maildev Config #
#####################
MAILDEV_HTTP_PORT=1081
MAILDEV_SMTP_PORT=1026
MAILDEV_HOST=127.0.0.1
################
# Users Config #
################
TEST_USER=test
TEST_USER_PASSWORD=Master Password
TEST_USER_MAIL=${TEST_USER}@example.com
TEST_USER2=test2
TEST_USER2_PASSWORD=Master Password
TEST_USER2_MAIL=${TEST_USER2}@example.com
TEST_USER3=test3
TEST_USER3_PASSWORD=Master Password
TEST_USER3_MAIL=${TEST_USER3}@example.com
###################
# Keycloak Config #
###################
KEYCLOAK_ADMIN=admin
KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
KC_HTTP_HOST=127.0.0.1
KC_HTTP_PORT=8081
# Script parameters (use Keycloak and Vaultwarden config too)
TEST_REALM=test
DUMMY_REALM=dummy
DUMMY_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${DUMMY_REALM}
######################
# Vaultwarden Config #
######################
ROCKET_PORT=8003
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
LOGIN_RATELIMIT_MAX_BURST=100
ADMIN_TOKEN=admin
SMTP_SECURITY=off
SMTP_PORT=${MAILDEV_SMTP_PORT}
SMTP_FROM_NAME=Vaultwarden
SMTP_TIMEOUT=5
SSO_CLIENT_ID=warden
SSO_CLIENT_SECRET=warden
SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SSO_DEBUG_TOKENS=true
# Custom web-vault build
# PW_VW_REPO_URL=https://github.com/vaultwarden/vw_web_builds.git
# PW_VW_COMMIT_HASH=b5f5b2157b9b64b5813bc334a75a277d0377b5d3
###########################
# Docker MariaDb container#
###########################
MARIADB_PORT=3307
MARIADB_ROOT_PASSWORD=warden
MARIADB_USER=warden
MARIADB_PASSWORD=warden
MARIADB_DATABASE=warden
###########################
# Docker Mysql container#
###########################
MYSQL_PORT=3309
MYSQL_ROOT_PASSWORD=warden
MYSQL_USER=warden
MYSQL_PASSWORD=warden
MYSQL_DATABASE=warden
############################
# Docker Postgres container#
############################
POSTGRES_PORT=5433
POSTGRES_USER=warden
POSTGRES_PASSWORD=warden
POSTGRES_DB=warden

View File

@ -1,37 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import * as utils from "../global-utils";
import { createAccount } from './setups/user';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
await utils.startVault(browser, testInfo);
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
});
test('Create', async ({ page }) => {
await createAccount(test, page, users.user1);
await test.step('Create Org', async () => {
await page.getByRole('link', { name: 'New organisation' }).click();
await page.getByLabel('Organisation name (required)').fill('Test');
await page.getByRole('button', { name: 'Submit' }).click();
await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();
await utils.checkNotification(page, 'Organisation created');
});
await test.step('Create Collection', async () => {
await page.getByRole('link', { name: 'Collections' }).click();
await page.getByRole('button', { name: 'New' }).click();
await page.getByRole('menuitem', { name: 'Collection' }).click();
await page.getByLabel('Name (required)').fill('RandomCollec');
await page.getByRole('button', { name: 'Save' }).click();
await utils.checkNotification(page, 'Created collection RandomCollec');
await expect(page.getByRole('button', { name: 'RandomCollec' })).toBeVisible();
});
});

View File

@ -1,103 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
const utils = require('../global-utils');
import { createAccount, logUser } from './setups/user';
import { activateEmail, retrieveEmailCode, disableEmail } from './setups/2fa';
let users = utils.loadEnv();
let mailserver;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
mailserver = new MailDev({
port: process.env.MAILDEV_SMTP_PORT,
web: { port: process.env.MAILDEV_HTTP_PORT },
})
await mailserver.listen();
await utils.startVault(browser, testInfo, {
SMTP_HOST: process.env.MAILDEV_HOST,
SMTP_FROM: process.env.PW_SMTP_FROM,
});
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
if( mailserver ){
await mailserver.close();
}
});
test('Account creation', async ({ page }) => {
const mailBuffer = mailserver.buffer(users.user1.email);
await createAccount(test, page, users.user1, mailBuffer);
mailBuffer.close();
});
test('Login', async ({ context, page }) => {
const mailBuffer = mailserver.buffer(users.user1.email);
await logUser(test, page, users.user1, mailBuffer);
await test.step('verify email', async () => {
await page.getByText('Verify your account\'s email').click();
await expect(page.getByText('Verify your account\'s email')).toBeVisible();
await page.getByRole('button', { name: 'Send email' }).click();
await utils.checkNotification(page, 'Check your email inbox for a verification link');
const verify = await mailBuffer.expect((m) => m.subject === "Verify Your Email");
expect(verify.from[0]?.address).toBe(process.env.PW_SMTP_FROM);
const page2 = await context.newPage();
await page2.setContent(verify.html);
const link = await page2.getByTestId("verify").getAttribute("href");
await page2.close();
await page.goto(link);
await utils.checkNotification(page, 'Account email verified');
});
mailBuffer.close();
});
test('Activate 2fa', async ({ page }) => {
const emails = mailserver.buffer(users.user1.email);
await logUser(test, page, users.user1);
await activateEmail(test, page, users.user1, emails);
emails.close();
});
test('2fa', async ({ page }) => {
const emails = mailserver.buffer(users.user1.email);
await test.step('login', async () => {
await page.goto('/');
await page.getByLabel(/Email address/).fill(users.user1.email);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByLabel('Master password').fill(users.user1.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
const code = await retrieveEmailCode(test, page, emails);
await page.getByLabel(/Verification code/).fill(code);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Add it later' }).click();
await page.getByRole('link', { name: 'Skip to web app' }).click();
await expect(page).toHaveTitle(/Vaults/);
})
await disableEmail(test, page, users.user1);
emails.close();
});

View File

@ -1,51 +0,0 @@
import { test, expect, type Page, type TestInfo } from '@playwright/test';
import * as OTPAuth from "otpauth";
import * as utils from "../global-utils";
import { createAccount, logUser } from './setups/user';
import { activateTOTP, disableTOTP } from './setups/2fa';
let users = utils.loadEnv();
let totp;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
await utils.startVault(browser, testInfo, {});
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
});
test('Account creation', async ({ page }) => {
await createAccount(test, page, users.user1);
});
test('Master password login', async ({ page }) => {
await logUser(test, page, users.user1);
});
test('Authenticator 2fa', async ({ page }) => {
await logUser(test, page, users.user1);
let totp = await activateTOTP(test, page, users.user1);
await utils.logout(test, page, users.user1);
await test.step('login', async () => {
let timestamp = Date.now(); // Needed to use the next token
timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;
await page.getByLabel(/Email address/).fill(users.user1.email);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByLabel('Master password').fill(users.user1.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
await page.getByLabel(/Verification code/).fill(totp.generate({timestamp}));
await page.getByRole('button', { name: 'Continue' }).click();
await expect(page).toHaveTitle(/Vaultwarden Web/);
});
await disableTOTP(test, page, users.user1);
});

View File

@ -1,119 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import * as utils from '../global-utils';
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';
let users = utils.loadEnv();
let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
mailServer = new MailDev({
port: process.env.MAILDEV_SMTP_PORT,
web: { port: process.env.MAILDEV_HTTP_PORT },
})
await mailServer.listen();
await utils.startVault(browser, testInfo, {
SMTP_HOST: process.env.MAILDEV_HOST,
SMTP_FROM: process.env.PW_SMTP_FROM,
});
mail1Buffer = mailServer.buffer(users.user1.email);
mail2Buffer = mailServer.buffer(users.user2.email);
mail3Buffer = mailServer.buffer(users.user3.email);
});
test.afterAll('Teardown', async ({}, testInfo: TestInfo) => {
utils.stopVault();
[mail1Buffer, mail2Buffer, mail3Buffer, mailServer].map((m) => m?.close());
});
test('Create user3', async ({ page }) => {
await createAccount(test, page, users.user3, mail3Buffer);
});
test('Invite users', async ({ page }) => {
await createAccount(test, page, users.user1, mail1Buffer);
await orgs.create(test, page, 'Test');
await orgs.members(test, page, 'Test');
await orgs.invite(test, page, 'Test', users.user2.email);
await orgs.invite(test, page, 'Test', users.user3.email, {
navigate: false,
});
});
test('invited with new account', async ({ page }) => {
const invited = await mail2Buffer.expect((mail) => mail.subject === 'Join Test');
await test.step('Create account', async () => {
await page.setContent(invited.html);
const link = await page.getByTestId('invite').getAttribute('href');
await page.goto(link);
await expect(page).toHaveTitle(/Create account | Vaultwarden Web/);
//await page.getByLabel('Name').fill(users.user2.name);
await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm master password (').fill(users.user2.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Your new account has been created');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
// Redirected to the vault
await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
// await utils.checkNotification(page, 'You have been logged in!');
});
await test.step('Check mails', async () => {
await mail2Buffer.expect((m) => m.subject === 'Welcome');
await mail2Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
});
});
test('invited with existing account', async ({ page }) => {
const invited = await mail3Buffer.expect((mail) => mail.subject === 'Join Test');
await page.setContent(invited.html);
const link = await page.getByTestId('invite').getAttribute('href');
await page.goto(link);
// We should be on login page with email prefilled
await expect(page).toHaveTitle(/Vaultwarden Web/);
await page.getByRole('button', { name: 'Continue' }).click();
// Unlock page
await page.getByLabel('Master password').fill(users.user3.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle(/Vaultwarden Web/);
await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));
});
test('Confirm invited user', async ({ page }) => {
await logUser(test, page, users.user1, mail1Buffer);
await orgs.members(test, page, 'Test');
await orgs.confirm(test, page, 'Test', users.user2.email);
await mail2Buffer.expect((m) => m.subject.includes('Invitation to Test confirmed'));
});
test('Organization is visible', async ({ page }) => {
await logUser(test, page, users.user2, mail2Buffer);
await page.getByRole('button', { name: 'vault: Test', exact: true }).click();
await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});

View File

@ -1,54 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { createAccount, logUser } from './setups/user';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
await utils.startVault(browser, testInfo);
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
});
test('Invite', async ({ page }) => {
await createAccount(test, page, users.user3);
await createAccount(test, page, users.user1);
await orgs.create(test, page, 'New organisation');
await orgs.members(test, page, 'New organisation');
await test.step('missing user2', async () => {
await orgs.invite(test, page, 'New organisation', users.user2.email);
await expect(page.getByRole('row', { name: users.user2.email })).toHaveText(/Invited/);
});
await test.step('existing user3', async () => {
await orgs.invite(test, page, 'New organisation', users.user3.email);
await expect(page.getByRole('row', { name: users.user3.email })).toHaveText(/Needs confirmation/);
await orgs.confirm(test, page, 'New organisation', users.user3.email);
});
await test.step('confirm user2', async () => {
await createAccount(test, page, users.user2);
await logUser(test, page, users.user1);
await orgs.members(test, page, 'New organisation');
await orgs.confirm(test, page, 'New organisation', users.user2.email);
});
await test.step('Org visible user2', async () => {
await logUser(test, page, users.user2);
await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});
await test.step('Org visible user3', async () => {
await logUser(test, page, users.user3);
await page.getByRole('button', { name: 'vault: New organisation', exact: true }).click();
await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});
});

View File

@ -1,92 +0,0 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer } from 'maildev';
import * as OTPAuth from "otpauth";
import * as utils from '../../global-utils';
export async function activateTOTP(test: Test, page: Page, user: { name: string, password: string }): Promise<OTPAuth.TOTP> {
return await test.step('Activate TOTP 2FA', async () => {
await page.getByRole('button', { name: user.name }).click();
await page.getByRole('menuitem', { name: 'Account settings' }).click();
await page.getByRole('link', { name: 'Security' }).click();
await page.getByRole('link', { name: 'Two-step login' }).click();
await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
await page.getByLabel('Master password (required)').fill(user.password);
await page.getByRole('button', { name: 'Continue' }).click();
const secret = await page.getByLabel('Key').innerText();
let totp = new OTPAuth.TOTP({ secret, period: 30 });
await page.getByLabel(/Verification code/).fill(totp.generate());
await page.getByRole('button', { name: 'Turn on' }).click();
await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible();
await page.getByLabel('Close').click();
return totp;
})
}
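// The TOTP instance returned above can mint codes for later 2FA prompts.
// A minimal usage sketch (names taken from the specs elsewhere in this diff):
//
//   const totp = await activateTOTP(test, page, users.user1);
//   // later, at the 'Verify your Identity' prompt:
//   await page.getByLabel(/Verification code/).fill(totp.generate());
//   await page.getByRole('button', { name: 'Continue' }).click();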
export async function disableTOTP(test: Test, page: Page, user: { password: string }) {
await test.step('Disable TOTP 2FA', async () => {
await page.getByRole('button', { name: 'Test' }).click();
await page.getByRole('menuitem', { name: 'Account settings' }).click();
await page.getByRole('link', { name: 'Security' }).click();
await page.getByRole('link', { name: 'Two-step login' }).click();
await page.locator('bit-item').filter({ hasText: /Authenticator app/ }).getByRole('button').click();
await page.getByLabel('Master password (required)').click();
await page.getByLabel('Master password (required)').fill(user.password);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Turn off' }).click();
await page.getByRole('button', { name: 'Yes' }).click();
await utils.checkNotification(page, 'Two-step login provider turned off');
});
}
export async function activateEmail(test: Test, page: Page, user: { name: string, password: string }, mailBuffer: MailBuffer) {
await test.step('Activate Email 2FA', async () => {
await page.getByRole('button', { name: user.name }).click();
await page.getByRole('menuitem', { name: 'Account settings' }).click();
await page.getByRole('link', { name: 'Security' }).click();
await page.getByRole('link', { name: 'Two-step login' }).click();
await page.locator('bit-item').filter({ hasText: 'Enter a code sent to your email' }).getByRole('button').click();
await page.getByLabel('Master password (required)').fill(user.password);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Send email' }).click();
});
let code = await retrieveEmailCode(test, page, mailBuffer);
await test.step('input code', async () => {
await page.getByLabel('2. Enter the resulting 6').fill(code);
await page.getByRole('button', { name: 'Turn on' }).click();
await expect(page.getByRole('heading', { name: 'Turned on', exact: true })).toBeVisible();
});
}
export async function retrieveEmailCode(test: Test, page: Page, mailBuffer: MailBuffer): Promise<string> {
return await test.step('retrieve code', async () => {
const codeMail = await mailBuffer.expect((mail) => mail.subject.includes("Login Verification Code"));
const page2 = await page.context().newPage();
await page2.setContent(codeMail.html);
const code = await page2.getByTestId("2fa").innerText();
await page2.close();
return code;
});
}
export async function disableEmail(test: Test, page: Page, user: { password: string }) {
await test.step('Disable Email 2FA', async () => {
await page.getByRole('button', { name: 'Test' }).click();
await page.getByRole('menuitem', { name: 'Account settings' }).click();
await page.getByRole('link', { name: 'Security' }).click();
await page.getByRole('link', { name: 'Two-step login' }).click();
await page.locator('bit-item').filter({ hasText: 'Email' }).getByRole('button').click();
await page.getByLabel('Master password (required)').click();
await page.getByLabel('Master password (required)').fill(user.password);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Turn off' }).click();
await page.getByRole('button', { name: 'Yes' }).click();
await utils.checkNotification(page, 'Two-step login provider turned off');
});
}

View File

@ -1,7 +0,0 @@
import { test } from './db-test';
const utils = require('../../global-utils');
test('DB start', async ({ serviceName }) => {
utils.startComposeService(serviceName);
});

View File

@ -1,11 +0,0 @@
import { test } from './db-test';
const utils = require('../../global-utils');
utils.loadEnv();
test('DB teardown ?', async ({ serviceName }) => {
if( process.env.PW_KEEP_SERVICE_RUNNNING !== "true" ) {
utils.stopComposeService(serviceName);
}
});

View File

@ -1,9 +0,0 @@
import { test as base } from '@playwright/test';
export type TestOptions = {
serviceName: string;
};
export const test = base.extend<TestOptions>({
serviceName: ['', { option: true }],
});
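// The serviceName option above is meant to be injected per project from the
// Playwright config. A minimal sketch, assuming this repo's layout (project
// names and the import path are illustrative, not taken from this diff):
//
//   // playwright.config.ts
//   import { defineConfig } from '@playwright/test';
//   import { TestOptions } from './tests/setups/db-test';
//
//   export default defineConfig<TestOptions>({
//     projects: [
//       { name: 'postgres-setup', use: { serviceName: 'postgres' } },
//       { name: 'mariadb-setup', use: { serviceName: 'mariadb' } },
//     ],
//   });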

View File

@ -1,77 +0,0 @@
import { expect, type Browser, Page } from '@playwright/test';
import * as utils from '../../global-utils';
export async function create(test, page: Page, name: string) {
await test.step('Create Org', async () => {
await page.locator('a').filter({ hasText: 'Password Manager' }).first().click();
await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
await page.getByRole('link', { name: 'New organisation' }).click();
await page.getByLabel('Organisation name (required)').fill(name);
await page.getByRole('button', { name: 'Submit' }).click();
await utils.checkNotification(page, 'Organisation created');
});
}
export async function policies(test, page: Page, name: string) {
await test.step(`Navigate to ${name} policies`, async () => {
await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
await page.getByRole('button', { name: 'Toggle collapse Settings' }).click();
await page.getByRole('link', { name: 'Policies' }).click();
await expect(page.getByRole('heading', { name: 'Policies' })).toBeVisible();
});
}
export async function members(test, page: Page, name: string) {
await test.step(`Navigate to ${name} members`, async () => {
await page.locator('a').filter({ hasText: 'Admin Console' }).first().click();
await page.locator('org-switcher').getByLabel(/Toggle collapse/).click();
await page.locator('org-switcher').getByRole('link', { name: `${name}` }).first().click();
await expect(page.getByRole('heading', { name: `${name} collections` })).toBeVisible();
await page.locator('div').filter({ hasText: 'Members' }).nth(2).click();
await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
await expect(page.getByRole('cell', { name: 'All' })).toBeVisible();
});
}
export async function invite(test, page: Page, name: string, email: string) {
await test.step(`Invite ${email}`, async () => {
await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
await page.getByRole('button', { name: 'Invite member' }).click();
await page.getByLabel('Email (required)').fill(email);
await page.getByRole('tab', { name: 'Collections' }).click();
await page.getByRole('combobox', { name: 'Permission' }).click();
await page.getByText('Edit items', { exact: true }).click();
await page.getByLabel('Select collections').click();
await page.getByText('Default collection').click();
await page.getByRole('cell', { name: 'Collection', exact: true }).click();
await page.getByRole('button', { name: 'Save' }).click();
await utils.checkNotification(page, 'User(s) invited');
});
}
export async function confirm(test, page: Page, name: string, user_email: string) {
await test.step(`Confirm ${user_email}`, async () => {
await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click();
await page.getByRole('menuitem', { name: 'Confirm' }).click();
await expect(page.getByRole('heading', { name: 'Confirm user' })).toBeVisible();
await page.getByRole('button', { name: 'Confirm' }).click();
await utils.checkNotification(page, 'confirmed');
});
}
export async function revoke(test, page: Page, name: string, user_email: string) {
await test.step(`Revoke ${user_email}`, async () => {
await expect(page.getByRole('heading', { name: 'Members' })).toBeVisible();
await page.getByRole('row').filter({hasText: user_email}).getByLabel('Options').click();
await page.getByRole('menuitem', { name: 'Revoke access' }).click();
await expect(page.getByRole('heading', { name: 'Revoke access' })).toBeVisible();
await page.getByRole('button', { name: 'Revoke access' }).click();
await utils.checkNotification(page, 'Revoked organisation access');
});
}
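// These helpers compose into the invite flow exercised by the specs in this
// diff; a typical sequence:
//
//   await orgs.create(test, page, 'Test');
//   await orgs.members(test, page, 'Test');
//   await orgs.invite(test, page, 'Test', users.user2.email);
//   await orgs.confirm(test, page, 'Test', users.user2.email);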

View File

@ -1,18 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
const { exec } = require('node:child_process');
const utils = require('../../global-utils');
utils.loadEnv();
test.beforeAll('Setup', async () => {
console.log("Starting Keycloak");
exec(`docker compose --profile keycloak --env-file test.env up`);
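// exec() is fire-and-forget: the compose stack starts in the background and
// the test below polls with utils.waitFor() until Keycloak answers.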
});
test('Keycloak is up', async ({ page }) => {
await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser());
// Dummy authority is created at the end of the setup
await utils.waitFor(process.env.DUMMY_AUTHORITY, page.context().browser());
console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`);
});

View File

@ -1,15 +0,0 @@
import { test, type FullConfig } from '@playwright/test';
const { execSync } = require('node:child_process');
const utils = require('../../global-utils');
utils.loadEnv();
test('Keycloak teardown', async () => {
if( process.env.PW_KEEP_SERVICE_RUNNNING === "true" ) {
console.log("Keep Keycloak running");
} else {
console.log("Keycloak stopping");
execSync(`docker compose --profile keycloak --env-file test.env stop Keycloak`);
}
});

View File

@ -1,133 +0,0 @@
import { expect, type Page, Test } from '@playwright/test';
import { type MailBuffer, MailServer } from 'maildev';
import * as OTPAuth from "otpauth";
import * as utils from '../../global-utils';
import { retrieveEmailCode } from './2fa';
/**
* If a MailBuffer is passed it will be used and consume the expected emails
*/
export async function logNewUser(
test: Test,
page: Page,
user: { email: string, name: string, password: string },
options: { mailBuffer?: MailBuffer } = {}
) {
await test.step(`Create user ${user.name}`, async () => {
await page.context().clearCookies();
await test.step('Landing page', async () => {
await utils.cleanLanding(page);
await page.locator("input[type=email].vw-email-sso").fill(user.email);
await page.getByRole('button', { name: /Use single sign-on/ }).click();
});
await test.step('Keycloak login', async () => {
await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
await page.getByLabel(/Username/).fill(user.name);
await page.getByLabel('Password', { exact: true }).fill(user.password);
await page.getByRole('button', { name: 'Sign In' }).click();
});
await test.step('Create Vault account', async () => {
await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm master password (').fill(user.password);
await page.getByRole('button', { name: 'Create account' }).click();
});
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
});
if( options.mailBuffer ){
let mailBuffer = options.mailBuffer;
await test.step('Check emails', async () => {
await mailBuffer.expect((m) => m.subject === "Welcome");
await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
});
}
});
}
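// Typical call, mirroring the SSO specs below: pair the login with a maildev
// buffer so the welcome and new-device mails are consumed:
//
//   const mailBuffer = mailserver.buffer(users.user1.email);
//   await logNewUser(test, page, users.user1, { mailBuffer });
//   mailBuffer.close();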
/**
* If a MailBuffer is passed it will be used and consume the expected emails
*/
export async function logUser(
test: Test,
page: Page,
user: { email: string, name: string, password: string },
options: {
mailBuffer ?: MailBuffer,
totp?: OTPAuth.TOTP,
mail2fa?: boolean,
} = {}
) {
let mailBuffer = options.mailBuffer;
await test.step(`Log user ${user.email}`, async () => {
await page.context().clearCookies();
await test.step('Landing page', async () => {
await utils.cleanLanding(page);
await page.locator("input[type=email].vw-email-sso").fill(user.email);
await page.getByRole('button', { name: /Use single sign-on/ }).click();
});
await test.step('Keycloak login', async () => {
await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
await page.getByLabel(/Username/).fill(user.name);
await page.getByLabel('Password', { exact: true }).fill(user.password);
await page.getByRole('button', { name: 'Sign In' }).click();
});
if( options.totp || options.mail2fa ){
let code;
await test.step('2FA check', async () => {
await expect(page.getByRole('heading', { name: 'Verify your Identity' })).toBeVisible();
if( options.totp ) {
const totp = options.totp;
let timestamp = Date.now(); // Needed to use the next token
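// Advance to one second past the start of the next TOTP period, so the
// generated code differs from the one already consumed during activation.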
timestamp = timestamp + (totp.period - (Math.floor(timestamp / 1000) % totp.period) + 1) * 1000;
code = totp.generate({timestamp});
} else if( options.mail2fa ){
code = await retrieveEmailCode(test, page, mailBuffer);
}
await page.getByLabel(/Verification code/).fill(code);
await page.getByRole('button', { name: 'Continue' }).click();
});
}
await test.step('Unlock vault', async () => {
await expect(page).toHaveTitle('Vaultwarden Web');
await expect(page.getByRole('heading', { name: 'Your vault is locked' })).toBeVisible();
await page.getByLabel('Master password').fill(user.password);
await page.getByRole('button', { name: 'Unlock' }).click();
});
await utils.ignoreExtension(page);
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
});
if( mailBuffer ){
await test.step('Check email', async () => {
await mailBuffer.expect((m) => m.subject.includes("New Device Logged"));
});
}
});
}
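// A typical TOTP round-trip, as exercised in the 2fa-totp spec later in this
// diff:
//
//   let totp = await activateTOTP(test, page, users.user1);
//   await logUser(test, page, users.user1, { totp });
//   await disableTOTP(test, page, users.user1);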

View File

@ -1,58 +0,0 @@
import { expect, type Browser, Page } from '@playwright/test';
import { type MailBuffer } from 'maildev';
import * as utils from '../../global-utils';
export async function createAccount(test, page: Page, user: { email: string, name: string, password: string }, mailBuffer?: MailBuffer) {
await test.step(`Create user ${user.name}`, async () => {
await utils.cleanLanding(page);
await page.getByRole('link', { name: 'Create account' }).click();
// Back to Vault create account
await expect(page).toHaveTitle(/Create account \| Vaultwarden Web/);
await page.getByLabel(/Email address/).fill(user.email);
await page.getByLabel('Name').fill(user.name);
await page.getByRole('button', { name: 'Continue' }).click();
// Vault finish Creation
await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm master password (').fill(user.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Your new account has been created');
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
// await utils.checkNotification(page, 'You have been logged in!');
if( mailBuffer ){
await mailBuffer.expect((m) => m.subject === "Welcome");
await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
}
});
}
export async function logUser(test, page: Page, user: { email: string, password: string }, mailBuffer?: MailBuffer) {
await test.step(`Log user ${user.email}`, async () => {
await utils.cleanLanding(page);
await page.getByLabel(/Email address/).fill(user.email);
await page.getByRole('button', { name: 'Continue' }).click();
// Unlock page
await page.getByLabel('Master password').fill(user.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle(/Vaultwarden Web/);
if( mailBuffer ){
await mailBuffer.expect((m) => m.subject === "New Device Logged In From Firefox");
}
});
}

View File

@ -1,53 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import { logNewUser, logUser } from './setups/sso';
import { activateEmail, disableEmail } from './setups/2fa';
import * as utils from "../global-utils";
let users = utils.loadEnv();
let mailserver;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
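// MailDev ports come from test.env (loaded via utils.loadEnv() above); the
// web UI on MAILDEV_HTTP_PORT is handy when debugging mail assertions.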
mailserver = new MailDev({
port: process.env.MAILDEV_SMTP_PORT,
web: { port: process.env.MAILDEV_HTTP_PORT },
})
await mailserver.listen();
await utils.startVault(browser, testInfo, {
SSO_ENABLED: true,
SSO_ONLY: false,
SMTP_HOST: process.env.MAILDEV_HOST,
SMTP_FROM: process.env.PW_SMTP_FROM,
});
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
if( mailserver ){
await mailserver.close();
}
});
test('Create and activate 2FA', async ({ page }) => {
const mailBuffer = mailserver.buffer(users.user1.email);
await logNewUser(test, page, users.user1, {mailBuffer: mailBuffer});
await activateEmail(test, page, users.user1, mailBuffer);
mailBuffer.close();
});
test('Log and disable', async ({ page }) => {
const mailBuffer = mailserver.buffer(users.user1.email);
await logUser(test, page, users.user1, {mailBuffer: mailBuffer, mail2fa: true});
await disableEmail(test, page, users.user1);
mailBuffer.close();
});

View File

@ -1,85 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { logNewUser, logUser } from './setups/sso';
import { activateTOTP, disableTOTP } from './setups/2fa';
import * as utils from "../global-utils";
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
await utils.startVault(browser, testInfo, {
SSO_ENABLED: true,
SSO_ONLY: false
});
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
});
test('Account creation using SSO', async ({ page }) => {
// Landing page
await logNewUser(test, page, users.user1);
});
test('SSO login', async ({ page }) => {
await logUser(test, page, users.user1);
});
test('Non SSO login', async ({ page }) => {
// Landing page
await page.goto('/');
await page.locator("input[type=email].vw-email-sso").fill(users.user1.email);
await page.getByRole('button', { name: 'Other' }).click();
// Unlock page
await page.getByLabel('Master password').fill(users.user1.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
// We are now in the default vault page
await expect(page).toHaveTitle(/Vaultwarden Web/);
});
test('SSO login with TOTP 2fa', async ({ page }) => {
await logUser(test, page, users.user1);
let totp = await activateTOTP(test, page, users.user1);
await logUser(test, page, users.user1, { totp });
await disableTOTP(test, page, users.user1);
});
test('Non SSO login impossible', async ({ page, browser }, testInfo: TestInfo) => {
await utils.restartVault(page, testInfo, {
SSO_ENABLED: true,
SSO_ONLY: true
}, false);
// Landing page
await page.goto('/');
// Check that SSO login is available
await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(1);
// No Continue/Other
await expect(page.getByRole('button', { name: 'Other' })).toHaveCount(0);
});
test('No SSO login', async ({ page }, testInfo: TestInfo) => {
await utils.restartVault(page, testInfo, {
SSO_ENABLED: false
}, false);
// Landing page
await page.goto('/');
// No SSO button (rely on a correct selector checked in previous test)
await expect(page.getByRole('button', { name: /Use single sign-on/ })).toHaveCount(0);
// Can continue to Master password
await page.getByLabel(/Email address/).fill(users.user1.email);
await page.getByRole('button', { name: 'Continue' }).click();
await expect(page.getByRole('button', { name: 'Log in with master password' })).toHaveCount(1);
});

View File

@ -1,124 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';
let users = utils.loadEnv();
let mailServer, mail1Buffer, mail2Buffer, mail3Buffer;
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
mailServer = new MailDev({
port: process.env.MAILDEV_SMTP_PORT,
web: { port: process.env.MAILDEV_HTTP_PORT },
})
await mailServer.listen();
await utils.startVault(browser, testInfo, {
SMTP_HOST: process.env.MAILDEV_HOST,
SMTP_FROM: process.env.PW_SMTP_FROM,
SSO_ENABLED: true,
SSO_ONLY: true,
});
mail1Buffer = mailServer.buffer(users.user1.email);
mail2Buffer = mailServer.buffer(users.user2.email);
mail3Buffer = mailServer.buffer(users.user3.email);
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
[mail1Buffer, mail2Buffer, mail3Buffer, mailServer].forEach((m) => m?.close());
});
test('Create user3', async ({ page }) => {
await logNewUser(test, page, users.user3, { mailBuffer: mail3Buffer });
});
test('Invite users', async ({ page }) => {
await logNewUser(test, page, users.user1, { mailBuffer: mail1Buffer });
await orgs.create(test, page, '/Test');
await orgs.members(test, page, '/Test');
await orgs.invite(test, page, '/Test', users.user2.email);
await orgs.invite(test, page, '/Test', users.user3.email);
});
test('invited with new account', async ({ page }) => {
const link = await test.step('Extract email link', async () => {
const invited = await mail2Buffer.expect((m) => m.subject === "Join /Test");
await page.setContent(invited.html);
return await page.getByTestId("invite").getAttribute("href");
});
await test.step('Redirect to Keycloak', async () => {
await page.goto(link);
});
await test.step('Keycloak login', async () => {
await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
await page.getByLabel(/Username/).fill(users.user2.name);
await page.getByLabel('Password', { exact: true }).fill(users.user2.password);
await page.getByRole('button', { name: 'Sign In' }).click();
});
await test.step('Create Vault account', async () => {
await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm master password (').fill(users.user2.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
});
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
});
await test.step('Check mails', async () => {
await mail2Buffer.expect((m) => m.subject.includes("New Device Logged"));
await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
});
});
test('invited with existing account', async ({ page }) => {
const link = await test.step('Extract email link', async () => {
const invited = await mail3Buffer.expect((m) => m.subject === "Join /Test");
await page.setContent(invited.html);
return await page.getByTestId("invite").getAttribute("href");
});
await test.step('Redirect to Keycloak', async () => {
await page.goto(link);
});
await test.step('Keycloak login', async () => {
await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible();
await page.getByLabel(/Username/).fill(users.user3.name);
await page.getByLabel('Password', { exact: true }).fill(users.user3.password);
await page.getByRole('button', { name: 'Sign In' }).click();
});
await test.step('Unlock vault', async () => {
await expect(page).toHaveTitle('Vaultwarden Web');
await page.getByLabel('Master password').fill(users.user3.password);
await page.getByRole('button', { name: 'Unlock' }).click();
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
});
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
});
await test.step('Check mails', async () => {
await mail3Buffer.expect((m) => m.subject.includes("New Device Logged"));
await mail1Buffer.expect((m) => m.subject === "Invitation to /Test accepted");
});
});

View File

@ -1,76 +0,0 @@
import { test, expect, type TestInfo } from '@playwright/test';
import { MailDev } from 'maildev';
import * as utils from "../global-utils";
import * as orgs from './setups/orgs';
import { logNewUser, logUser } from './setups/sso';
let users = utils.loadEnv();
test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => {
await utils.startVault(browser, testInfo, {
SSO_ENABLED: true,
SSO_ONLY: true,
});
});
test.afterAll('Teardown', async ({}) => {
utils.stopVault();
});
test('Create user3', async ({ page }) => {
await logNewUser(test, page, users.user3);
});
test('Invite users', async ({ page }) => {
await logNewUser(test, page, users.user1);
await orgs.create(test, page, '/Test');
await orgs.members(test, page, '/Test');
await orgs.invite(test, page, '/Test', users.user2.email);
await orgs.invite(test, page, '/Test', users.user3.email);
await orgs.confirm(test, page, '/Test', users.user3.email);
});
test('Create invited account', async ({ page }) => {
await logNewUser(test, page, users.user2);
});
test('Confirm invited user', async ({ page }) => {
await logUser(test, page, users.user1);
await orgs.members(test, page, '/Test');
await expect(page.getByRole('row', { name: users.user2.name })).toHaveText(/Needs confirmation/);
await orgs.confirm(test, page, '/Test', users.user2.email);
});
test('Organization is visible', async ({ page }) => {
await logUser(test, page, users.user2);
await page.getByLabel('vault: /Test').click();
await expect(page.getByLabel('Filter: Default collection')).toBeVisible();
});
test('Enforce password policy', async ({ page }) => {
await logUser(test, page, users.user1);
await orgs.policies(test, page, '/Test');
await test.step(`Set master password policy`, async () => {
await page.getByRole('button', { name: 'Master password requirements' }).click();
await page.getByRole('checkbox', { name: 'Turn on' }).check();
await page.getByRole('checkbox', { name: 'Require existing members to' }).check();
await page.getByRole('spinbutton', { name: 'Minimum length' }).fill('42');
await page.getByRole('button', { name: 'Save' }).click();
await utils.checkNotification(page, 'Edited policy Master password requirements.');
});
await utils.logout(test, page, users.user1);
await test.step(`Unlock trigger policy`, async () => {
await page.locator("input[type=email].vw-email-sso").fill(users.user1.email);
await page.getByRole('button', { name: 'Use single sign-on' }).click();
await page.getByRole('textbox', { name: 'Master password (required)' }).fill(users.user1.password);
await page.getByRole('button', { name: 'Unlock' }).click();
await expect(page.getByRole('heading', { name: 'Update master password' })).toBeVisible();
});
});

View File

@ -1,4 +1,4 @@
[toolchain]
channel = "1.92.0"
channel = "1.88.0"
components = [ "rustfmt", "clippy" ]
profile = "minimal"

View File

@ -1,16 +1,17 @@
use std::{env, sync::LazyLock};
use once_cell::sync::Lazy;
use reqwest::Method;
use serde::de::DeserializeOwned;
use serde_json::Value;
use std::env;
use rocket::serde::json::Json;
use rocket::{
form::Form,
http::{Cookie, CookieJar, MediaType, SameSite, Status},
request::{FromRequest, Outcome, Request},
response::{content::RawHtml as Html, Redirect},
serde::json::Json,
Catcher, Route,
};
use serde::de::DeserializeOwned;
use serde_json::Value;
use crate::{
api::{
@ -19,14 +20,7 @@ use crate::{
},
auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp, Secure},
config::ConfigBuilder,
db::{
backup_sqlite, get_sql_server_version,
models::{
Attachment, Cipher, Collection, Device, Event, EventType, Group, Invitation, Membership, MembershipId,
MembershipType, OrgPolicy, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId,
},
DbConn, DbConnType, ACTIVE_DB_TYPE,
},
db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
error::{Error, MapResult},
http_client::make_http_request,
mail,
@ -52,7 +46,6 @@ pub fn routes() -> Vec<Route> {
invite_user,
logout,
delete_user,
delete_sso_user,
deauth_user,
disable_user,
enable_user,
@ -81,21 +74,18 @@ pub fn catchers() -> Vec<Catcher> {
}
}
static DB_TYPE: LazyLock<&str> = LazyLock::new(|| match ACTIVE_DB_TYPE.get() {
#[cfg(mysql)]
Some(DbConnType::Mysql) => "MySQL",
#[cfg(postgresql)]
Some(DbConnType::Postgresql) => "PostgreSQL",
#[cfg(sqlite)]
Some(DbConnType::Sqlite) => "SQLite",
_ => "Unknown",
static DB_TYPE: Lazy<&str> = Lazy::new(|| {
DbConnType::from_url(&CONFIG.database_url())
.map(|t| match t {
DbConnType::sqlite => "SQLite",
DbConnType::mysql => "MySQL",
DbConnType::postgresql => "PostgreSQL",
})
.unwrap_or("Unknown")
});
#[cfg(sqlite)]
static CAN_BACKUP: LazyLock<bool> =
LazyLock::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
#[cfg(not(sqlite))]
static CAN_BACKUP: LazyLock<bool> = LazyLock::new(|| false);
static CAN_BACKUP: Lazy<bool> =
Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
#[get("/")]
fn admin_disabled() -> &'static str {
@ -157,10 +147,10 @@ fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
err_code!("Authorization failed.", Status::Unauthorized.code);
}
let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
render_admin_login(None, Some(&redirect))
render_admin_login(None, Some(redirect))
}
fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiResult<Html<String>> {
fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> {
// If there is an error, show it
let msg = msg.map(|msg| format!("Error: {msg}"));
let json = json!({
@ -194,17 +184,14 @@ fn post_admin_login(
if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
return Err(AdminResponse::TooManyRequests(render_admin_login(
Some("Too many requests, try again later."),
redirect.as_deref(),
redirect,
)));
}
// If the token is invalid, redirect to login page
if !_validate_token(&data.token) {
error!("Invalid admin token. IP: {}", ip.ip);
Err(AdminResponse::Unauthorized(render_admin_login(
Some("Invalid admin token, please try again."),
redirect.as_deref(),
)))
Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect)))
} else {
// If the token received is valid, generate JWT and save it as a cookie
let claims = generate_admin_claims();
@ -252,7 +239,6 @@ struct AdminTemplateData {
page_data: Option<Value>,
logged_in: bool,
urlpath: String,
sso_enabled: bool,
}
impl AdminTemplateData {
@ -262,7 +248,6 @@ impl AdminTemplateData {
page_data: Some(page_data),
logged_in: true,
urlpath: CONFIG.domain_path(),
sso_enabled: CONFIG.sso_enabled(),
}
}
@ -296,7 +281,7 @@ struct InviteData {
email: String,
}
async fn get_user_or_404(user_id: &UserId, conn: &DbConn) -> ApiResult<User> {
async fn get_user_or_404(user_id: &UserId, conn: &mut DbConn) -> ApiResult<User> {
if let Some(user) = User::find_by_uuid(user_id, conn).await {
Ok(user)
} else {
@ -305,15 +290,15 @@ async fn get_user_or_404(user_id: &UserId, conn: &DbConn) -> ApiResult<User> {
}
#[post("/invite", format = "application/json", data = "<data>")]
async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbConn) -> JsonResult {
let data: InviteData = data.into_inner();
if User::find_by_mail(&data.email, &conn).await.is_some() {
if User::find_by_mail(&data.email, &mut conn).await.is_some() {
err_code!("User already exists", Status::Conflict.code)
}
let mut user = User::new(&data.email, None);
let mut user = User::new(data.email);
async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
if CONFIG.mail_enabled() {
let org_id: OrganizationId = FAKE_ADMIN_UUID.to_string().into();
let member_id: MembershipId = FAKE_ADMIN_UUID.to_string().into();
@ -324,10 +309,10 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -
}
}
_generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
_generate_invite(&user, &mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
user.save(&mut conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
Ok(Json(user.to_json(&conn).await))
Ok(Json(user.to_json(&mut conn).await))
}
#[post("/test/smtp", format = "application/json", data = "<data>")]
@ -348,14 +333,14 @@ fn logout(cookies: &CookieJar<'_>) -> Redirect {
}
#[get("/users")]
async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
let users = User::get_all(&conn).await;
async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json<Value> {
let users = User::get_all(&mut conn).await;
let mut users_json = Vec::with_capacity(users.len());
for (u, _) in users {
let mut usr = u.to_json(&conn).await;
for u in users {
let mut usr = u.to_json(&mut conn).await;
usr["userEnabled"] = json!(u.enabled);
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
usr["lastActive"] = match u.last_active(&conn).await {
usr["lastActive"] = match u.last_active(&mut conn).await {
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
None => json!(None::<String>),
};
@ -366,23 +351,20 @@ async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
}
#[get("/users/overview")]
async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
let users = User::get_all(&conn).await;
async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
let users = User::get_all(&mut conn).await;
let mut users_json = Vec::with_capacity(users.len());
for (u, sso_u) in users {
let mut usr = u.to_json(&conn).await;
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await);
usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await);
usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await));
for u in users {
let mut usr = u.to_json(&mut conn).await;
usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &mut conn).await);
usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &mut conn).await);
usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &mut conn).await));
usr["user_enabled"] = json!(u.enabled);
usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
usr["last_active"] = match u.last_active(&conn).await {
usr["last_active"] = match u.last_active(&mut conn).await {
Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
None => json!("Never"),
};
usr["sso_identifier"] = json!(sso_u.map(|u| u.identifier.to_string()).unwrap_or(String::new()));
users_json.push(usr);
}
@ -391,9 +373,9 @@ async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<Stri
}
#[get("/users/by-mail/<mail>")]
async fn get_user_by_mail_json(mail: &str, _token: AdminToken, conn: DbConn) -> JsonResult {
if let Some(u) = User::find_by_mail(mail, &conn).await {
let mut usr = u.to_json(&conn).await;
async fn get_user_by_mail_json(mail: &str, _token: AdminToken, mut conn: DbConn) -> JsonResult {
if let Some(u) = User::find_by_mail(mail, &mut conn).await {
let mut usr = u.to_json(&mut conn).await;
usr["userEnabled"] = json!(u.enabled);
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
Ok(Json(usr))
@ -403,21 +385,21 @@ async fn get_user_by_mail_json(mail: &str, _token: AdminToken, conn: DbConn) ->
}
#[get("/users/<user_id>")]
async fn get_user_json(user_id: UserId, _token: AdminToken, conn: DbConn) -> JsonResult {
let u = get_user_or_404(&user_id, &conn).await?;
let mut usr = u.to_json(&conn).await;
async fn get_user_json(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> JsonResult {
let u = get_user_or_404(&user_id, &mut conn).await?;
let mut usr = u.to_json(&mut conn).await;
usr["userEnabled"] = json!(u.enabled);
usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
Ok(Json(usr))
}
#[post("/users/<user_id>/delete", format = "application/json")]
async fn delete_user(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
let user = get_user_or_404(&user_id, &conn).await?;
async fn delete_user(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult {
let user = get_user_or_404(&user_id, &mut conn).await?;
// Get the membership records before deleting the actual user
let memberships = Membership::find_any_state_by_user(&user_id, &conn).await;
let res = user.delete(&conn).await;
let memberships = Membership::find_any_state_by_user(&user_id, &mut conn).await;
let res = user.delete(&mut conn).await;
for membership in memberships {
log_event(
@ -427,28 +409,7 @@ async fn delete_user(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyR
&ACTING_ADMIN_USER.into(),
14, // Use UnknownBrowser type
&token.ip.ip,
&conn,
)
.await;
}
res
}
#[delete("/users/<user_id>/sso", format = "application/json")]
async fn delete_sso_user(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
let memberships = Membership::find_any_state_by_user(&user_id, &conn).await;
let res = SsoUser::delete(&user_id, &conn).await;
for membership in memberships {
log_event(
EventType::OrganizationUserUnlinkedSso as i32,
&membership.uuid,
&membership.org_uuid,
&ACTING_ADMIN_USER.into(),
14, // Use UnknownBrowser type
&token.ip.ip,
&conn,
&mut conn,
)
.await;
}
@ -457,13 +418,13 @@ async fn delete_sso_user(user_id: UserId, token: AdminToken, conn: DbConn) -> Em
}
#[post("/users/<user_id>/deauth", format = "application/json")]
async fn deauth_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &conn).await?;
async fn deauth_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &mut conn).await?;
nt.send_logout(&user, None, &conn).await;
nt.send_logout(&user, None, &mut conn).await;
if CONFIG.push_enabled() {
for device in Device::find_push_devices_by_user(&user.uuid, &conn).await {
for device in Device::find_push_devices_by_user(&user.uuid, &mut conn).await {
match unregister_push_device(&device.push_uuid).await {
Ok(r) => r,
Err(e) => error!("Unable to unregister devices from Bitwarden server: {e}"),
@ -471,46 +432,46 @@ async fn deauth_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Noti
}
}
Device::delete_all_by_user(&user.uuid, &conn).await?;
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp();
user.save(&conn).await
user.save(&mut conn).await
}
#[post("/users/<user_id>/disable", format = "application/json")]
async fn disable_user(user_id: UserId, _token: AdminToken, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &conn).await?;
Device::delete_all_by_user(&user.uuid, &conn).await?;
async fn disable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &mut conn).await?;
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp();
user.enabled = false;
let save_result = user.save(&conn).await;
let save_result = user.save(&mut conn).await;
nt.send_logout(&user, None, &conn).await;
nt.send_logout(&user, None, &mut conn).await;
save_result
}
#[post("/users/<user_id>/enable", format = "application/json")]
async fn enable_user(user_id: UserId, _token: AdminToken, conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &conn).await?;
async fn enable_user(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &mut conn).await?;
user.enabled = true;
user.save(&conn).await
user.save(&mut conn).await
}
#[post("/users/<user_id>/remove-2fa", format = "application/json")]
async fn remove_2fa(user_id: UserId, token: AdminToken, conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &conn).await?;
TwoFactor::delete_all_by_user(&user.uuid, &conn).await?;
two_factor::enforce_2fa_policy(&user, &ACTING_ADMIN_USER.into(), 14, &token.ip.ip, &conn).await?;
async fn remove_2fa(user_id: UserId, token: AdminToken, mut conn: DbConn) -> EmptyResult {
let mut user = get_user_or_404(&user_id, &mut conn).await?;
TwoFactor::delete_all_by_user(&user.uuid, &mut conn).await?;
two_factor::enforce_2fa_policy(&user, &ACTING_ADMIN_USER.into(), 14, &token.ip.ip, &mut conn).await?;
user.totp_recover = None;
user.save(&conn).await
user.save(&mut conn).await
}
#[post("/users/<user_id>/invite/resend", format = "application/json")]
async fn resend_user_invite(user_id: UserId, _token: AdminToken, conn: DbConn) -> EmptyResult {
if let Some(user) = User::find_by_uuid(&user_id, &conn).await {
async fn resend_user_invite(user_id: UserId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
if let Some(user) = User::find_by_uuid(&user_id, &mut conn).await {
//TODO: replace this with user.status check when it will be available (PR#3397)
if !user.password_hash.is_empty() {
err_code!("User already accepted invitation", Status::BadRequest.code);
@ -536,10 +497,10 @@ struct MembershipTypeData {
}
#[post("/users/org_type", format = "application/json", data = "<data>")]
async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToken, conn: DbConn) -> EmptyResult {
async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToken, mut conn: DbConn) -> EmptyResult {
let data: MembershipTypeData = data.into_inner();
let Some(mut member_to_edit) = Membership::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await
let Some(mut member_to_edit) = Membership::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &mut conn).await
else {
err!("The specified user isn't member of the organization")
};
@ -551,14 +512,28 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
if member_to_edit.atype == MembershipType::Owner && new_type != MembershipType::Owner {
// Removing owner permission, check that there is at least one other confirmed owner
if Membership::count_confirmed_by_org_and_type(&data.org_uuid, MembershipType::Owner, &conn).await <= 1 {
if Membership::count_confirmed_by_org_and_type(&data.org_uuid, MembershipType::Owner, &mut conn).await <= 1 {
err!("Can't change the type of the last owner")
}
}
member_to_edit.atype = new_type;
// This check is also done at api::organizations::{accept_invite, _confirm_invite, _activate_member, edit_member}, update_membership_type
OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?;
// It returns different error messages per function.
if new_type < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &member_to_edit.org_uuid, true, &mut conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &mut conn).await?;
} else {
err!("You cannot modify this user to this type because they have not setup 2FA");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot modify this user to this type because it is a member of an organization which forbids it");
}
}
}
log_event(
EventType::OrganizationUserUpdated as i32,
@ -567,31 +542,32 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
&ACTING_ADMIN_USER.into(),
14, // Use UnknownBrowser type
&token.ip.ip,
&conn,
&mut conn,
)
.await;
member_to_edit.save(&conn).await
member_to_edit.atype = new_type;
member_to_edit.save(&mut conn).await
}
#[post("/users/update_revision", format = "application/json")]
async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
User::update_all_revisions(&conn).await
async fn update_revision_users(_token: AdminToken, mut conn: DbConn) -> EmptyResult {
User::update_all_revisions(&mut conn).await
}
#[get("/organizations/overview")]
async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
let organizations = Organization::get_all(&conn).await;
async fn organizations_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult<Html<String>> {
let organizations = Organization::get_all(&mut conn).await;
let mut organizations_json = Vec::with_capacity(organizations.len());
for o in organizations {
let mut org = o.to_json();
org["user_count"] = json!(Membership::count_by_org(&o.uuid, &conn).await);
org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await);
org["collection_count"] = json!(Collection::count_by_org(&o.uuid, &conn).await);
org["group_count"] = json!(Group::count_by_org(&o.uuid, &conn).await);
org["event_count"] = json!(Event::count_by_org(&o.uuid, &conn).await);
org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await);
org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await));
org["user_count"] = json!(Membership::count_by_org(&o.uuid, &mut conn).await);
org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &mut conn).await);
org["collection_count"] = json!(Collection::count_by_org(&o.uuid, &mut conn).await);
org["group_count"] = json!(Group::count_by_org(&o.uuid, &mut conn).await);
org["event_count"] = json!(Event::count_by_org(&o.uuid, &mut conn).await);
org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &mut conn).await);
org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &mut conn).await));
organizations_json.push(org);
}
@ -600,9 +576,9 @@ async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<H
}
#[post("/organizations/<org_id>/delete", format = "application/json")]
async fn delete_organization(org_id: OrganizationId, _token: AdminToken, conn: DbConn) -> EmptyResult {
let org = Organization::find_by_uuid(&org_id, &conn).await.map_res("Organization doesn't exist")?;
org.delete(&conn).await
async fn delete_organization(org_id: OrganizationId, _token: AdminToken, mut conn: DbConn) -> EmptyResult {
let org = Organization::find_by_uuid(&org_id, &mut conn).await.map_res("Organization doesn't exist")?;
org.delete(&mut conn).await
}
#[derive(Deserialize)]
@ -690,7 +666,7 @@ async fn get_ntp_time(has_http_access: bool) -> String {
}
#[get("/diagnostics")]
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) -> ApiResult<Html<String>> {
use chrono::prelude::*;
use std::net::ToSocketAddrs;
@ -744,7 +720,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
"uses_proxy": uses_proxy,
"enable_websocket": &CONFIG.enable_websocket(),
"db_type": *DB_TYPE,
"db_version": get_sql_server_version(&conn).await,
"db_version": get_sql_server_version(&mut conn).await,
"admin_url": format!("{}/diagnostics", admin_url()),
"overrides": &CONFIG.get_overrides().join(", "),
"host_arch": env::consts::ARCH,
@ -788,9 +764,9 @@ async fn delete_config(_token: AdminToken) -> EmptyResult {
}
#[post("/config/backup_db", format = "application/json")]
fn backup_db(_token: AdminToken) -> ApiResult<String> {
async fn backup_db(_token: AdminToken, mut conn: DbConn) -> ApiResult<String> {
if *CAN_BACKUP {
match backup_sqlite() {
match backup_database(&mut conn).await {
Ok(f) => Ok(format!("Backup to '{f}' was successful")),
Err(e) => err!(format!("Backup was unsuccessful {e}")),
}
@ -813,7 +789,11 @@ impl<'r> FromRequest<'r> for AdminToken {
_ => err_handler!("Error getting Client IP"),
};
if !CONFIG.disable_admin_token() {
if CONFIG.disable_admin_token() {
Outcome::Success(Self {
ip,
})
} else {
let cookies = request.cookies();
let access_token = match cookies.get(COOKIE_NAME) {
@ -837,10 +817,10 @@ impl<'r> FromRequest<'r> for AdminToken {
error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
return Outcome::Error((Status::Unauthorized, "Session expired"));
}
}
Outcome::Success(Self {
ip,
})
Outcome::Success(Self {
ip,
})
}
}
}

View File

@ -7,20 +7,13 @@ use serde_json::Value;
use crate::{
api::{
core::{accept_org_invite, log_user_event, two_factor::email},
master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, ApiResult, EmptyResult,
JsonResult, Notify, PasswordOrOtpData, UpdateType,
core::{log_user_event, two_factor::email},
master_password_policy, register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult,
Notify, PasswordOrOtpData, UpdateType,
},
auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers},
crypto,
db::{
models::{
AuthRequest, AuthRequestId, Cipher, CipherId, Device, DeviceId, DeviceType, EmergencyAccess,
EmergencyAccessId, EventType, Folder, FolderId, Invitation, Membership, MembershipId, OrgPolicy,
OrgPolicyType, Organization, OrganizationId, Send, SendId, User, UserId, UserKdfType,
},
DbConn,
},
db::{models::*, DbConn},
mail,
util::{format_date, NumberOrString},
CONFIG,
@ -41,7 +34,6 @@ pub fn routes() -> Vec<rocket::Route> {
get_public_keys,
post_keys,
post_password,
post_set_password,
post_kdf,
post_rotatekey,
post_sstamp,
@ -66,36 +58,23 @@ pub fn routes() -> Vec<rocket::Route> {
put_device_token,
put_clear_device_token,
post_clear_device_token,
get_tasks,
post_auth_request,
get_auth_request,
put_auth_request,
get_auth_request_response,
get_auth_requests,
get_auth_requests_pending,
]
}
#[derive(Debug, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct KDFData {
#[serde(alias = "kdfType")]
kdf: i32,
#[serde(alias = "iterations")]
kdf_iterations: i32,
#[serde(alias = "memory")]
kdf_memory: Option<i32>,
#[serde(alias = "parallelism")]
kdf_parallelism: Option<i32>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterData {
email: String,
#[serde(flatten)]
kdf: KDFData,
kdf: Option<i32>,
kdf_iterations: Option<i32>,
kdf_memory: Option<i32>,
kdf_parallelism: Option<i32>,
#[serde(alias = "userSymmetricKey")]
key: String,
@ -118,19 +97,6 @@ pub struct RegisterData {
org_invite_token: Option<String>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SetPasswordData {
#[serde(flatten)]
kdf: KDFData,
key: String,
keys: Option<KeysData>,
master_password_hash: String,
master_password_hint: Option<String>,
org_identifier: Option<String>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct KeysData {
@ -155,7 +121,7 @@ fn enforce_password_hint_setting(password_hint: &Option<String>) -> EmptyResult
}
Ok(())
}
async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &DbConn) -> bool {
async fn is_email_2fa_required(member_id: Option<MembershipId>, conn: &mut DbConn) -> bool {
if !CONFIG._enable_email_2fa() {
return false;
}
@ -173,7 +139,7 @@ async fn register(data: Json<RegisterData>, conn: DbConn) -> JsonResult {
_register(data, false, conn).await
}
pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn: DbConn) -> JsonResult {
pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut conn: DbConn) -> JsonResult {
let mut data: RegisterData = data.into_inner();
let email = data.email.to_lowercase();
@ -255,7 +221,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
let password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&password_hint)?;
let mut user = match User::find_by_mail(&email, &conn).await {
let mut user = match User::find_by_mail(&email, &mut conn).await {
Some(user) => {
if !user.password_hash.is_empty() {
err!("Registration not allowed or user already exists")
@ -270,12 +236,15 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
} else {
err!("Registration email does not match invite email")
}
} else if Invitation::take(&email, &conn).await {
Membership::accept_user_invitations(&user.uuid, &conn).await?;
} else if Invitation::take(&email, &mut conn).await {
for membership in Membership::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() {
membership.status = MembershipStatus::Accepted as i32;
membership.save(&mut conn).await?;
}
user
} else if CONFIG.is_signup_allowed(&email)
|| (CONFIG.emergency_access_allowed()
&& EmergencyAccess::find_invited_by_grantee_email(&email, &conn).await.is_some())
&& EmergencyAccess::find_invited_by_grantee_email(&email, &mut conn).await.is_some())
{
user
} else {
@ -286,11 +255,11 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
// Order is important here; the invitation check must come first
// because the vaultwarden admin can invite anyone, regardless
// of other signup restrictions.
if Invitation::take(&email, &conn).await
if Invitation::take(&email, &mut conn).await
|| CONFIG.is_signup_allowed(&email)
|| pending_emergency_access.is_some()
{
User::new(&email, None)
User::new(email.clone())
} else {
err!("Registration not allowed or user already exists")
}
@ -298,9 +267,18 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
};
// Make sure we don't leave a lingering invitation.
Invitation::take(&email, &conn).await;
Invitation::take(&email, &mut conn).await;
set_kdf_data(&mut user, &data.kdf)?;
if let Some(client_kdf_type) = data.kdf {
user.client_kdf_type = client_kdf_type;
}
if let Some(client_kdf_iter) = data.kdf_iterations {
user.client_kdf_iter = client_kdf_iter;
}
user.client_kdf_memory = data.kdf_memory;
user.client_kdf_parallelism = data.kdf_parallelism;
user.set_password(&data.master_password_hash, Some(data.key), true, None);
user.password_hint = password_hint;
@ -329,17 +307,17 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
error!("Error sending welcome email: {e:#?}");
}
if email_verified && is_email_2fa_required(data.organization_user_id, &conn).await {
email::activate_email_2fa(&user, &conn).await.ok();
if email_verified && is_email_2fa_required(data.organization_user_id, &mut conn).await {
email::activate_email_2fa(&user, &mut conn).await.ok();
}
}
user.save(&conn).await?;
user.save(&mut conn).await?;
// accept any open emergency access invitations
if !CONFIG.mail_enabled() && CONFIG.emergency_access_allowed() {
for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &conn).await {
emergency_invite.accept_invite(&user.uuid, &user.email, &conn).await.ok();
for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &mut conn).await {
emergency_invite.accept_invite(&user.uuid, &user.email, &mut conn).await.ok();
}
}
@ -349,71 +327,9 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
})))
}
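
One substantive change inside `_register` above: `main` folds the invited-membership loop into a `Membership::accept_user_invitations` helper, where 1.34.2 spells it out inline. A sketch (stand-in types, discriminant values assumed, synchronous for brevity) of what that helper does:

enum MembershipStatus { Invited, Accepted, Confirmed } // discriminants assumed

struct Membership { status: i32 }

// Flip every invited membership of the user to Accepted and persist it.
fn accept_user_invitations(invited: &mut [Membership]) {
    for membership in invited.iter_mut() {
        membership.status = MembershipStatus::Accepted as i32;
        // the real code persists each row: membership.save(&mut conn).await?
    }
}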
#[post("/accounts/set-password", data = "<data>")]
async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
let data: SetPasswordData = data.into_inner();
let mut user = headers.user;
if user.private_key.is_some() {
err!("Account already initialized, cannot set password")
}
// Check against the password hint setting here so if it fails,
// the user can retry without losing their invitation below.
let password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&password_hint)?;
set_kdf_data(&mut user, &data.kdf)?;
user.set_password(
&data.master_password_hash,
Some(data.key),
false,
Some(vec![String::from("revision_date")]), // We need to allow revision-date to use the old security_timestamp
);
user.password_hint = password_hint;
if let Some(keys) = data.keys {
user.private_key = Some(keys.encrypted_private_key);
user.public_key = Some(keys.public_key);
}
if let Some(identifier) = data.org_identifier {
if identifier != crate::sso::FAKE_IDENTIFIER && identifier != crate::api::admin::FAKE_ADMIN_UUID {
let org = match Organization::find_by_uuid(&identifier.into(), &conn).await {
None => err!("Failed to retrieve the associated organization"),
Some(org) => org,
};
let membership = match Membership::find_by_user_and_org(&user.uuid, &org.uuid, &conn).await {
None => err!("Failed to retrieve the invitation"),
Some(org) => org,
};
accept_org_invite(&user, membership, None, &conn).await?;
}
}
if CONFIG.mail_enabled() {
mail::send_welcome(&user.email.to_lowercase()).await?;
} else {
Membership::accept_user_invitations(&user.uuid, &conn).await?;
}
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &conn)
.await;
user.save(&conn).await?;
Ok(Json(json!({
"object": "set-password",
"captchaBypassToken": "",
})))
}
#[get("/accounts/profile")]
async fn profile(headers: Headers, conn: DbConn) -> Json<Value> {
Json(headers.user.to_json(&conn).await)
async fn profile(headers: Headers, mut conn: DbConn) -> Json<Value> {
Json(headers.user.to_json(&mut conn).await)
}
#[derive(Debug, Deserialize)]
@ -429,7 +345,7 @@ async fn put_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) ->
}
#[post("/accounts/profile", data = "<data>")]
async fn post_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
async fn post_profile(data: Json<ProfileData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: ProfileData = data.into_inner();
// Check if the length of the username exceeds 50 characters (Same as upstream Bitwarden)
@ -441,8 +357,8 @@ async fn post_profile(data: Json<ProfileData>, headers: Headers, conn: DbConn) -
let mut user = headers.user;
user.name = data.name;
user.save(&conn).await?;
Ok(Json(user.to_json(&conn).await))
user.save(&mut conn).await?;
Ok(Json(user.to_json(&mut conn).await))
}
#[derive(Deserialize)]
@ -452,7 +368,7 @@ struct AvatarData {
}
#[put("/accounts/avatar", data = "<data>")]
async fn put_avatar(data: Json<AvatarData>, headers: Headers, conn: DbConn) -> JsonResult {
async fn put_avatar(data: Json<AvatarData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: AvatarData = data.into_inner();
// It looks like it only supports the 6-digit hex color format.
@ -467,13 +383,13 @@ async fn put_avatar(data: Json<AvatarData>, headers: Headers, conn: DbConn) -> J
let mut user = headers.user;
user.avatar_color = data.avatar_color;
user.save(&conn).await?;
Ok(Json(user.to_json(&conn).await))
user.save(&mut conn).await?;
Ok(Json(user.to_json(&mut conn).await))
}
#[get("/users/<user_id>/public-key")]
async fn get_public_keys(user_id: UserId, _headers: Headers, conn: DbConn) -> JsonResult {
let user = match User::find_by_uuid(&user_id, &conn).await {
async fn get_public_keys(user_id: UserId, _headers: Headers, mut conn: DbConn) -> JsonResult {
let user = match User::find_by_uuid(&user_id, &mut conn).await {
Some(user) if user.public_key.is_some() => user,
Some(_) => err_code!("User has no public_key", Status::NotFound.code),
None => err_code!("User doesn't exist", Status::NotFound.code),
@ -487,7 +403,7 @@ async fn get_public_keys(user_id: UserId, _headers: Headers, conn: DbConn) -> Js
}
#[post("/accounts/keys", data = "<data>")]
async fn post_keys(data: Json<KeysData>, headers: Headers, conn: DbConn) -> JsonResult {
async fn post_keys(data: Json<KeysData>, headers: Headers, mut conn: DbConn) -> JsonResult {
let data: KeysData = data.into_inner();
let mut user = headers.user;
@ -495,7 +411,7 @@ async fn post_keys(data: Json<KeysData>, headers: Headers, conn: DbConn) -> Json
user.private_key = Some(data.encrypted_private_key);
user.public_key = Some(data.public_key);
user.save(&conn).await?;
user.save(&mut conn).await?;
Ok(Json(json!({
"privateKey": user.private_key,
@ -514,7 +430,7 @@ struct ChangePassData {
}
#[post("/accounts/password", data = "<data>")]
async fn post_password(data: Json<ChangePassData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
async fn post_password(data: Json<ChangePassData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: ChangePassData = data.into_inner();
let mut user = headers.user;
@ -525,7 +441,7 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, conn: DbCon
user.password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&user.password_hint)?;
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &conn)
log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn)
.await;
user.set_password(
@ -540,17 +456,38 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, conn: DbCon
]),
);
let save_result = user.save(&conn).await;
let save_result = user.save(&mut conn).await;
// Prevent logging out the client from which the user called this endpoint.
// Logging that client out would cause issues on the client side.
// Passing the device uuid prevents this.
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
save_result
}
fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
kdf: i32,
kdf_iterations: i32,
kdf_memory: Option<i32>,
kdf_parallelism: Option<i32>,
master_password_hash: String,
new_master_password_hash: String,
key: String,
}
#[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: ChangeKdfData = data.into_inner();
let mut user = headers.user;
if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
err!("PBKDF2 KDF iterations must be at least 100000.")
}
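
`post_kdf` (the 1.34.2 shape above) rejects weak parameters before applying them. The floor check, isolated as a testable sketch; the enum discriminants are assumptions matching upstream Bitwarden's values:

enum UserKdfType { Pbkdf2 = 0, Argon2id = 1 } // values assumed

fn validate_kdf(kdf: i32, iterations: i32) -> Result<(), &'static str> {
    // Mirrors the handler above: PBKDF2 below 100k iterations is refused.
    if kdf == UserKdfType::Pbkdf2 as i32 && iterations < 100_000 {
        return Err("PBKDF2 KDF iterations must be at least 100000.");
    }
    Ok(())
}

// e.g. validate_kdf(UserKdfType::Pbkdf2 as i32, 600_000).is_ok() == true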
@ -581,68 +518,10 @@ fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
}
user.client_kdf_iter = data.kdf_iterations;
user.client_kdf_type = data.kdf;
user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
let save_result = user.save(&mut conn).await;
Ok(())
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct AuthenticationData {
salt: String,
kdf: KDFData,
master_password_authentication_hash: String,
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct UnlockData {
salt: String,
kdf: KDFData,
master_key_wrapped_user_key: String,
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
new_master_password_hash: String,
key: String,
authentication_data: AuthenticationData,
unlock_data: UnlockData,
master_password_hash: String,
}
#[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: ChangeKdfData = data.into_inner();
if !headers.user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
if data.authentication_data.kdf != data.unlock_data.kdf {
err!("KDF settings must be equal for authentication and unlock")
}
if headers.user.email != data.authentication_data.salt || headers.user.email != data.unlock_data.salt {
err!("Invalid master password salt")
}
let mut user = headers.user;
set_kdf_data(&mut user, &data.unlock_data.kdf)?;
user.set_password(
&data.authentication_data.master_password_authentication_hash,
Some(data.unlock_data.master_key_wrapped_user_key),
true,
None,
);
let save_result = user.save(&conn).await;
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
save_result
}
@ -797,7 +676,7 @@ fn validate_keydata(
}
#[post("/accounts/key-management/rotate-user-account-keys", data = "<data>")]
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
async fn post_rotatekey(data: Json<KeyData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
// TODO: See if we can wrap everything within a SQL Transaction. If something fails it should revert everything.
let data: KeyData = data.into_inner();
@ -815,13 +694,13 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
// TODO: Ideally we'd do everything after this point in a single transaction.
let mut existing_ciphers = Cipher::find_owned_by_user(user_id, &conn).await;
let mut existing_folders = Folder::find_by_user(user_id, &conn).await;
let mut existing_emergency_access = EmergencyAccess::find_all_confirmed_by_grantor_uuid(user_id, &conn).await;
let mut existing_memberships = Membership::find_by_user(user_id, &conn).await;
let mut existing_ciphers = Cipher::find_owned_by_user(user_id, &mut conn).await;
let mut existing_folders = Folder::find_by_user(user_id, &mut conn).await;
let mut existing_emergency_access = EmergencyAccess::find_all_by_grantor_uuid(user_id, &mut conn).await;
let mut existing_memberships = Membership::find_by_user(user_id, &mut conn).await;
// We only rotate the reset password key if it is set.
existing_memberships.retain(|m| m.reset_password_key.is_some());
let mut existing_sends = Send::find_by_user(user_id, &conn).await;
let mut existing_sends = Send::find_by_user(user_id, &mut conn).await;
validate_keydata(
&data,
@ -843,7 +722,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
};
saved_folder.name = folder_data.name;
saved_folder.save(&conn).await?
saved_folder.save(&mut conn).await?
}
}
@ -856,7 +735,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
};
saved_emergency_access.key_encrypted = Some(emergency_access_data.key_encrypted);
saved_emergency_access.save(&conn).await?
saved_emergency_access.save(&mut conn).await?
}
// Update reset password data
@ -868,7 +747,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
};
membership.reset_password_key = Some(reset_password_data.reset_password_key);
membership.save(&conn).await?
membership.save(&mut conn).await?
}
// Update send data
@ -877,7 +756,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
err!("Send doesn't exist")
};
update_send_from_data(send, send_data, &headers, &conn, &nt, UpdateType::None).await?;
update_send_from_data(send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?;
}
// Update cipher data
@ -893,7 +772,7 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
// Prevent triggering cipher updates via WebSockets by setting UpdateType::None
// The user sessions are invalidated because all the ciphers were re-encrypted, so triggering an update could cause issues.
// We force the user to log out after the user record has been saved to try and prevent these issues.
update_cipher_from_data(saved_cipher, cipher_data, &headers, None, &conn, &nt, UpdateType::None).await?
update_cipher_from_data(saved_cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?
}
}
@ -908,28 +787,28 @@ async fn post_rotatekey(data: Json<KeyData>, headers: Headers, conn: DbConn, nt:
None,
);
let save_result = user.save(&conn).await;
let save_result = user.save(&mut conn).await;
// Prevent logging out the client from which the user called this endpoint.
// Logging that client out would cause issues on the client side.
// Passing the device uuid prevents this.
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
nt.send_logout(&user, Some(headers.device.uuid.clone()), &mut conn).await;
save_result
}
#[post("/accounts/security-stamp", data = "<data>")]
async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
async fn post_sstamp(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user;
data.validate(&user, true, &conn).await?;
data.validate(&user, true, &mut conn).await?;
Device::delete_all_by_user(&user.uuid, &conn).await?;
Device::delete_all_by_user(&user.uuid, &mut conn).await?;
user.reset_security_stamp();
let save_result = user.save(&conn).await;
let save_result = user.save(&mut conn).await;
nt.send_logout(&user, None, &conn).await;
nt.send_logout(&user, None, &mut conn).await;
save_result
}
@ -942,7 +821,7 @@ struct EmailTokenData {
}
#[post("/accounts/email-token", data = "<data>")]
async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, conn: DbConn) -> EmptyResult {
async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
if !CONFIG.email_change_allowed() {
err!("Email change is not allowed.");
}
@ -954,20 +833,10 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, conn: Db
err!("Invalid password")
}
if let Some(existing_user) = User::find_by_mail(&data.new_email, &conn).await {
if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
if CONFIG.mail_enabled() {
// check if existing_user has already registered
if existing_user.password_hash.is_empty() {
// inform an invited user about how to delete their temporary account if the
// request was done intentionally and they want to update their mail address
if let Err(e) = mail::send_change_email_invited(&data.new_email, &user.email).await {
error!("Error sending change-email-invited email: {e:#?}");
}
} else {
// inform existing user about the failed attempt to change their mail address
if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
error!("Error sending change-email-existing email: {e:#?}");
}
if let Err(e) = mail::send_change_email_existing(&data.new_email, &user.email).await {
error!("Error sending change-email-existing email: {e:#?}");
}
}
err!("Email already in use");
@ -989,7 +858,7 @@ async fn post_email_token(data: Json<EmailTokenData>, headers: Headers, conn: Db
user.email_new = Some(data.new_email);
user.email_new_token = Some(token);
user.save(&conn).await
user.save(&mut conn).await
}
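
The two email endpoints above form a two-step flow: `post_email_token` parks the pending address and a token on the user row; `post_email` must later present a matching token. A self-contained sketch (field names from the handlers; everything else assumed):

struct PendingEmailUser {
    email: String,
    email_new: Option<String>,
    email_new_token: Option<String>,
}

impl PendingEmailUser {
    // Step 1, post_email_token: record the pending address and its token.
    fn request_email_change(&mut self, new_email: &str, token: &str) {
        self.email_new = Some(new_email.to_string());
        self.email_new_token = Some(token.to_string());
    }

    // Step 2, post_email: swap the address only if the token matches.
    fn confirm_email_change(&mut self, token: &str) -> Result<(), &'static str> {
        if self.email_new_token.as_deref() != Some(token) {
            return Err("Token doesn't match");
        }
        match self.email_new.take() {
            Some(new_email) => {
                self.email = new_email;
                self.email_new_token = None;
                Ok(())
            }
            None => Err("No email change pending"),
        }
    }
}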
#[derive(Deserialize)]
@ -1004,7 +873,7 @@ struct ChangeEmailData {
}
#[post("/accounts/email", data = "<data>")]
async fn post_email(data: Json<ChangeEmailData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
async fn post_email(data: Json<ChangeEmailData>, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult {
if !CONFIG.email_change_allowed() {
err!("Email change is not allowed.");
}
@ -1016,7 +885,7 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, conn: DbConn,
err!("Invalid password")
}
if User::find_by_mail(&data.new_email, &conn).await.is_some() {
if User::find_by_mail(&data.new_email, &mut conn).await.is_some() {
err!("Email already in use");
}
@ -1050,9 +919,9 @@ async fn post_email(data: Json<ChangeEmailData>, headers: Headers, conn: DbConn,
user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
let save_result = user.save(&conn).await;
let save_result = user.save(&mut conn).await;
nt.send_logout(&user, None, &conn).await;
nt.send_logout(&user, None, &mut conn).await;
save_result
}
@ -1080,10 +949,10 @@ struct VerifyEmailTokenData {
}
#[post("/accounts/verify-email-token", data = "<data>")]
async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, conn: DbConn) -> EmptyResult {
async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, mut conn: DbConn) -> EmptyResult {
let data: VerifyEmailTokenData = data.into_inner();
let Some(mut user) = User::find_by_uuid(&data.user_id, &conn).await else {
let Some(mut user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
err!("User doesn't exist")
};
@ -1096,7 +965,7 @@ async fn post_verify_email_token(data: Json<VerifyEmailTokenData>, conn: DbConn)
user.verified_at = Some(Utc::now().naive_utc());
user.last_verifying_at = None;
user.login_verify_count = 0;
if let Err(e) = user.save(&conn).await {
if let Err(e) = user.save(&mut conn).await {
error!("Error saving email verification: {e:#?}");
}
@ -1110,11 +979,11 @@ struct DeleteRecoverData {
}
#[post("/accounts/delete-recover", data = "<data>")]
async fn post_delete_recover(data: Json<DeleteRecoverData>, conn: DbConn) -> EmptyResult {
async fn post_delete_recover(data: Json<DeleteRecoverData>, mut conn: DbConn) -> EmptyResult {
let data: DeleteRecoverData = data.into_inner();
if CONFIG.mail_enabled() {
if let Some(user) = User::find_by_mail(&data.email, &conn).await {
if let Some(user) = User::find_by_mail(&data.email, &mut conn).await {
if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await {
error!("Error sending delete account email: {e:#?}");
}
@ -1137,21 +1006,21 @@ struct DeleteRecoverTokenData {
}
#[post("/accounts/delete-recover-token", data = "<data>")]
async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, conn: DbConn) -> EmptyResult {
async fn post_delete_recover_token(data: Json<DeleteRecoverTokenData>, mut conn: DbConn) -> EmptyResult {
let data: DeleteRecoverTokenData = data.into_inner();
let Ok(claims) = decode_delete(&data.token) else {
err!("Invalid claim")
};
let Some(user) = User::find_by_uuid(&data.user_id, &conn).await else {
let Some(user) = User::find_by_uuid(&data.user_id, &mut conn).await else {
err!("User doesn't exist")
};
if claims.sub != *user.uuid {
err!("Invalid claim");
}
user.delete(&conn).await
user.delete(&mut conn).await
}
#[post("/accounts/delete", data = "<data>")]
@ -1160,13 +1029,13 @@ async fn post_delete_account(data: Json<PasswordOrOtpData>, headers: Headers, co
}
#[delete("/accounts", data = "<data>")]
async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers, conn: DbConn) -> EmptyResult {
async fn delete_account(data: Json<PasswordOrOtpData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
let data: PasswordOrOtpData = data.into_inner();
let user = headers.user;
data.validate(&user, true, &conn).await?;
data.validate(&user, true, &mut conn).await?;
user.delete(&conn).await
user.delete(&mut conn).await
}
#[get("/accounts/revision-date")]
@ -1182,7 +1051,7 @@ struct PasswordHintData {
}
#[post("/accounts/password-hint", data = "<data>")]
async fn password_hint(data: Json<PasswordHintData>, conn: DbConn) -> EmptyResult {
async fn password_hint(data: Json<PasswordHintData>, mut conn: DbConn) -> EmptyResult {
if !CONFIG.password_hints_allowed() || (!CONFIG.mail_enabled() && !CONFIG.show_password_hint()) {
err!("This server is not configured to provide password hints.");
}
@ -1192,7 +1061,7 @@ async fn password_hint(data: Json<PasswordHintData>, conn: DbConn) -> EmptyResul
let data: PasswordHintData = data.into_inner();
let email = &data.email;
match User::find_by_mail(email, &conn).await {
match User::find_by_mail(email, &mut conn).await {
None => {
// To prevent user enumeration, act as if the user exists.
if CONFIG.mail_enabled() {
@ -1234,10 +1103,10 @@ async fn prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
_prelogin(data, conn).await
}
pub async fn _prelogin(data: Json<PreloginData>, conn: DbConn) -> Json<Value> {
pub async fn _prelogin(data: Json<PreloginData>, mut conn: DbConn) -> Json<Value> {
let data: PreloginData = data.into_inner();
let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &conn).await {
let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &mut conn).await {
Some(user) => (user.client_kdf_type, user.client_kdf_iter, user.client_kdf_memory, user.client_kdf_parallelism),
None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None),
};
@ -1257,44 +1126,29 @@ struct SecretVerificationRequest {
master_password_hash: String,
}
// Change the KDF Iterations if necessary
pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &DbConn) -> ApiResult<()> {
if user.password_iterations < CONFIG.password_iterations() {
user.password_iterations = CONFIG.password_iterations();
user.set_password(pwd_hash, None, false, None);
if let Err(e) = user.save(conn).await {
error!("Error updating user: {e:#?}");
}
}
Ok(())
}
#[post("/accounts/verify-password", data = "<data>")]
async fn verify_password(data: Json<SecretVerificationRequest>, headers: Headers, conn: DbConn) -> JsonResult {
let data: SecretVerificationRequest = data.into_inner();
let mut user = headers.user;
let user = headers.user;
if !user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
kdf_upgrade(&mut user, &data.master_password_hash, &conn).await?;
Ok(Json(master_password_policy(&user, &conn).await))
}
async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, conn: DbConn) -> JsonResult {
async fn _api_key(data: Json<PasswordOrOtpData>, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult {
use crate::util::format_date;
let data: PasswordOrOtpData = data.into_inner();
let mut user = headers.user;
data.validate(&user, true, &conn).await?;
data.validate(&user, true, &mut conn).await?;
if rotate || user.api_key.is_none() {
user.api_key = Some(crypto::generate_api_key());
user.save(&conn).await.expect("Error saving API key");
user.save(&mut conn).await.expect("Error saving API key");
}
Ok(Json(json!({
@ -1315,12 +1169,11 @@ async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: D
}
#[get("/devices/knowndevice")]
async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResult {
let result = if let Some(user) = User::find_by_mail(&device.email, &conn).await {
Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some()
} else {
false
};
async fn get_known_device(device: KnownDevice, mut conn: DbConn) -> JsonResult {
let mut result = false;
if let Some(user) = User::find_by_mail(&device.email, &mut conn).await {
result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &mut conn).await.is_some();
}
Ok(Json(json!(result)))
}
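
Both shapes of `get_known_device` compute the same boolean: does the device id belong to the user behind this email? The difference is expression style (`main`) versus a mutable flag (1.34.2). A synchronous sketch of the expression form, with the two async lookups reduced to closures:

fn known_device(
    find_user: impl Fn(&str) -> Option<String>, // plays User::find_by_mail
    device_belongs_to: impl Fn(&str) -> bool,   // plays Device::find_by_uuid_and_user
    email: &str,
) -> bool {
    find_user(email).map(|user_id| device_belongs_to(&user_id)).unwrap_or(false)
}

`Option::is_some_and` would compress this further on recent Rust, but the lookups in the real handler are async, which is why both branches write the check out.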
@ -1362,8 +1215,8 @@ impl<'r> FromRequest<'r> for KnownDevice {
}
#[get("/devices")]
async fn get_all_devices(headers: Headers, conn: DbConn) -> JsonResult {
let devices = Device::find_with_auth_request_by_user(&headers.user.uuid, &conn).await;
async fn get_all_devices(headers: Headers, mut conn: DbConn) -> JsonResult {
let devices = Device::find_with_auth_request_by_user(&headers.user.uuid, &mut conn).await;
let devices = devices.iter().map(|device| device.to_json()).collect::<Vec<Value>>();
Ok(Json(json!({
@ -1374,8 +1227,8 @@ async fn get_all_devices(headers: Headers, conn: DbConn) -> JsonResult {
}
#[get("/devices/identifier/<device_id>")]
async fn get_device(device_id: DeviceId, headers: Headers, conn: DbConn) -> JsonResult {
let Some(device) = Device::find_by_uuid_and_user(&device_id, &headers.user.uuid, &conn).await else {
async fn get_device(device_id: DeviceId, headers: Headers, mut conn: DbConn) -> JsonResult {
let Some(device) = Device::find_by_uuid_and_user(&device_id, &headers.user.uuid, &mut conn).await else {
err!("No device found");
};
Ok(Json(device.to_json()))
@ -1393,11 +1246,17 @@ async fn post_device_token(device_id: DeviceId, data: Json<PushToken>, headers:
}
#[put("/devices/identifier/<device_id>/token", data = "<data>")]
async fn put_device_token(device_id: DeviceId, data: Json<PushToken>, headers: Headers, conn: DbConn) -> EmptyResult {
async fn put_device_token(
device_id: DeviceId,
data: Json<PushToken>,
headers: Headers,
mut conn: DbConn,
) -> EmptyResult {
let data = data.into_inner();
let token = data.push_token;
let Some(mut device) = Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &conn).await else {
let Some(mut device) = Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await
else {
err!(format!("Error: device {device_id} should be present before a token can be assigned"))
};
@ -1410,17 +1269,17 @@ async fn put_device_token(device_id: DeviceId, data: Json<PushToken>, headers: H
}
device.push_token = Some(token);
if let Err(e) = device.save(true, &conn).await {
if let Err(e) = device.save(&mut conn).await {
err!(format!("An error occurred while trying to save the device push token: {e}"));
}
register_push_device(&mut device, &conn).await?;
register_push_device(&mut device, &mut conn).await?;
Ok(())
}
#[put("/devices/identifier/<device_id>/clear-token")]
async fn put_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResult {
async fn put_clear_device_token(device_id: DeviceId, mut conn: DbConn) -> EmptyResult {
// This only clears the push token
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Api/Controllers/DevicesController.cs#L215
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/Services/Implementations/DeviceService.cs#L37
@ -1432,8 +1291,8 @@ async fn put_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResul
return Ok(());
}
if let Some(device) = Device::find_by_uuid(&device_id, &conn).await {
Device::clear_push_token_by_uuid(&device_id, &conn).await?;
if let Some(device) = Device::find_by_uuid(&device_id, &mut conn).await {
Device::clear_push_token_by_uuid(&device_id, &mut conn).await?;
unregister_push_device(&device.push_uuid).await?;
}
@ -1446,14 +1305,6 @@ async fn post_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResu
put_clear_device_token(device_id, conn).await
}
#[get("/tasks")]
fn get_tasks(_client_headers: ClientHeaders) -> JsonResult {
Ok(Json(json!({
"data": [],
"object": "list"
})))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AuthRequestRequest {
@ -1470,17 +1321,17 @@ struct AuthRequestRequest {
async fn post_auth_request(
data: Json<AuthRequestRequest>,
client_headers: ClientHeaders,
conn: DbConn,
mut conn: DbConn,
nt: Notify<'_>,
) -> JsonResult {
let data = data.into_inner();
let Some(user) = User::find_by_mail(&data.email, &conn).await else {
let Some(user) = User::find_by_mail(&data.email, &mut conn).await else {
err!("AuthRequest doesn't exist", "User not found")
};
// Validate device uuid and type
let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &conn).await {
let device = match Device::find_by_uuid_and_user(&data.device_identifier, &user.uuid, &mut conn).await {
Some(device) if device.atype == client_headers.device_type => device,
_ => err!("AuthRequest doesn't exist", "Device verification failed"),
};
@ -1493,16 +1344,16 @@ async fn post_auth_request(
data.access_code,
data.public_key,
);
auth_request.save(&conn).await?;
auth_request.save(&mut conn).await?;
nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &conn).await;
nt.send_auth_request(&user.uuid, &auth_request.uuid, &device, &mut conn).await;
log_user_event(
EventType::UserRequestedDeviceApproval as i32,
&user.uuid,
client_headers.device_type,
&client_headers.ip.ip,
&conn,
&mut conn,
)
.await;
@ -1522,8 +1373,8 @@ async fn post_auth_request(
}
#[get("/auth-requests/<auth_request_id>")]
async fn get_auth_request(auth_request_id: AuthRequestId, headers: Headers, conn: DbConn) -> JsonResult {
let Some(auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &conn).await
async fn get_auth_request(auth_request_id: AuthRequestId, headers: Headers, mut conn: DbConn) -> JsonResult {
let Some(auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &mut conn).await
else {
err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
};
@ -1559,12 +1410,13 @@ async fn put_auth_request(
auth_request_id: AuthRequestId,
data: Json<AuthResponseRequest>,
headers: Headers,
conn: DbConn,
mut conn: DbConn,
ant: AnonymousNotify<'_>,
nt: Notify<'_>,
) -> JsonResult {
let data = data.into_inner();
let Some(mut auth_request) = AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &conn).await
let Some(mut auth_request) =
AuthRequest::find_by_uuid_and_user(&auth_request_id, &headers.user.uuid, &mut conn).await
else {
err!("AuthRequest doesn't exist", "Record not found or user uuid does not match")
};
@ -1586,28 +1438,28 @@ async fn put_auth_request(
auth_request.master_password_hash = data.master_password_hash;
auth_request.response_device_id = Some(data.device_identifier.clone());
auth_request.response_date = Some(response_date);
auth_request.save(&conn).await?;
auth_request.save(&mut conn).await?;
ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await;
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &conn).await;
nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, &headers.device, &mut conn).await;
log_user_event(
EventType::OrganizationUserApprovedAuthRequest as i32,
&headers.user.uuid,
headers.device.atype,
&headers.ip.ip,
&conn,
&mut conn,
)
.await;
} else {
// If denied, there's no reason to keep the request
auth_request.delete(&conn).await?;
auth_request.delete(&mut conn).await?;
log_user_event(
EventType::OrganizationUserRejectedAuthRequest as i32,
&headers.user.uuid,
headers.device.atype,
&headers.ip.ip,
&conn,
&mut conn,
)
.await;
}
@ -1632,9 +1484,9 @@ async fn get_auth_request_response(
auth_request_id: AuthRequestId,
code: &str,
client_headers: ClientHeaders,
conn: DbConn,
mut conn: DbConn,
) -> JsonResult {
let Some(auth_request) = AuthRequest::find_by_uuid(&auth_request_id, &conn).await else {
let Some(auth_request) = AuthRequest::find_by_uuid(&auth_request_id, &mut conn).await else {
err!("AuthRequest doesn't exist", "User not found")
};
@ -1662,16 +1514,9 @@ async fn get_auth_request_response(
})))
}
// Now unused but not yet removed
// cf https://github.com/bitwarden/clients/blob/9b2fbdba1c028bf3394064609630d2ec224baefa/libs/common/src/services/api.service.ts#L245
#[get("/auth-requests")]
async fn get_auth_requests(headers: Headers, conn: DbConn) -> JsonResult {
get_auth_requests_pending(headers, conn).await
}
#[get("/auth-requests/pending")]
async fn get_auth_requests_pending(headers: Headers, conn: DbConn) -> JsonResult {
let auth_requests = AuthRequest::find_by_user(&headers.user.uuid, &conn).await;
async fn get_auth_requests(headers: Headers, mut conn: DbConn) -> JsonResult {
let auth_requests = AuthRequest::find_by_user(&headers.user.uuid, &mut conn).await;
Ok(Json(json!({
"data": auth_requests
@ -1701,8 +1546,8 @@ async fn get_auth_requests_pending(headers: Headers, conn: DbConn) -> JsonResult
pub async fn purge_auth_requests(pool: DbPool) {
debug!("Purging auth requests");
if let Ok(conn) = pool.get().await {
AuthRequest::purge_expired_auth_requests(&conn).await;
if let Ok(mut conn) = pool.get().await {
AuthRequest::purge_expired_auth_requests(&mut conn).await;
} else {
error!("Failed to get DB connection while purging trashed ciphers")
}
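
`purge_auth_requests` follows the scheduled-job shape used throughout the codebase: take a pooled connection, sweep, and log instead of panicking if the pool is exhausted. A stand-alone sketch with stand-in types:

struct DbPool;
struct DbConn;

impl DbPool {
    // Stand-in for the real pool; can fail when no connection is available.
    async fn get(&self) -> Result<DbConn, &'static str> { Ok(DbConn) }
}

async fn purge_expired(_conn: &mut DbConn) { /* delete expired rows */ }

async fn purge_job(pool: DbPool) {
    match pool.get().await {
        Ok(mut conn) => purge_expired(&mut conn).await,
        // Log and skip this run; the scheduler will fire again.
        Err(e) => eprintln!("Failed to get DB connection while purging auth requests: {e}"),
    }
}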

File diff suppressed because it is too large

View File

@ -8,13 +8,7 @@ use crate::{
EmptyResult, JsonResult,
},
auth::{decode_emergency_access_invite, Headers},
db::{
models::{
Cipher, EmergencyAccess, EmergencyAccessId, EmergencyAccessStatus, EmergencyAccessType, Invitation,
Membership, MembershipType, OrgPolicy, TwoFactor, User, UserId,
},
DbConn, DbPool,
},
db::{models::*, DbConn, DbPool},
mail,
util::NumberOrString,
CONFIG,
@ -46,15 +40,28 @@ pub fn routes() -> Vec<Route> {
// region get
#[get("/emergency-access/trusted")]
async fn get_contacts(headers: Headers, conn: DbConn) -> Json<Value> {
let emergency_access_list = if CONFIG.emergency_access_allowed() {
EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await
} else {
Vec::new()
};
async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json<Value> {
if !CONFIG.emergency_access_allowed() {
return Json(json!({
"data": [{
"id": "",
"status": 2,
"type": 0,
"waitTimeDays": 0,
"granteeId": "",
"email": "",
"name": "NOTE: Emergency Access is disabled!",
"object": "emergencyAccessGranteeDetails",
}],
"object": "list",
"continuationToken": null
}));
}
let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await;
let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
for ea in emergency_access_list {
if let Some(grantee) = ea.to_json_grantee_details(&conn).await {
if let Some(grantee) = ea.to_json_grantee_details(&mut conn).await {
emergency_access_list_json.push(grantee)
}
}
@ -67,15 +74,15 @@ async fn get_contacts(headers: Headers, conn: DbConn) -> Json<Value> {
}
#[get("/emergency-access/granted")]
async fn get_grantees(headers: Headers, conn: DbConn) -> Json<Value> {
async fn get_grantees(headers: Headers, mut conn: DbConn) -> Json<Value> {
let emergency_access_list = if CONFIG.emergency_access_allowed() {
EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn).await
EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &mut conn).await
} else {
Vec::new()
};
let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
for ea in emergency_access_list {
emergency_access_list_json.push(ea.to_json_grantor_details(&conn).await);
emergency_access_list_json.push(ea.to_json_grantor_details(&mut conn).await);
}
Json(json!({
@ -86,12 +93,12 @@ async fn get_grantees(headers: Headers, conn: DbConn) -> Json<Value> {
}
#[get("/emergency-access/<emer_id>")]
async fn get_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn get_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await {
match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await {
Some(emergency_access) => Ok(Json(
emergency_access.to_json_grantee_details(&conn).await.expect("Grantee user should exist but does not!"),
emergency_access.to_json_grantee_details(&mut conn).await.expect("Grantee user should exist but does not!"),
)),
None => err!("Emergency access not valid."),
}
@ -124,14 +131,14 @@ async fn post_emergency_access(
emer_id: EmergencyAccessId,
data: Json<EmergencyAccessUpdateData>,
headers: Headers,
conn: DbConn,
mut conn: DbConn,
) -> JsonResult {
check_emergency_access_enabled()?;
let data: EmergencyAccessUpdateData = data.into_inner();
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -147,7 +154,7 @@ async fn post_emergency_access(
emergency_access.key_encrypted = data.key_encrypted;
}
emergency_access.save(&conn).await?;
emergency_access.save(&mut conn).await?;
Ok(Json(emergency_access.to_json()))
}
@ -156,12 +163,12 @@ async fn post_emergency_access(
// region delete
#[delete("/emergency-access/<emer_id>")]
async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> EmptyResult {
async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?;
let emergency_access = match (
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await,
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &conn).await,
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await,
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &mut conn).await,
) {
(Some(grantor_emer), None) => {
info!("Grantor deleted emergency access {emer_id}");
@ -174,7 +181,7 @@ async fn delete_emergency_access(emer_id: EmergencyAccessId, headers: Headers, c
_ => err!("Emergency access not valid."),
};
emergency_access.delete(&conn).await?;
emergency_access.delete(&mut conn).await?;
Ok(())
}
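
`delete_emergency_access` resolves the caller's role with a single tuple match over two lookups rather than nested ifs. The pattern, sketched with the lookups reduced to `Option`s:

enum Role { Grantor, Grantee }

fn resolve_role<T>(as_grantor: Option<T>, as_grantee: Option<T>) -> Result<(Role, T), &'static str> {
    match (as_grantor, as_grantee) {
        (Some(rec), None) => Ok((Role::Grantor, rec)),
        (None, Some(rec)) => Ok((Role::Grantee, rec)),
        // Neither match (or, defensively, both) is rejected, mirroring the
        // `_ => err!("Emergency access not valid.")` arm above.
        _ => Err("Emergency access not valid."),
    }
}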
@ -196,7 +203,7 @@ struct EmergencyAccessInviteData {
}
#[post("/emergency-access/invite", data = "<data>")]
async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, conn: DbConn) -> EmptyResult {
async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?;
let data: EmergencyAccessInviteData = data.into_inner();
@ -217,7 +224,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
err!("You can not set yourself as an emergency contact.")
}
let (grantee_user, new_user) = match User::find_by_mail(&email, &conn).await {
let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await {
None => {
if !CONFIG.invitations_allowed() {
err!(format!("Grantee user does not exist: {email}"))
@ -229,11 +236,11 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
if !CONFIG.mail_enabled() {
let invitation = Invitation::new(&email);
invitation.save(&conn).await?;
invitation.save(&mut conn).await?;
}
let mut user = User::new(&email, None);
user.save(&conn).await?;
let mut user = User::new(email.clone());
user.save(&mut conn).await?;
(user, true)
}
Some(user) if user.password_hash.is_empty() => (user, true),
@ -244,7 +251,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
&grantor_user.uuid,
&grantee_user.uuid,
&grantee_user.email,
&conn,
&mut conn,
)
.await
.is_some()
@ -254,7 +261,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
let mut new_emergency_access =
EmergencyAccess::new(grantor_user.uuid, grantee_user.email, emergency_access_status, new_type, wait_time_days);
new_emergency_access.save(&conn).await?;
new_emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_invite(
@ -267,18 +274,18 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
.await?;
} else if !new_user {
// if mail is not enabled, immediately accept the invitation for existing users
new_emergency_access.accept_invite(&grantee_user.uuid, &email, &conn).await?;
new_emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?;
}
Ok(())
}
#[post("/emergency-access/<emer_id>/reinvite")]
async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> EmptyResult {
async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> EmptyResult {
check_emergency_access_enabled()?;
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -291,7 +298,7 @@ async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, conn: DbCon
err!("Email not valid.")
};
let Some(grantee_user) = User::find_by_mail(&email, &conn).await else {
let Some(grantee_user) = User::find_by_mail(&email, &mut conn).await else {
err!("Grantee user not found.")
};
@ -308,10 +315,10 @@ async fn resend_invite(emer_id: EmergencyAccessId, headers: Headers, conn: DbCon
.await?;
} else if !grantee_user.password_hash.is_empty() {
// accept the invitation for existing user
emergency_access.accept_invite(&grantee_user.uuid, &email, &conn).await?;
} else if CONFIG.invitations_allowed() && Invitation::find_by_mail(&email, &conn).await.is_none() {
emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?;
} else if CONFIG.invitations_allowed() && Invitation::find_by_mail(&email, &mut conn).await.is_none() {
let invitation = Invitation::new(&email);
invitation.save(&conn).await?;
invitation.save(&mut conn).await?;
}
Ok(())
@ -328,7 +335,7 @@ async fn accept_invite(
emer_id: EmergencyAccessId,
data: Json<AcceptData>,
headers: Headers,
conn: DbConn,
mut conn: DbConn,
) -> EmptyResult {
check_emergency_access_enabled()?;
@ -342,9 +349,9 @@ async fn accept_invite(
err!("Claim email does not match current users email")
}
let grantee_user = match User::find_by_mail(&claims.email, &conn).await {
let grantee_user = match User::find_by_mail(&claims.email, &mut conn).await {
Some(user) => {
Invitation::take(&claims.email, &conn).await;
Invitation::take(&claims.email, &mut conn).await;
user
}
None => err!("Invited user not found"),
@ -353,13 +360,13 @@ async fn accept_invite(
// We need to search for the uuid in combination with the email, since we do not yet store the uuid of the grantee in the database.
// The uuid of the grantee gets stored once accepted.
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_email(&emer_id, &headers.user.email, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_email(&emer_id, &headers.user.email, &mut conn).await
else {
err!("Emergency access not valid.")
};
// get grantor user to send Accepted email
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
err!("Grantor user not found.")
};
@ -367,7 +374,7 @@ async fn accept_invite(
&& grantor_user.name == claims.grantor_name
&& grantor_user.email == claims.grantor_email
{
emergency_access.accept_invite(&grantee_user.uuid, &grantee_user.email, &conn).await?;
emergency_access.accept_invite(&grantee_user.uuid, &grantee_user.email, &mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_invite_accepted(&grantor_user.email, &grantee_user.email).await?;
@ -390,7 +397,7 @@ async fn confirm_emergency_access(
emer_id: EmergencyAccessId,
data: Json<ConfirmData>,
headers: Headers,
conn: DbConn,
mut conn: DbConn,
) -> JsonResult {
check_emergency_access_enabled()?;
@ -399,7 +406,7 @@ async fn confirm_emergency_access(
let key = data.key;
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &confirming_user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &confirming_user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -410,12 +417,12 @@ async fn confirm_emergency_access(
err!("Emergency access not valid.")
}
let Some(grantor_user) = User::find_by_uuid(&confirming_user.uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&confirming_user.uuid, &mut conn).await else {
err!("Grantor user not found.")
};
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
err!("Grantee user not found.")
};
@ -423,7 +430,7 @@ async fn confirm_emergency_access(
emergency_access.key_encrypted = Some(key);
emergency_access.email = None;
emergency_access.save(&conn).await?;
emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?;
@ -439,12 +446,12 @@ async fn confirm_emergency_access(
// region access emergency access
#[post("/emergency-access/<emer_id>/initiate")]
async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
let initiating_user = headers.user;
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &initiating_user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &initiating_user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -453,7 +460,7 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
err!("Emergency access not valid.")
}
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
err!("Grantor user not found.")
};
@ -462,7 +469,7 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
emergency_access.updated_at = now;
emergency_access.recovery_initiated_at = Some(now);
emergency_access.last_notification_at = Some(now);
emergency_access.save(&conn).await?;
emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_initiated(
@ -477,11 +484,11 @@ async fn initiate_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
}
#[post("/emergency-access/<emer_id>/approve")]
async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -490,17 +497,17 @@ async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
err!("Emergency access not valid.")
}
let Some(grantor_user) = User::find_by_uuid(&headers.user.uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&headers.user.uuid, &mut conn).await else {
err!("Grantor user not found.")
};
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
err!("Grantee user not found.")
};
emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32;
emergency_access.save(&conn).await?;
emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?;
@ -512,11 +519,11 @@ async fn approve_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
}
#[post("/emergency-access/<emer_id>/reject")]
async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
let Some(mut emergency_access) =
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantor_uuid(&emer_id, &headers.user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -528,12 +535,12 @@ async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, c
}
if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &conn).await else {
let Some(grantee_user) = User::find_by_uuid(grantee_uuid, &mut conn).await else {
err!("Grantee user not found.")
};
emergency_access.status = EmergencyAccessStatus::Confirmed as i32;
emergency_access.save(&conn).await?;
emergency_access.save(&mut conn).await?;
if CONFIG.mail_enabled() {
mail::send_emergency_access_recovery_rejected(&grantee_user.email, &headers.user.name).await?;
@ -549,11 +556,11 @@ async fn reject_emergency_access(emer_id: EmergencyAccessId, headers: Headers, c
// region action
#[post("/emergency-access/<emer_id>/view")]
async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
let Some(emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &headers.user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -562,8 +569,8 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, con
err!("Emergency access not valid.")
}
let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, CipherSyncType::User, &conn).await;
let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &mut conn).await;
let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, CipherSyncType::User, &mut conn).await;
let mut ciphers_json = Vec::with_capacity(ciphers.len());
for c in ciphers {
@ -573,7 +580,7 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, con
&emergency_access.grantor_uuid,
Some(&cipher_sync_data),
CipherSyncType::User,
&conn,
&mut conn,
)
.await?,
);
@ -587,12 +594,12 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, con
}
#[post("/emergency-access/<emer_id>/takeover")]
async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
check_emergency_access_enabled()?;
let requesting_user = headers.user;
let Some(emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -601,7 +608,7 @@ async fn takeover_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
err!("Emergency access not valid.")
}
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
err!("Grantor user not found.")
};
@ -629,7 +636,7 @@ async fn password_emergency_access(
emer_id: EmergencyAccessId,
data: Json<EmergencyAccessPasswordData>,
headers: Headers,
conn: DbConn,
mut conn: DbConn,
) -> EmptyResult {
check_emergency_access_enabled()?;
@ -639,7 +646,7 @@ async fn password_emergency_access(
let requesting_user = headers.user;
let Some(emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -648,21 +655,21 @@ async fn password_emergency_access(
err!("Emergency access not valid.")
}
let Some(mut grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
let Some(mut grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
err!("Grantor user not found.")
};
// change grantor_user password
grantor_user.set_password(new_master_password_hash, Some(data.key), true, None);
grantor_user.save(&conn).await?;
grantor_user.save(&mut conn).await?;
// Disable TwoFactor providers since they will otherwise block logins
TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn).await?;
TwoFactor::delete_all_by_user(&grantor_user.uuid, &mut conn).await?;
// Remove grantor from all organisations unless Owner
for member in Membership::find_any_state_by_user(&grantor_user.uuid, &conn).await {
for member in Membership::find_any_state_by_user(&grantor_user.uuid, &mut conn).await {
if member.atype != MembershipType::Owner as i32 {
member.delete(&conn).await?;
member.delete(&mut conn).await?;
}
}
Ok(())
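
`password_emergency_access` ends with two cleanup steps: two-factor providers are deleted so they cannot block logins with the new master password, and the grantor keeps only the organizations they own. A sketch of the retention rule (types are stand-ins):

#[derive(Clone, Copy, PartialEq)]
enum MembershipType { Owner, Admin, User }

// Everything that is not an Owner membership gets deleted during takeover.
fn retained_after_takeover(memberships: &[MembershipType]) -> Vec<MembershipType> {
    memberships.iter().copied().filter(|m| *m == MembershipType::Owner).collect()
}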
@ -671,10 +678,10 @@ async fn password_emergency_access(
// endregion
#[get("/emergency-access/<emer_id>/policies")]
async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers, conn: DbConn) -> JsonResult {
async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut conn: DbConn) -> JsonResult {
let requesting_user = headers.user;
let Some(emergency_access) =
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &conn).await
EmergencyAccess::find_by_uuid_and_grantee_uuid(&emer_id, &requesting_user.uuid, &mut conn).await
else {
err!("Emergency access not valid.")
};
@ -683,11 +690,11 @@ async fn policies_emergency_access(emer_id: EmergencyAccessId, headers: Headers,
err!("Emergency access not valid.")
}
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await else {
let Some(grantor_user) = User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await else {
err!("Grantor user not found.")
};
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &conn);
let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &mut conn);
let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
Ok(Json(json!({
@ -721,8 +728,8 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
return;
}
if let Ok(conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&conn).await;
if let Ok(mut conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
if emergency_access_list.is_empty() {
debug!("No emergency request timeout to approve");
@ -736,18 +743,18 @@ pub async fn emergency_request_timeout_job(pool: DbPool) {
if recovery_allowed_at.le(&now) {
// Only update the access status
// Updating the whole record could cause issues when the emergency_notification_reminder_job is also active
emer.update_access_status_and_save(EmergencyAccessStatus::RecoveryApproved as i32, &now, &conn)
emer.update_access_status_and_save(EmergencyAccessStatus::RecoveryApproved as i32, &now, &mut conn)
.await
.expect("Unable to update emergency access status");
if CONFIG.mail_enabled() {
// get grantor user to send Accepted email
let grantor_user =
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found");
User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
// get grantee user to send Accepted email
let grantee_user =
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &conn)
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
.await
.expect("Grantee user not found");
@ -776,8 +783,8 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
return;
}
if let Ok(conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&conn).await;
if let Ok(mut conn) = pool.get().await {
let emergency_access_list = EmergencyAccess::find_all_recoveries_initiated(&mut conn).await;
if emergency_access_list.is_empty() {
debug!("No emergency request reminder notification to send");
@ -798,18 +805,18 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) {
if final_recovery_reminder_at.le(&now) && next_recovery_reminder_at.le(&now) {
// Only update the last notification date
// Updating the whole record could cause issues when the emergency_request_timeout_job is also active
emer.update_last_notification_date_and_save(&now, &conn)
emer.update_last_notification_date_and_save(&now, &mut conn)
.await
.expect("Unable to update emergency access notification date");
if CONFIG.mail_enabled() {
// get grantor user to send Accepted email
let grantor_user =
User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found");
User::find_by_uuid(&emer.grantor_uuid, &mut conn).await.expect("Grantor user not found");
// get grantee user to send Accepted email
let grantee_user =
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &conn)
User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid"), &mut conn)
.await
.expect("Grantee user not found");

Some files were not shown because too many files have changed in this diff