diff --git a/app/routes/users/overview.tsx b/app/routes/users/overview.tsx
index e0e920d..b665ccb 100644
--- a/app/routes/users/overview.tsx
+++ b/app/routes/users/overview.tsx
@@ -11,12 +11,11 @@ import { ErrorPopup } from '~/components/Error';
import StatusCircle from '~/components/StatusCircle';
import type { Machine, User } from '~/types';
import cn from '~/utils/cn';
-import { loadContext } from '~/utils/config/headplane';
-import { loadConfig } from '~/utils/config/headscale';
import { del, post, pull } from '~/utils/headscale';
import { send } from '~/utils/res';
import { getSession } from '~/utils/sessions.server';
+import { hp_getConfig, hs_getConfig } from '~/utils/state';
import toast from '~/utils/toast';
import Auth from './components/auth';
import Oidc from './components/oidc';
@@ -36,11 +35,11 @@ export async function loader({ request }: LoaderFunctionArgs) {
machines: machines.nodes.filter((machine) => machine.user.id === user.id),
}));
- const context = await loadContext();
+ const context = hp_getConfig();
+ const { mode, config } = hs_getConfig();
let magic: string | undefined;
- if (context.config.read) {
- const config = await loadConfig();
+ if (mode !== 'no') {
if (config.dns.magic_dns) {
magic = config.dns.base_domain;
}
@@ -152,11 +151,7 @@ export default function Page() {
Manage the users in your network and their permissions. Tip: You can
drag machines between users to change ownership.
- {data.oidc ? (
-
- ) : (
-
- )}
+ {data.oidc ?
:
}
 		}>
 			{() => (
 				{
diff --git a/app/utils/config/headplane.ts b/app/utils/config/headplane.ts
deleted file mode 100644
--- a/app/utils/config/headplane.ts
+++ /dev/null
- if (context) {
- return context;
- }
-
- if (loadLock) {
- return new Promise((resolve) => {
- const interval = setInterval(() => {
- if (context) {
- clearInterval(interval);
- resolve(context);
- }
- }, 100);
- });
- }
-
- loadLock = true;
- const envFile = process.env.LOAD_ENV_FILE === 'true';
- if (envFile) {
- log.info('CTXT', 'Loading environment variables from .env');
- await import('dotenv/config');
- }
-
- const debug = process.env.DEBUG === 'true';
- if (debug) {
- log.info('CTXT', 'Debug mode is enabled! Logs will spam a lot.');
- log.info('CTXT', 'Please disable debug mode in production.');
- }
-
- const path = resolve(process.env.CONFIG_FILE ?? '/etc/headscale/config.yaml');
- const { config, contextData } = await checkConfig(path);
-
- let headscaleUrl = process.env.HEADSCALE_URL;
- let headscalePublicUrl = process.env.HEADSCALE_PUBLIC_URL;
-
- if (!headscaleUrl && !config) {
- throw new Error('HEADSCALE_URL not set');
- }
-
- if (config) {
- headscaleUrl = headscaleUrl ?? config.server_url;
- if (!headscalePublicUrl) {
- // Fallback to the config value if the env var is not set
- headscalePublicUrl = config.server_url;
- }
- }
-
- if (!headscaleUrl) {
- throw new Error('Missing server_url in headscale config');
- }
-
- const cookieSecret = process.env.COOKIE_SECRET;
- if (!cookieSecret) {
- throw new Error('COOKIE_SECRET not set');
- }
-
- // Initialize Session Management
- initSessionManager();
-
- const cacheEnabled = process.env.AGENT_CACHE_DISABLED !== 'true';
- const cachePath =
- process.env.AGENT_CACHE_PATH ?? '/etc/headplane/agent.cache';
- const cacheTTL = 300 * 1000; // 5 minutes
-
- // Load agent cache
- // if (cacheEnabled) {
- // log.info('CTXT', 'Initializing Agent Cache');
- // log.debug('CTXT', 'Cache Path: %s', cachePath);
- // log.debug('CTXT', 'Cache TTL: %d', cacheTTL);
- // await initAgentCache(cacheTTL, cachePath);
- // }
-
- context = {
- debug,
- headscaleUrl,
- headscalePublicUrl,
- cookieSecret,
- integration: await loadIntegration(),
- config: contextData,
- cache: {
- enabled: cacheEnabled,
- path: cachePath,
- defaultTTL: cacheTTL,
- },
- oidc: await checkOidc(config),
- };
-
- log.info('CTXT', 'Starting Headplane with Context');
- log.info('CTXT', 'HEADSCALE_URL: %s', headscaleUrl);
- if (headscalePublicUrl) {
- log.info('CTXT', 'HEADSCALE_PUBLIC_URL: %s', headscalePublicUrl);
- }
-
- log.info('CTXT', 'Integration: %s', context.integration?.name ?? 'None');
- log.info(
- 'CTXT',
- 'Config: %s',
- contextData.read
- ? `Found ${contextData.write ? '' : '(Read Only)'}`
- : 'Unavailable',
- );
-
- log.info('CTXT', 'OIDC: %s', context.oidc ? 'Configured' : 'Unavailable');
- loadLock = false;
- return context;
-}
-
-async function checkConfig(path: string) {
- log.debug('CTXT', 'Checking config at %s', path);
-
- let config: HeadscaleConfig | undefined;
- try {
- config = await loadConfig(path);
- } catch {
- log.debug('CTXT', 'Config at %s failed to load', path);
- return {
- config: undefined,
- contextData: {
- read: false,
- write: false,
- },
- };
- }
-
- let write = false;
- try {
- log.debug('CTXT', 'Checking write access to %s', path);
- await access(path, constants.W_OK);
- write = true;
- } catch {
- log.debug('CTXT', 'No write access to %s', path);
- }
-
- return {
- config,
- contextData: {
- read: true,
- write,
- },
- };
-}
-
-async function checkOidc(config?: HeadscaleConfig) {
- log.debug('CTXT', 'Checking OIDC configuration');
-
- const disableKeyLogin = process.env.DISABLE_API_KEY_LOGIN === 'true';
- log.debug('CTXT', 'API Key Login Enabled: %s', !disableKeyLogin);
-
- log.debug('CTXT', 'Checking ROOT_API_KEY and falling back to API_KEY');
- const rootKey = process.env.ROOT_API_KEY ?? process.env.API_KEY;
- if (!rootKey) {
- throw new Error('ROOT_API_KEY or API_KEY not set');
- }
-
- let issuer = process.env.OIDC_ISSUER;
- let client = process.env.OIDC_CLIENT_ID;
- let secret = process.env.OIDC_CLIENT_SECRET;
- const method = process.env.OIDC_CLIENT_SECRET_METHOD ?? 'client_secret_basic';
- const skip = process.env.OIDC_SKIP_CONFIG_VALIDATION === 'true';
- const redirectUri = process.env.OIDC_REDIRECT_URI;
-
- log.debug('CTXT', 'Checking OIDC environment variables');
- log.debug('CTXT', 'Issuer: %s', issuer);
- log.debug('CTXT', 'Client: %s', client);
- log.debug('CTXT', 'Token Auth Method: %s', method);
- if (redirectUri) {
- log.debug('CTXT', 'Redirect URI: %s', redirectUri);
- }
-
- if (
- (issuer ?? client ?? secret) &&
- !(issuer && client && secret) &&
- !config
- ) {
- throw new Error('OIDC environment variables are incomplete');
- }
-
- if (issuer && client && secret) {
- if (!skip) {
- log.debug(
- 'CTXT',
- 'Validating OIDC configuration from environment variables',
- );
-
- // This is a hold-over from the old code
- // TODO: Rewrite checkOIDC in the context loader
- const oidcConfig = {
- issuer: issuer,
- clientId: client,
- clientSecret: secret,
- tokenEndpointAuthMethod: method,
- };
-
- const result = await testOidc(oidcConfig);
- if (!result) {
- return;
- }
- } else {
- log.debug('CTXT', 'OIDC_SKIP_CONFIG_VALIDATION is set');
- log.debug('CTXT', 'Skipping OIDC configuration validation');
- }
-
- return {
- issuer,
- client,
- secret,
- redirectUri,
- method,
- rootKey,
- disableKeyLogin,
- };
- }
-
- if ((!issuer || !client || !secret) && config) {
- issuer = config.oidc?.issuer;
- client = config.oidc?.client_id;
- secret = config.oidc?.client_secret;
-
- if (!secret && config.oidc?.client_secret_path) {
- log.debug(
- 'CTXT',
- 'Trying to read OIDC client secret from %s',
- config.oidc.client_secret_path,
- );
- try {
- const data = await readFile(config.oidc.client_secret_path, 'utf8');
-
- if (data && data.length > 0) {
- secret = data.trim();
- }
- } catch {
- log.error(
- 'CTXT',
- 'Failed to read OIDC client secret from %s',
- config.oidc.client_secret_path,
- );
- }
- }
- }
-
- if ((issuer ?? client ?? secret) && !(issuer && client && secret)) {
- throw new Error('OIDC configuration is incomplete');
- }
-
- if (!issuer || !client || !secret) {
- return;
- }
-
- if (config?.oidc?.only_start_if_oidc_is_available) {
- log.debug('CTXT', 'Validating OIDC configuration from headscale config');
- const oidcConfig = {
- issuer: issuer,
- clientId: client,
- clientSecret: secret,
- tokenEndpointAuthMethod: method,
- };
-
- const result = await testOidc(oidcConfig);
- if (!result) {
- return;
- }
- } else {
- log.debug('CTXT', 'OIDC validation is disabled in headscale config');
- log.debug('CTXT', 'Skipping OIDC configuration validation');
- }
-
- return {
- issuer,
- client,
- secret,
- redirectUri,
- rootKey,
- method,
- disableKeyLogin,
- };
-}
diff --git a/app/utils/config/headscale.ts b/app/utils/config/headscale.ts
deleted file mode 100644
index cdbb696..0000000
--- a/app/utils/config/headscale.ts
+++ /dev/null
@@ -1,354 +0,0 @@
-// Handle the configuration loading for headscale.
-// Functionally only used for reading and writing the configuration file.
-// Availability checks and other configuration checks are done in the headplane
-// configuration file that's adjacent to this one.
-//
-// Around the codebase, this is referred to as the config
-// Refer to this file on juanfont/headscale for the default values:
-// https://github.com/juanfont/headscale/blob/main/hscontrol/types/config.go
-import { readFile, writeFile } from 'node:fs/promises';
-import { resolve } from 'node:path';
-
-import { type Document, parseDocument } from 'yaml';
-import { z } from 'zod';
-
-import log from '~/utils/log';
-
-const goBool = z
- .union([z.boolean(), z.literal('true'), z.literal('false')])
- .transform((value) => {
- if (typeof value === 'boolean') {
- return value;
- }
-
- return value === 'true';
- });
-
-const goDuration = z.union([z.literal(0), z.string()]);
-
-const HeadscaleConfig = z.object({
- tls_letsencrypt_cache_dir: z.string().default('/var/www/cache'),
- tls_letsencrypt_challenge_type: z
- .enum(['HTTP-01', 'TLS-ALPN-01'])
- .default('HTTP-01'),
-
- tls_letsencrypt_hostname: z.string().optional(),
- tls_letsencrypt_listen: z.string().optional(),
-
- tls_cert_path: z.string().nullish(),
- tls_key_path: z.string().nullish(),
-
- server_url: z.string().regex(/^https?:\/\//),
- listen_addr: z.string(),
- metrics_listen_addr: z.string().optional(),
- grpc_listen_addr: z.string().default(':50443'),
- grpc_allow_insecure: goBool.default(false),
-
- disable_check_updates: goBool.default(false),
- ephemeral_node_inactivity_timeout: goDuration.default('120s'),
- randomize_client_port: goBool.default(false),
-
- acme_email: z.string().optional(),
- acme_url: z.string().optional(),
-
- unix_socket: z.string().default('/var/run/headscale/headscale.sock'),
- unix_socket_permission: z.string().default('0o770'),
-
- policy: z
- .object({
- mode: z.enum(['file', 'database']).default('file'),
- path: z.string().optional(),
- })
- .optional(),
-
- tuning: z
- .object({
- batch_change_delay: goDuration.default('800ms'),
- node_mapsession_buffered_chan_size: z.number().default(30),
- })
- .optional(),
-
- noise: z.object({
- private_key_path: z.string(),
- }),
-
- log: z
- .object({
- level: z.string().default('info'),
- format: z.enum(['text', 'json']).default('text'),
- })
- .default({ level: 'info', format: 'text' }),
-
- logtail: z
- .object({
- enabled: goBool.default(false),
- })
- .default({ enabled: false }),
-
- cli: z
- .object({
- address: z.string().optional(),
- api_key: z.string().optional(),
- timeout: goDuration.default('10s'),
- insecure: goBool.default(false),
- })
- .optional(),
-
- prefixes: z.object({
- allocation: z.enum(['sequential', 'random']).default('sequential'),
- v4: z.string(),
- v6: z.string(),
- }),
-
- dns: z.object({
- magic_dns: goBool.default(true),
- base_domain: z.string().default('headscale.net'),
- nameservers: z
- .object({
- global: z.array(z.string()).default([]),
- split: z.record(z.array(z.string())).default({}),
- })
- .default({ global: [], split: {} }),
- search_domains: z.array(z.string()).default([]),
- extra_records: z
- .array(
- z.object({
- name: z.string(),
- type: z.literal('A'),
- value: z.string(),
- }),
- )
- .default([]),
- }),
-
- oidc: z
- .object({
- only_start_if_oidc_is_available: goBool.default(false),
- issuer: z.string().optional(),
- client_id: z.string().optional(),
- client_secret: z.string().optional(),
- client_secret_path: z.string().nullish(),
- scope: z.array(z.string()).default(['openid', 'profile', 'email']),
- extra_params: z.record(z.unknown()).default({}),
- allowed_domains: z.array(z.string()).optional(),
- allowed_users: z.array(z.string()).optional(),
- allowed_groups: z.array(z.string()).optional(),
- strip_email_domain: goBool.default(false),
- expiry: goDuration.default('180d'),
- use_expiry_from_token: goBool.default(false),
- })
- .optional(),
-
- database: z.union([
- z.object({
- type: z.literal('sqlite'),
- debug: goBool.default(false),
- sqlite: z.object({
- path: z.string(),
- }),
- }),
- z.object({
- type: z.literal('sqlite3'),
- debug: goBool.default(false),
- sqlite: z.object({
- path: z.string(),
- }),
- }),
- z.object({
- type: z.literal('postgres'),
- debug: goBool.default(false),
- postgres: z.object({
- host: z.string(),
- port: z.number(),
- name: z.string(),
- user: z.string(),
- pass: z.string(),
- ssl: goBool.default(true),
- max_open_conns: z.number().default(10),
- max_idle_conns: z.number().default(10),
- conn_max_idle_time_secs: z.number().default(3600),
- }),
- }),
- ]),
-
- derp: z.object({
- server: z.object({
- enabled: goBool.default(true),
- region_id: z.number().optional(),
- region_code: z.string().optional(),
- region_name: z.string().optional(),
- stun_listen_addr: z.string().optional(),
- private_key_path: z.string().optional(),
-
- ipv4: z.string().optional(),
- ipv6: z.string().optional(),
- automatically_add_embedded_derp_region: goBool.default(true),
- }),
-
- urls: z.array(z.string()).optional(),
- paths: z.array(z.string()).optional(),
- auto_update_enabled: goBool.default(true),
- update_frequency: goDuration.default('24h'),
- }),
-});
-
-export type HeadscaleConfig = z.infer<typeof HeadscaleConfig>;
-
-export let configYaml: Document | undefined;
-export let config: HeadscaleConfig | undefined;
-
-export async function loadConfig(path?: string) {
- if (config) {
- return config;
- }
-
- if (!path) {
- throw new Error('Path is required to lazy load config');
- }
-
- log.debug('CFGX', 'Loading Headscale configuration from %s', path);
- const data = await readFile(path, 'utf8');
- configYaml = parseDocument(data);
-
- if (process.env.HEADSCALE_CONFIG_UNSTRICT === 'true') {
- log.debug('CFGX', 'Loaded Headscale configuration in non-strict mode');
-		const loaded = configYaml.toJSON() as Record<string, unknown>;
- config = {
- ...loaded,
- tls_letsencrypt_cache_dir:
- loaded.tls_letsencrypt_cache_dir ?? '/var/www/cache',
- tls_letsencrypt_challenge_type:
- loaded.tls_letsencrypt_challenge_type ?? 'HTTP-01',
- grpc_listen_addr: loaded.grpc_listen_addr ?? ':50443',
- grpc_allow_insecure: loaded.grpc_allow_insecure ?? false,
- randomize_client_port: loaded.randomize_client_port ?? false,
- unix_socket: loaded.unix_socket ?? '/var/run/headscale/headscale.sock',
- unix_socket_permission: loaded.unix_socket_permission ?? '0o770',
- tuning: loaded.tuning ?? {
- batch_change_delay: '800ms',
- node_mapsession_buffered_chan_size: 30,
- },
-
- log: loaded.log ?? {
- level: 'info',
- format: 'text',
- },
-
- logtail: loaded.logtail ?? {
- enabled: false,
- },
-
- cli: loaded.cli ?? {
- timeout: '10s',
- insecure: false,
- },
-
- prefixes: loaded.prefixes ?? {
- allocation: 'sequential',
- v4: '',
- v6: '',
- },
-
- dns: loaded.dns ?? {
- nameservers: {
- global: [],
- split: {},
- },
- search_domains: [],
- extra_records: [],
- magic_dns: false,
- base_domain: 'headscale.net',
- },
- } as HeadscaleConfig;
-
- log.warn('CFGX', 'Loaded Headscale configuration in non-strict mode');
- log.warn('CFGX', 'By using this mode you forfeit GitHub issue support');
- log.warn('CFGX', 'This is very dangerous and comes with a few caveats:');
- log.warn('CFGX', 'Headplane could very easily crash');
- log.warn('CFGX', 'Headplane could break your Headscale installation');
- log.warn('CFGX', 'The UI could throw random errors/show incorrect data');
- log.warn('CFGX', '');
- return config;
- }
-
- try {
- log.debug('CFGX', 'Attempting to parse Headscale configuration');
- config = await HeadscaleConfig.parseAsync(configYaml.toJSON());
- } catch (error) {
- log.debug('CFGX', 'Failed to load Headscale configuration');
- if (error instanceof z.ZodError) {
- log.error('CFGX', 'Recieved invalid configuration file');
- log.error('CFGX', 'The following schema issues were found:');
- for (const issue of error.issues) {
- const path = issue.path.map(String).join('.');
- const message = issue.message;
-
- log.error('CFGX', ` '${path}': ${message}`);
- }
-
- log.error('CFGX', '');
- log.error('CFGX', 'Resolve these issues and try again.');
- log.error('CFGX', 'Headplane will operate without the config');
- log.error('CFGX', '');
- }
-
- throw error;
- }
-
- return config;
-}
-
-// This is so obscenely dangerous, please have a check around it
-export async function patchConfig(partial: Record<string, unknown>) {
- if (!configYaml || !config) {
- throw new Error('Config not loaded');
- }
-
- log.debug('CFGX', 'Patching Headscale configuration');
- for (const [key, value] of Object.entries(partial)) {
- log.debug('CFGX', 'Patching %s with %s', key, value);
- // If the key is something like `test.bar."foo.bar"`, then we treat
- // the foo.bar as a single key, and not as two keys, so that needs
- // to be split correctly.
-
- // Iterate through each character, and if we find a dot, we check if
- // the next character is a quote, and if it is, we skip until the next
- // quote, and then we skip the next character, which should be a dot.
- // If it's not a quote, we split it.
- const path = [];
- let temp = '';
- let inQuote = false;
-
- for (const element of key) {
- if (element === '"') {
- inQuote = !inQuote;
- }
-
- if (element === '.' && !inQuote) {
- path.push(temp.replaceAll('"', ''));
- temp = '';
- continue;
- }
-
- temp += element;
- }
-
- // Push the remaining element
- path.push(temp.replaceAll('"', ''));
- if (value === null) {
- configYaml.deleteIn(path);
- continue;
- }
-
- configYaml.setIn(path, value);
- }
-
- config =
- process.env.HEADSCALE_CONFIG_UNSTRICT === 'true'
- ? (configYaml.toJSON() as HeadscaleConfig)
- : await HeadscaleConfig.parseAsync(configYaml.toJSON());
-
- const path = resolve(process.env.CONFIG_FILE ?? '/etc/headscale/config.yaml');
- log.debug('CFGX', 'Writing patched configuration to %s', path);
- await writeFile(path, configYaml.toString(), 'utf8');
-}
diff --git a/app/utils/config/loader.ts b/app/utils/config/loader.ts
new file mode 100644
index 0000000..fb533bd
--- /dev/null
+++ b/app/utils/config/loader.ts
@@ -0,0 +1,196 @@
+import { constants, access, readFile, writeFile } from 'node:fs/promises';
+import { Document, parseDocument } from 'yaml';
+import { HeadplaneConfig } from '~/utils/context/parser';
+import log from '~/utils/log';
+import mutex from '~/utils/mutex';
+import { HeadscaleConfig, validateConfig } from './parser';
+
+let runtimeYaml: Document | undefined = undefined;
+let runtimeConfig: HeadscaleConfig | undefined = undefined;
+let runtimePath: string | undefined = undefined;
+let runtimeMode: 'rw' | 'ro' | 'no' = 'no';
+let runtimeStrict = true;
+
+const runtimeLock = mutex();
+
+type ConfigModes =
+ | {
+ mode: 'rw' | 'ro';
+ config: HeadscaleConfig;
+ }
+ | {
+ mode: 'no';
+ config: undefined;
+ };
+
+export function hs_getConfig(): ConfigModes {
+ if (runtimeMode === 'no') {
+ return {
+ mode: 'no',
+ config: undefined,
+ };
+ }
+
+ runtimeLock.acquire();
+ // We can assert if mode is not 'no'
+ const config = runtimeConfig!;
+ runtimeLock.release();
+
+ return {
+ mode: runtimeMode,
+ config: config,
+ };
+}
+
+export async function hs_loadConfig(context: HeadplaneConfig) {
+ runtimeLock.acquire();
+ const path = context.headscale.config_path;
+ if (!path) {
+ runtimeLock.release();
+ return;
+ }
+
+ runtimeMode = await validateConfigPath(path);
+ if (runtimeMode === 'no') {
+ runtimeLock.release();
+ return;
+ }
+
+ runtimePath = path;
+ const rawConfig = await loadConfigFile(path);
+	if (!rawConfig) {
+		runtimeLock.release();
+		return;
+	}
+
+ runtimeStrict = context.headscale.config_strict ?? true;
+ const config = validateConfig(rawConfig, runtimeStrict);
+ if (!config) {
+ runtimeMode = 'no';
+ }
+
+	runtimeConfig = config;
+	runtimeLock.release();
+}
+
+async function validateConfigPath(path: string) {
+ log.debug('CFGX', `Validating Headscale configuration file at ${path}`);
+ try {
+ await access(path, constants.F_OK | constants.R_OK);
+ log.info('CFGX', `Headscale configuration found at ${path}`);
+ } catch (e) {
+ log.error('CFGX', `Headscale configuration not readable at ${path}`);
+ log.error('CFGX', `${e}`);
+ return 'no';
+ }
+
+ let writeable = false;
+ try {
+ await access(path, constants.W_OK);
+ writeable = true;
+ } catch (e) {
+ log.warn('CFGX', `Headscale configuration not writeable at ${path}`);
+ log.debug('CFGX', `${e}`);
+ }
+
+ return writeable ? 'rw' : 'ro';
+}
+
+async function loadConfigFile(path: string) {
+ log.debug('CFGX', `Loading Headscale configuration file at ${path}`);
+ try {
+ const data = await readFile(path, 'utf8');
+ const configYaml = parseDocument(data);
+
+ if (configYaml.errors.length > 0) {
+ log.error(
+ 'CFGX',
+ `Error parsing Headscale configuration file at ${path}`,
+ );
+ for (const error of configYaml.errors) {
+ log.error('CFGX', ` ${error.toString()}`);
+ }
+
+ return;
+ }
+
+ runtimeYaml = configYaml;
+ return configYaml.toJSON() as unknown;
+ } catch (e) {
+ log.error('CFGX', `Error reading Headscale configuration file at ${path}`);
+ log.error('CFGX', `${e}`);
+ return;
+ }
+}
+
+type PatchConfig = { path: string; value: unknown };
+export async function hs_patchConfig(patches: PatchConfig[]) {
+ if (!runtimeConfig || !runtimeYaml || !runtimePath) {
+ log.error('CFGX', 'Headscale configuration not loaded');
+ return;
+ }
+
+ if (runtimeMode === 'no') {
+ return;
+ }
+
+ if (runtimeMode === 'ro') {
+ throw new Error('Headscale configuration is read-only');
+ }
+
+ runtimeLock.acquire();
+ const config = runtimeConfig!;
+
+ log.debug('CFGX', 'Patching Headscale configuration');
+ for (const patch of patches) {
+ const { path, value } = patch;
+ log.debug('CFGX', 'Patching %s in Headscale configuration', path);
+ // If the key is something like `test.bar."foo.bar"`, then we treat
+ // the foo.bar as a single key, and not as two keys, so that needs
+ // to be split correctly.
+
+ // Iterate through each character, and if we find a dot, we check if
+ // the next character is a quote, and if it is, we skip until the next
+ // quote, and then we skip the next character, which should be a dot.
+ // If it's not a quote, we split it.
+ const key = [];
+ let current = '';
+ let quote = false;
+
+ for (const char of path) {
+ if (char === '"') {
+ quote = !quote;
+ }
+
+ if (char === '.' && !quote) {
+ key.push(current);
+ current = '';
+ continue;
+ }
+
+ current += char;
+ }
+
+ key.push(current.replaceAll('"', ''));
+
+ // Deletion handling
+ if (value === null) {
+ runtimeYaml.deleteIn(key);
+ continue;
+ }
+
+ runtimeYaml.setIn(key, value);
+ }
+
+ // Revalidate the configuration
+ const newRawConfig = runtimeYaml.toJSON() as unknown;
+ runtimeConfig = runtimeStrict
+ ? validateConfig(newRawConfig, runtimeStrict)
+ : (newRawConfig as HeadscaleConfig);
+
+ log.debug(
+ 'CFGX',
+ 'Writing patched Headscale configuration to %s',
+ runtimePath,
+ );
+ await writeFile(runtimePath, runtimeYaml.toString(), 'utf8');
+ runtimeLock.release();
+}
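A short usage sketch for the new hs_patchConfig() helper; the patch values below are hypothetical and follow the dotted-path convention described in the comments above (a quoted segment such as "foo.bar" is kept as a single key, and a null value deletes the key from the YAML document):

import { hs_patchConfig } from '~/utils/config/loader';

// 'dns.base_domain'   -> ['dns', 'base_domain']     (sets a value)
// 'dns.extra_records' -> ['dns', 'extra_records']   (null removes the key)
await hs_patchConfig([
	{ path: 'dns.base_domain', value: 'example.com' },
	{ path: 'dns.extra_records', value: null },
]);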
diff --git a/app/utils/config/parser.ts b/app/utils/config/parser.ts
new file mode 100644
index 0000000..7a057d0
--- /dev/null
+++ b/app/utils/config/parser.ts
@@ -0,0 +1,228 @@
+import { type } from 'arktype';
+import log from '~/utils/log';
+
+const goBool = type('boolean | "true" | "false"').pipe((v) => {
+ if (v === 'true') return true;
+ if (v === 'false') return false;
+ return v;
+});
+
+const goDuration = type('0 | string').pipe((v) => {
+ return v.toString();
+});
+
+const databaseConfig = type({
+ type: '"sqlite" | "sqlite3"',
+ sqlite: {
+ path: 'string',
+		write_ahead_log: goBool.default(true),
+ wal_autocheckpoint: 'number = 1000',
+ },
+})
+ .or({
+ type: '"postgres"',
+ postgres: {
+ host: 'string',
+ port: 'number | ""',
+ name: 'string',
+ user: 'string',
+ pass: 'string',
+ max_open_conns: 'number = 10',
+ max_idle_conns: 'number = 10',
+ conn_max_idle_time_secs: 'number = 3600',
+ ssl: goBool.default(false),
+ },
+ })
+ .merge({
+ debug: goBool.default(false),
+ 'gorm?': {
+ prepare_stmt: goBool.default(true),
+ parameterized_queries: goBool.default(true),
+ skip_err_record_not_found: goBool.default(true),
+ slow_threshold: 'number = 1000',
+ },
+ });
+
+// Parsing here is intentionally less strict: we only need the values to be
+// well-formed enough to safely read and modify them
+export type HeadscaleConfig = typeof headscaleConfig.infer;
+const headscaleConfig = type({
+ server_url: 'string',
+ listen_addr: 'string',
+ metrics_listen_addr: 'string?',
+	grpc_listen_addr: 'string = ":50443"',
+ grpc_allow_insecure: goBool.default(false),
+ noise: {
+ private_key_path: 'string',
+ },
+ prefixes: {
+ v4: 'string',
+ v6: 'string',
+ allocation: '"sequential" | "random" = "sequential"',
+ },
+ derp: {
+ server: {
+ enabled: goBool.default(true),
+ region_id: 'number?',
+ region_code: 'string?',
+ region_name: 'string?',
+ stun_listen_addr: 'string?',
+ private_key_path: 'string?',
+ ipv4: 'string?',
+ ipv6: 'string?',
+ automatically_add_embedded_derp_region: goBool.default(true),
+ },
+ urls: 'string[]?',
+ paths: 'string[]?',
+ auto_update_enabled: goBool.default(true),
+ update_frequency: goDuration.default('24h'),
+ },
+
+ disable_check_updates: goBool.default(false),
+ ephemeral_node_inactivity_timeout: goDuration.default('30m'),
+ database: databaseConfig,
+
+ acme_url: 'string = "https://acme-v02.api.letsencrypt.org/directory"',
+ acme_email: 'string | ""',
+ tls_letsencrypt_hostname: 'string | ""',
+ tls_letsencrypt_cache_dir: 'string = "/var/lib/headscale/cache"',
+ tls_letsencrypt_challenge_type: 'string = "HTTP-01"',
+ tls_letsencrypt_listen: 'string = ":http"',
+ tls_cert_path: 'string?',
+ tls_key_path: 'string?',
+
+ log: type({
+ format: 'string = "text"',
+ level: 'string = "info"',
+ }).default(() => ({ format: 'text', level: 'info' })),
+
+ 'policy?': {
+ mode: '"database" | "file" = "file"',
+ path: 'string?',
+ },
+
+ dns: {
+ magic_dns: goBool.default(true),
+ base_domain: 'string = "headscale.net"',
+ nameservers: type({
+ global: 'string[]',
+			split: 'Record<string, string[]>',
+ }).default(() => ({ global: [], split: {} })),
+ search_domains: type('string[]').default(() => []),
+ extra_records: type({
+ name: 'string',
+ value: 'string',
+ type: 'string | "A"',
+ })
+ .array()
+ .default(() => []),
+ },
+
+ unix_socket: 'string?',
+ unix_socket_permission: 'string = "0770"',
+
+ 'oidc?': {
+ only_start_if_oidc_is_available: goBool.default(false),
+ issuer: 'string',
+ client_id: 'string',
+ client_secret: 'string?',
+ client_secret_path: 'string?',
+ expiry: goDuration.default('180d'),
+ use_expiry_from_token: goBool.default(false),
+ scope: 'string = "profile email"',
+		extra_params: 'Record<string, unknown>?',
+ allowed_domains: 'string[]?',
+ allowed_groups: 'string[]?',
+ allowed_users: 'string[]?',
+ 'pkce?': {
+ enabled: goBool.default(false),
+ method: 'string = "S256"',
+ },
+ map_legacy_users: goBool.default(false),
+ },
+
+ 'logtail?': {
+ enabled: goBool.default(false),
+ },
+
+ randomize_client_port: goBool.default(false),
+});
+
+export function validateConfig(config: unknown, strict: boolean) {
+ log.debug('CFGX', 'Validating Headscale configuration...');
+ const out = strict
+ ? headscaleConfig(config)
+ : headscaleConfig(augmentUnstrictConfig(config as HeadscaleConfig));
+
+ if (out instanceof type.errors) {
+ log.error('CFGX', 'Error parsing Headscale configuration:');
+ for (const [number, error] of out.entries()) {
+ log.error('CFGX', ` (${number}): ${error.toString()}`);
+ }
+
+ log.error('CFGX', '');
+ log.error('CFGX', 'Resolve these issues and try again.');
+ log.error('CFGX', 'Headplane will operate without the config');
+ log.error('CFGX', '');
+ return;
+ }
+
+ log.debug('CFGX', 'Headscale configuration is valid.');
+ return out;
+}
+
+// If config_strict is false, we set the defaults and disable
+// the schema checking for the values that are not present
+function augmentUnstrictConfig(
+	loaded: Partial<HeadscaleConfig>,
+): HeadscaleConfig {
+ log.debug('CFGX', 'Loaded Headscale configuration in non-strict mode');
+ const config = {
+ ...loaded,
+ tls_letsencrypt_cache_dir:
+ loaded.tls_letsencrypt_cache_dir ?? '/var/www/cache',
+ tls_letsencrypt_challenge_type:
+ loaded.tls_letsencrypt_challenge_type ?? 'HTTP-01',
+ grpc_listen_addr: loaded.grpc_listen_addr ?? ':50443',
+ grpc_allow_insecure: loaded.grpc_allow_insecure ?? false,
+ randomize_client_port: loaded.randomize_client_port ?? false,
+ unix_socket: loaded.unix_socket ?? '/var/run/headscale/headscale.sock',
+ unix_socket_permission: loaded.unix_socket_permission ?? '0770',
+
+ log: loaded.log ?? {
+ level: 'info',
+ format: 'text',
+ },
+
+ logtail: loaded.logtail ?? {
+ enabled: false,
+ },
+
+ prefixes: loaded.prefixes ?? {
+ allocation: 'sequential',
+ v4: '',
+ v6: '',
+ },
+
+ dns: loaded.dns ?? {
+ nameservers: {
+ global: [],
+ split: {},
+ },
+ search_domains: [],
+ extra_records: [],
+ magic_dns: false,
+ base_domain: 'headscale.net',
+ },
+ };
+
+ log.warn('CFGX', 'Loaded Headscale configuration in non-strict mode');
+ log.warn('CFGX', 'By using this mode you forfeit GitHub issue support');
+ log.warn('CFGX', 'This is very dangerous and comes with a few caveats:');
+ log.warn('CFGX', ' Headplane could very easily crash');
+ log.warn('CFGX', ' Headplane could break your Headscale installation');
+ log.warn('CFGX', ' The UI could throw random errors/show incorrect data');
+ log.warn('CFGX', '');
+
+ return config as HeadscaleConfig;
+}
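A rough sketch of how the loader in app/utils/config/loader.ts consumes this parser; the path below is just the usual default, not a new setting:

import { readFile } from 'node:fs/promises';
import { parseDocument } from 'yaml';
import { validateConfig } from '~/utils/config/parser';

async function loadAndValidate(path = '/etc/headscale/config.yaml') {
	const raw = parseDocument(await readFile(path, 'utf8')).toJSON() as unknown;

	// strict=true rejects schema violations outright; strict=false routes the raw
	// object through augmentUnstrictConfig() first to fill in defaults.
	const config = validateConfig(raw, true);
	if (!config) {
		// validateConfig() already logged the issues; run without deep integration.
		return undefined;
	}

	return config;
}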
diff --git a/app/utils/context/loader.ts b/app/utils/context/loader.ts
new file mode 100644
index 0000000..5e2e869
--- /dev/null
+++ b/app/utils/context/loader.ts
@@ -0,0 +1,245 @@
+import { constants, access, readFile } from 'node:fs/promises';
+import { type } from 'arktype';
+import { parseDocument } from 'yaml';
+import { hs_loadConfig } from '~/utils/config/loader';
+import log, { hp_loadLogger } from '~/utils/log';
+import mutex from '~/utils/mutex';
+import { testOidc } from '~/utils/oidc';
+import { initSessionManager } from '~/utils/sessions.server';
+import { HeadplaneConfig, coalesceConfig, validateConfig } from './parser';
+
+const envBool = type('string | undefined').pipe((v) => {
+ return ['1', 'true', 'yes', 'on'].includes(v?.toLowerCase() ?? '');
+});
+
+const rootEnvs = type({
+ HEADPLANE_DEBUG_LOG: envBool,
+ HEADPLANE_LOAD_ENV_FILE: envBool,
+ HEADPLANE_LOAD_ENV_OVERRIDES: envBool,
+ HEADPLANE_CONFIG_PATH: 'string | undefined',
+}).onDeepUndeclaredKey('reject');
+
+const HEADPLANE_DEFAULT_CONFIG_PATH = '/etc/headplane/config.yaml';
+let runtimeConfig: HeadplaneConfig | undefined = undefined;
+const runtimeLock = mutex();
+
+// We need to acquire here to ensure that the configuration is loaded
+// properly. We can't request a configuration if it's in the process
+// of being updated.
+export function hp_getConfig() {
+ runtimeLock.acquire();
+ if (!runtimeConfig) {
+ runtimeLock.release();
+ // This shouldn't be possible, we NEED to have a configuration
+ throw new Error('Configuration not loaded');
+ }
+
+ const config = runtimeConfig;
+
+ runtimeLock.release();
+ return config;
+}
+
+// hp_loadConfig should ONLY be called when we explicitly need to reload
+// the configuration. This should be done when the configuration file
+// changes and we ignore environment variable changes.
+//
+// To read the config hp_getConfig should be used.
+// TODO: File watching for hp_loadConfig()
+export async function hp_loadConfig() {
+ runtimeLock.acquire();
+ let path = HEADPLANE_DEFAULT_CONFIG_PATH;
+
+ const envs = rootEnvs({
+ HEADPLANE_DEBUG_LOG: process.env.HEADPLANE_DEBUG_LOG,
+ HEADPLANE_CONFIG_PATH: process.env.HEADPLANE_CONFIG_PATH,
+ HEADPLANE_LOAD_ENV_FILE: process.env.HEADPLANE_LOAD_ENV_FILE,
+ HEADPLANE_LOAD_ENV_OVERRIDES: process.env.HEADPLANE_LOAD_ENV_OVERRIDES,
+ });
+
+ if (envs instanceof type.errors) {
+ log.error('CFGX', 'Error parsing environment variables:');
+ for (const [number, error] of envs.entries()) {
+ log.error('CFGX', ` (${number}): ${error.toString()}`);
+ }
+
+ return;
+ }
+
+ // Load our debug based logger before ANYTHING
+ hp_loadLogger(envs.HEADPLANE_DEBUG_LOG);
+
+ if (envs.HEADPLANE_CONFIG_PATH) {
+ path = envs.HEADPLANE_CONFIG_PATH;
+ }
+
+ await validateConfigPath(path);
+ const rawConfig = await loadConfigFile(path);
+ if (!rawConfig) {
+ log.error('CFGX', 'Failed to load Headplane configuration file');
+ process.exit(1);
+ }
+
+ let config = validateConfig({
+ ...rawConfig,
+ debug: envs.HEADPLANE_DEBUG_LOG,
+ });
+
+ if (envs.HEADPLANE_LOAD_ENV_FILE) {
+ log.info('CFGX', 'Loading a .env file if one exists');
+ await import('dotenv/config');
+ }
+
+ if (config && envs.HEADPLANE_LOAD_ENV_OVERRIDES) {
+ log.info(
+ 'CFGX',
+ 'Loading environment variables to override the configuration',
+ );
+ config = coalesceEnv(config);
+ }
+
+ if (!config) {
+ runtimeLock.release();
+ log.error('CFGX', 'Fatal error encountered with configuration');
+ process.exit(1);
+ }
+
+ if (config.headscale.config_path) {
+ await hs_loadConfig(config);
+ }
+
+ if (config.oidc?.strict_validation) {
+ testOidc(config.oidc);
+ }
+
+ runtimeConfig = config;
+
+ initSessionManager(config.server.cookie_secret, config.server.cookie_secure);
+ runtimeLock.release();
+}
+
+async function validateConfigPath(path: string) {
+ log.debug('CFGX', `Validating Headplane configuration file at ${path}`);
+ try {
+ await access(path, constants.F_OK | constants.R_OK);
+ log.info('CFGX', `Headplane configuration found at ${path}`);
+ return true;
+ } catch (e) {
+ log.error('CFGX', `Headplane configuration not readable at ${path}`);
+ log.error('CFGX', `${e}`);
+ return false;
+ }
+}
+
+async function loadConfigFile(path: string) {
+ log.debug('CFGX', `Loading Headplane configuration file at ${path}`);
+ try {
+ const data = await readFile(path, 'utf8');
+ const configYaml = parseDocument(data);
+ if (configYaml.errors.length > 0) {
+ log.error(
+ 'CFGX',
+ `Error parsing Headplane configuration file at ${path}`,
+ );
+ for (const error of configYaml.errors) {
+ log.error('CFGX', ` ${error.toString()}`);
+ }
+
+ return;
+ }
+
+ if (configYaml.warnings.length > 0) {
+ log.warn(
+ 'CFGX',
+ `Warnings parsing Headplane configuration file at ${path}`,
+ );
+ for (const warning of configYaml.warnings) {
+ log.warn('CFGX', ` ${warning.toString()}`);
+ }
+ }
+
+ return configYaml.toJSON() as unknown;
+ } catch (e) {
+ log.error('CFGX', `Error reading Headplane configuration file at ${path}`);
+ log.error('CFGX', `${e}`);
+ return;
+ }
+}
+
+function coalesceEnv(config: HeadplaneConfig) {
+	const envConfig: Record<string, unknown> = {};
+ const rootKeys: string[] = rootEnvs.props.map((prop) => prop.key);
+
+ // Typescript is still insanely stupid at nullish filtering
+ const vars = Object.entries(process.env).filter(([key, value]) => {
+ if (!value) {
+ return false;
+ }
+
+ if (!key.startsWith('HEADPLANE_')) {
+ return false;
+ }
+
+ // Filter out the rootEnv configurations
+ if (rootKeys.includes(key)) {
+ return false;
+ }
+
+ return true;
+ }) as [string, string][];
+
+ log.debug('CFGX', `Coalescing ${vars.length} environment variables`);
+ for (const [key, value] of vars) {
+ const configPath = key.replace('HEADPLANE_', '').toLowerCase().split('__');
+ log.debug('CFGX', ` ${key}=${new Array(value.length).fill('*').join('')}`);
+
+ let current = envConfig;
+ while (configPath.length > 1) {
+ const path = configPath.shift() as string;
+ if (!(path in current)) {
+ current[path] = {};
+ }
+
+			current = current[path] as Record<string, unknown>;
+ }
+
+ current[configPath[0]] = value;
+ }
+
+ const toMerge = coalesceConfig(envConfig);
+ if (!toMerge) {
+ return;
+ }
+
+ // Deep merge the environment variables into the configuration
+ // This will overwrite any existing values in the configuration
+ return deepMerge(config, toMerge);
+}
+
+type DeepPartial<T> =
+	| {
+			[P in keyof T]?: DeepPartial<T[P]>;
+	  }
+	| undefined;
+
+function deepMerge<T>(target: T, source: DeepPartial<T>): T {
+ if (typeof target !== 'object' || typeof source !== 'object')
+ return source as T;
+ const result = { ...target } as T;
+
+ for (const key in source) {
+ const val = source[key];
+ if (val === undefined) {
+ continue;
+ }
+
+ if (typeof val === 'object') {
+ result[key] = deepMerge(result[key], val);
+ continue;
+ }
+
+ result[key] = val;
+ }
+
+ return result;
+}
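A standalone sketch of the HEADPLANE_<section>__<key> mapping that coalesceEnv() applies above; the helper name and the variables shown are illustrative, not part of this diff:

function envToPartial(env: Record<string, string>) {
	const out: Record<string, unknown> = {};
	for (const [key, value] of Object.entries(env)) {
		if (!key.startsWith('HEADPLANE_')) continue;
		// HEADPLANE_SERVER__PORT -> ['server', 'port']
		const path = key.replace('HEADPLANE_', '').toLowerCase().split('__');
		let current = out;
		while (path.length > 1) {
			const part = path.shift() as string;
			current[part] = current[part] ?? {};
			current = current[part] as Record<string, unknown>;
		}
		current[path[0]] = value;
	}
	return out;
}

// { server: { port: '8080' }, oidc: { client_secret: 's3cr3t' } }
// coalesceConfig() then validates this partial before it is deep-merged over the YAML values.
const overrides = envToPartial({
	HEADPLANE_SERVER__PORT: '8080',
	HEADPLANE_OIDC__CLIENT_SECRET: 's3cr3t',
});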
diff --git a/app/utils/context/parser.ts b/app/utils/context/parser.ts
new file mode 100644
index 0000000..211b712
--- /dev/null
+++ b/app/utils/context/parser.ts
@@ -0,0 +1,78 @@
+import { type } from 'arktype';
+import log from '~/utils/log';
+
+// TODO: ALLOW HEADSCALE CONFIG TO OVERRIDE HEADPLANE CONFIG MAYBE FOR OIDC?
+export type HeadplaneConfig = typeof headplaneConfig.infer;
+
+const stringToBool = type('string | boolean').pipe((v) =>
+	typeof v === 'boolean' ? v : ['1', 'true', 'yes', 'on'].includes(v.toLowerCase()),
+);
+const serverConfig = type({
+ host: 'string.ip',
+ port: type('string | number.integer').pipe((v) => Number(v)),
+ cookie_secret: '32 <= string <= 32',
+ cookie_secure: stringToBool,
+});
+
+const oidcConfig = type({
+ issuer: 'string.url',
+ client_id: 'string',
+ client_secret: 'string',
+ token_endpoint_auth_method:
+ '"client_secret_basic" | "client_secret_post" | "client_secret_jwt"',
+ redirect_uri: 'string.url?',
+ disable_api_key_login: stringToBool,
+ headscale_api_key: 'string',
+ strict_validation: stringToBool.default(true),
+}).onDeepUndeclaredKey('reject');
+
+const headscaleConfig = type({
+ url: 'string.url',
+ public_url: 'string.url?',
+ config_path: 'string?',
+ config_strict: stringToBool,
+}).onDeepUndeclaredKey('reject');
+
+const headplaneConfig = type({
+ debug: stringToBool,
+ server: serverConfig,
+ 'oidc?': oidcConfig,
+ headscale: headscaleConfig,
+}).onDeepUndeclaredKey('reject');
+
+const partialHeadplaneConfig = type({
+ debug: stringToBool,
+ server: serverConfig.partial(),
+ 'oidc?': oidcConfig.partial(),
+ headscale: headscaleConfig.partial(),
+}).partial();
+
+export function validateConfig(config: unknown) {
+ log.debug('CFGX', 'Validating Headplane configuration...');
+ const out = headplaneConfig(config);
+ if (out instanceof type.errors) {
+ log.error('CFGX', 'Error parsing Headplane configuration:');
+ for (const [number, error] of out.entries()) {
+ log.error('CFGX', ` (${number}): ${error.toString()}`);
+ }
+
+ return;
+ }
+
+ log.debug('CFGX', 'Headplane configuration is valid.');
+ return out;
+}
+
+export function coalesceConfig(config: unknown) {
+ log.debug('CFGX', 'Validating coalescing vars for configuration...');
+ const out = partialHeadplaneConfig(config);
+ if (out instanceof type.errors) {
+ log.error('CFGX', 'Error parsing variables:');
+ for (const [number, error] of out.entries()) {
+ log.error('CFGX', ` (${number}): ${error.toString()}`);
+ }
+
+ return;
+ }
+
+ log.debug('CFGX', 'Coalescing variables is valid.');
+ return out;
+}
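A minimal object that should satisfy the schema above, mirroring config.example.yaml further down in this diff; every value is a placeholder, and debug is normally injected by the loader from HEADPLANE_DEBUG_LOG rather than written in the YAML:

import { validateConfig } from '~/utils/context/parser';

const config = validateConfig({
	debug: false,
	server: {
		host: '0.0.0.0',
		port: 3000,
		cookie_secret: 'a'.repeat(32), // must be exactly 32 characters
		cookie_secure: false,
	},
	headscale: {
		url: 'http://headscale:8080',
		config_strict: true,
	},
});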
diff --git a/app/utils/headscale.ts b/app/utils/headscale.ts
index b85741a..ca9f74f 100644
--- a/app/utils/headscale.ts
+++ b/app/utils/headscale.ts
@@ -1,5 +1,5 @@
-import { loadContext } from './config/headplane';
-import log from './log';
+import log from '~/utils/log';
+import { hp_getConfig } from '~/utils/state';
export class HeadscaleError extends Error {
status: number;
@@ -21,16 +21,16 @@ export class FatalError extends Error {
}
export async function healthcheck() {
- const context = await loadContext();
- const prefix = context.headscaleUrl;
+ const context = hp_getConfig();
+ const prefix = context.headscale.url;
log.debug('APIC', 'GET /health');
const health = new URL('health', prefix);
const response = await fetch(health.toString(), {
headers: {
- Accept: 'application/json'
- }
- })
+ Accept: 'application/json',
+ },
+ });
// Intentionally not catching
return response.status === 200;
@@ -41,8 +41,8 @@ export async function pull(url: string, key: string) {
throw new Error('Missing API key, could this be a cookie setting issue?');
}
- const context = await loadContext();
- const prefix = context.headscaleUrl;
+ const context = hp_getConfig();
+ const prefix = context.headscale.url;
log.debug('APIC', 'GET %s', `${prefix}/api/${url}`);
const response = await fetch(`${prefix}/api/${url}`, {
@@ -69,8 +69,8 @@ export async function post(url: string, key: string, body?: unknown) {
throw new Error('Missing API key, could this be a cookie setting issue?');
}
- const context = await loadContext();
- const prefix = context.headscaleUrl;
+ const context = hp_getConfig();
+ const prefix = context.headscale.url;
log.debug('APIC', 'POST %s', `${prefix}/api/${url}`);
const response = await fetch(`${prefix}/api/${url}`, {
@@ -99,8 +99,8 @@ export async function put(url: string, key: string, body?: unknown) {
throw new Error('Missing API key, could this be a cookie setting issue?');
}
- const context = await loadContext();
- const prefix = context.headscaleUrl;
+ const context = hp_getConfig();
+ const prefix = context.headscale.url;
log.debug('APIC', 'PUT %s', `${prefix}/api/${url}`);
const response = await fetch(`${prefix}/api/${url}`, {
@@ -129,8 +129,8 @@ export async function del(url: string, key: string) {
throw new Error('Missing API key, could this be a cookie setting issue?');
}
- const context = await loadContext();
- const prefix = context.headscaleUrl;
+ const context = hp_getConfig();
+ const prefix = context.headscale.url;
log.debug('APIC', 'DELETE %s', `${prefix}/api/${url}`);
const response = await fetch(`${prefix}/api/${url}`, {
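A hypothetical call site for the helpers above: the base URL now comes from hp_getConfig() instead of the old HEADSCALE_URL variable, while the API key still comes from the session cookie ('v1/user' is Headscale's list-users endpoint):

import { pull } from '~/utils/headscale';
import { getSession } from '~/utils/sessions.server';

export async function listUsers(request: Request) {
	const session = await getSession(request.headers.get('Cookie'));
	// pull() itself throws if the key is missing from the session
	return pull('v1/user', session.get('hsApiKey') ?? '');
}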
diff --git a/app/utils/log.ts b/app/utils/log.ts
index 91ff7dc..70b568f 100644
--- a/app/utils/log.ts
+++ b/app/utils/log.ts
@@ -1,4 +1,22 @@
-export default {
+export function hp_loadLogger(debug: boolean) {
+ if (debug) {
+ log.debug = (category: string, message: string, ...args: unknown[]) => {
+ defaultLog('DEBG', category, message, ...args);
+ };
+
+ log.info('CFGX', 'Debug logging enabled');
+ log.info(
+ 'CFGX',
+ 'This is very verbose and should only be used for debugging purposes',
+ );
+ log.info(
+ 'CFGX',
+ 'If you run this in production, your storage WILL fill up quickly',
+ );
+ }
+}
+
+const log = {
info: (category: string, message: string, ...args: unknown[]) => {
defaultLog('INFO', category, message, ...args);
},
@@ -11,11 +29,8 @@ export default {
defaultLog('ERRO', category, message, ...args);
},
- debug: (category: string, message: string, ...args: unknown[]) => {
- if (process.env.DEBUG === 'true') {
- defaultLog('DEBG', category, message, ...args);
- }
- },
+ // Default to a no-op until the logger is initialized
+ debug: (category: string, message: string, ...args: unknown[]) => {},
};
function defaultLog(
@@ -27,3 +42,5 @@ function defaultLog(
const date = new Date().toISOString();
console.log(`${date} (${level}) [${category}] ${message}`, ...args);
}
+
+export default log;
diff --git a/app/utils/mutex.ts b/app/utils/mutex.ts
new file mode 100644
index 0000000..e8b3ea0
--- /dev/null
+++ b/app/utils/mutex.ts
@@ -0,0 +1,32 @@
+class Mutex {
+ private locked = false;
+ private queue: (() => void)[] = [];
+
+ constructor(locked: boolean) {
+ this.locked = locked;
+ }
+
+ acquire() {
+		return new Promise<void>((resolve) => {
+ if (!this.locked) {
+ this.locked = true;
+ resolve();
+ } else {
+ this.queue.push(resolve);
+ }
+ });
+ }
+
+ release() {
+ if (this.queue.length > 0) {
+ const next = this.queue.shift();
+ next?.();
+ } else {
+ this.locked = false;
+ }
+ }
+}
+
+export default function mutex(locked = false) {
+ return new Mutex(locked);
+}
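A minimal usage sketch of the mutex above: acquire() resolves immediately when the lock is free and otherwise queues until the current holder calls release():

import mutex from '~/utils/mutex';

const lock = mutex();

async function withLock(fn: () => Promise<void>) {
	await lock.acquire();
	try {
		await fn();
	} finally {
		lock.release();
	}
}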
diff --git a/app/utils/oidc.ts b/app/utils/oidc.ts
index 765d874..f228aad 100644
--- a/app/utils/oidc.ts
+++ b/app/utils/oidc.ts
@@ -1,22 +1,12 @@
import * as client from 'openid-client';
-import { z } from 'zod';
import log from '~/utils/log';
-
-const oidcConfigSchema = z.object({
- issuer: z.string(),
- clientId: z.string(),
- clientSecret: z.string(),
- redirectUri: z.string().optional(),
- tokenEndpointAuthMethod: z
- .enum(['client_secret_post', 'client_secret_basic', 'client_secret_jwt'])
- .default('client_secret_basic'),
-});
+import { HeadplaneConfig } from '~/utils/state';
declare global {
const __PREFIX__: string;
}
-export type OidcConfig = z.infer<typeof oidcConfigSchema>;
+type OidcConfig = NonNullable<HeadplaneConfig['oidc']>;
// We try our best to infer the callback URI of our Headplane instance
// By default it is always //oidc/callback
@@ -64,9 +54,9 @@ function clientAuthMethod(
export async function beginAuthFlow(oidc: OidcConfig, redirect_uri: string) {
const config = await client.discovery(
new URL(oidc.issuer),
- oidc.clientId,
- oidc.clientSecret,
- clientAuthMethod(oidc.tokenEndpointAuthMethod)(oidc.clientSecret),
+ oidc.client_id,
+ oidc.client_secret,
+ clientAuthMethod(oidc.token_endpoint_auth_method)(oidc.client_secret),
);
const codeVerifier = client.randomPKCECodeVerifier();
@@ -77,7 +67,7 @@ export async function beginAuthFlow(oidc: OidcConfig, redirect_uri: string) {
scope: 'openid profile email',
code_challenge: codeChallenge,
code_challenge_method: 'S256',
- token_endpoint_auth_method: oidc.tokenEndpointAuthMethod,
+ token_endpoint_auth_method: oidc.token_endpoint_auth_method,
state: client.randomState(),
};
@@ -106,9 +96,9 @@ interface FlowOptions {
export async function finishAuthFlow(oidc: OidcConfig, options: FlowOptions) {
const config = await client.discovery(
new URL(oidc.issuer),
- oidc.clientId,
- oidc.clientSecret,
- clientAuthMethod(oidc.tokenEndpointAuthMethod)(oidc.clientSecret),
+ oidc.client_id,
+ oidc.client_secret,
+ clientAuthMethod(oidc.token_endpoint_auth_method)(oidc.client_secret),
);
let subject: string;
@@ -192,9 +182,9 @@ export async function testOidc(oidc: OidcConfig) {
log.debug('OIDC', 'Discovering OIDC configuration from %s', oidc.issuer);
const config = await client.discovery(
new URL(oidc.issuer),
- oidc.clientId,
- oidc.clientSecret,
- clientAuthMethod(oidc.tokenEndpointAuthMethod)(oidc.clientSecret),
+ oidc.client_id,
+ oidc.client_secret,
+ clientAuthMethod(oidc.token_endpoint_auth_method)(oidc.client_secret),
);
const meta = config.serverMetadata();
@@ -217,13 +207,13 @@ export async function testOidc(oidc: OidcConfig) {
if (meta.token_endpoint_auth_methods_supported) {
if (
meta.token_endpoint_auth_methods_supported.includes(
- oidc.tokenEndpointAuthMethod,
+ oidc.token_endpoint_auth_method,
) === false
) {
log.error(
'OIDC',
'OIDC server does not support %s',
- oidc.tokenEndpointAuthMethod,
+ oidc.token_endpoint_auth_method,
);
return false;
}
diff --git a/app/utils/res.ts b/app/utils/res.ts
index c6f0258..1cdf80f 100644
--- a/app/utils/res.ts
+++ b/app/utils/res.ts
@@ -3,3 +3,7 @@ import { data } from 'react-router';
export function send<T>(payload: T, init?: number | ResponseInit) {
return data(payload, init);
}
+
+export function send401<T>(payload: T) {
+ return data(payload, { status: 401 });
+}
diff --git a/app/utils/sessions.server.ts b/app/utils/sessions.server.ts
index 9a50af8..8c1350c 100644
--- a/app/utils/sessions.server.ts
+++ b/app/utils/sessions.server.ts
@@ -1,4 +1,8 @@
-import { Session, SessionStorage, createCookieSessionStorage } from 'react-router';
+import {
+ Session,
+ SessionStorage,
+ createCookieSessionStorage,
+} from 'react-router';
export type SessionData = {
hsApiKey: string;
@@ -23,7 +27,7 @@ type SessionStore = SessionStorage;
// TODO: Add args to this function to allow custom domain/config
let sessionStorage: SessionStore | null = null;
-export function initSessionManager() {
+export function initSessionManager(secret: string, secure: boolean) {
if (sessionStorage) {
return;
}
@@ -35,8 +39,8 @@ export function initSessionManager() {
maxAge: 60 * 60 * 24, // 24 hours
path: '/',
sameSite: 'lax',
- secrets: [process.env.COOKIE_SECRET!],
- secure: process.env.COOKIE_SECURE !== 'false',
+ secrets: [secret],
+ secure,
},
});
}
@@ -49,6 +53,20 @@ export function getSession(cookie: string | null) {
return sessionStorage.getSession(cookie);
}
+export async function auth(request: Request) {
+ if (!sessionStorage) {
+ return false;
+ }
+
+ const cookie = request.headers.get('Cookie');
+ const session = await sessionStorage.getSession(cookie);
+ if (!session.has('hsApiKey')) {
+ return false;
+ }
+
+ return true;
+}
+
export function destroySession(session: Session) {
if (!sessionStorage) {
throw new Error('Session manager not initialized');
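A hypothetical route loader showing how the new auth() helper pairs with send401() from app/utils/res.ts; this route is not part of the diff:

import type { LoaderFunctionArgs } from 'react-router';
import { send401 } from '~/utils/res';
import { auth } from '~/utils/sessions.server';

export async function loader({ request }: LoaderFunctionArgs) {
	if (!(await auth(request))) {
		return send401({ message: 'Unauthorized' });
	}

	// ...load data for an authenticated session...
}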
diff --git a/app/utils/state.ts b/app/utils/state.ts
new file mode 100644
index 0000000..2033f36
--- /dev/null
+++ b/app/utils/state.ts
@@ -0,0 +1,5 @@
+export { hp_getConfig } from '~/utils/context/loader';
+export { hs_getConfig } from '~/utils/config/loader';
+
+export type { HeadplaneConfig } from '~/utils/context/parser';
+export type { HeadscaleConfig } from '~/utils/config/parser';
diff --git a/config.example.yaml b/config.example.yaml
new file mode 100644
index 0000000..0b3d901
--- /dev/null
+++ b/config.example.yaml
@@ -0,0 +1,57 @@
+# Configuration for the Headplane server and web application
+server:
+ host: "0.0.0.0"
+ port: 3000
+
+ # The secret used to encode and decode web sessions
+ # Ensure that this is exactly 32 characters long
+ cookie_secret: ""
+
+ # Should the cookies only work over HTTPS?
+ # Set to false if running via HTTP without a proxy
+ # (I recommend this is true in production)
+ cookie_secure: true
+
+# Headscale specific settings to allow Headplane to talk
+# to Headscale and access deep integration features
+headscale:
+ # The URL to your Headscale instance
+ # (All API requests are routed through this URL)
+ url: "http://headscale:5000"
+
+ # Optional, public URL if they differ
+ # This affects certain parts of the web UI
+ # public_url: "https://headscale.example.com"
+
+ # Path to the Headscale configuration file
+ # This is optional, but HIGHLY recommended for the best experience
+ # If this is read only, Headplane will show your configuration settings
+ # in the Web UI, but they cannot be changed.
+ config_path: "/etc/headscale/config.yaml"
+
+ # Headplane internally validates the Headscale configuration
+ # to ensure that it changes the configuration in a safe way.
+ # If you want to disable this validation, set this to false.
+ config_strict: true
+
+# OIDC Configuration for simpler authentication
+# (This is optional, but recommended for the best experience)
+oidc:
+ issuer: "https://accounts.google.com"
+ client_id: "your-client-id"
+ client_secret: ""
+ disable_api_key_login: false
+ token_endpoint_auth_method: "client_secret_post"
+
+ # If you are using OIDC, you need to generate an API key
+ # that can be used to authenticate other sessions when signing in.
+ #
+ # This can be done with `headscale apikeys create --expiration 999d`
+ headscale_api_key: ""
+
+  # Optional, but highly recommended, otherwise Headplane
+  # will attempt to automatically guess this from the issuer
+  #
+  # This should point to your publicly accessible URL
+ # for your Headplane instance with /admin/oidc/callback
+ redirect_uri: "http://localhost:3000/admin/oidc/callback"
diff --git a/package.json b/package.json
index 36ace97..6d65f77 100644
--- a/package.json
+++ b/package.json
@@ -26,6 +26,7 @@
"@uiw/codemirror-theme-github": "^4.23.7",
"@uiw/codemirror-theme-xcode": "^4.23.8",
"@uiw/react-codemirror": "^4.23.7",
+ "arktype": "^2.0.4",
"clsx": "^2.1.1",
"dotenv": "^16.4.7",
"isbot": "^5.1.19",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 1756fa6..e87c75e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -61,6 +61,9 @@ importers:
'@uiw/react-codemirror':
specifier: ^4.23.7
version: 4.23.7(@babel/runtime@7.26.0)(@codemirror/autocomplete@6.18.2(@codemirror/language@6.10.8)(@codemirror/state@6.5.0)(@codemirror/view@6.36.1)(@lezer/common@1.2.3))(@codemirror/language@6.10.8)(@codemirror/lint@6.8.2)(@codemirror/search@6.5.7)(@codemirror/state@6.5.0)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.36.1)(codemirror@6.0.1(@lezer/common@1.2.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
+ arktype:
+ specifier: ^2.0.4
+ version: 2.0.4
clsx:
specifier: ^2.1.1
version: 2.1.1
@@ -187,6 +190,12 @@ packages:
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
engines: {node: '>=6.0.0'}
+ '@ark/schema@0.39.0':
+ resolution: {integrity: sha512-LQbQUb3Sj461LgklXObAyUJNtsUUCBxZlO2HqRLYvRSqpStm0xTMrXn51DwBNNxeSULvKVpXFwoxiSec9kwKww==}
+
+ '@ark/util@0.39.0':
+ resolution: {integrity: sha512-90APHVklk8BP4kku7hIh1BgrhuyKYqoZ4O7EybtFRo7cDl9mIyc/QUbGvYDg//73s0J2H0I/gW9pzroA1R4IBQ==}
+
'@babel/code-frame@7.26.2':
resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==}
engines: {node: '>=6.9.0'}
@@ -1675,6 +1684,9 @@ packages:
argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
+ arktype@2.0.4:
+ resolution: {integrity: sha512-S68rWVDnJauwH7/QCm8zCUM3aTe9Xk6oRihdcc3FSUAtxCo/q1Fwq46JhcwB5Ufv1YStwdQRz+00Y/URlvbhAQ==}
+
asn1@0.2.6:
resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==}
@@ -3018,6 +3030,12 @@ snapshots:
'@jridgewell/gen-mapping': 0.3.5
'@jridgewell/trace-mapping': 0.3.25
+ '@ark/schema@0.39.0':
+ dependencies:
+ '@ark/util': 0.39.0
+
+ '@ark/util@0.39.0': {}
+
'@babel/code-frame@7.26.2':
dependencies:
'@babel/helper-validator-identifier': 7.25.9
@@ -4924,6 +4942,11 @@ snapshots:
argparse@2.0.1: {}
+ arktype@2.0.4:
+ dependencies:
+ '@ark/schema': 0.39.0
+ '@ark/util': 0.39.0
+
asn1@0.2.6:
dependencies:
safer-buffer: 2.1.2