chore: remove old server code

This commit is contained in:
Aarnav Tale 2025-03-24 13:57:47 -04:00
parent 5918d0e501
commit cac64a6fbe
15 changed files with 0 additions and 1172 deletions

View File

@ -1,11 +0,0 @@
import { hp_agentRequest } from '~server/ws/data';
// Shape of the per-request application context handed to the React Router
// handler; exposes the server-side agent-request helper to route loaders.
export interface AppContext {
	hp_agentRequest: typeof hp_agentRequest;
}
/**
 * Builds the application context passed into each framework request.
 * Currently it only carries the WebSocket agent-request helper.
 */
export default function appContext(): AppContext {
	const context: AppContext = { hp_agentRequest };
	return context;
}

View File

@ -1,85 +0,0 @@
import type { Configuration } from 'openid-client';
import type { Agent } from 'undici';
import type { WebSocket } from 'ws';
import type { HostInfo } from '~/types';
import type { HeadplaneConfig } from '~server/context/parser';
import type { Logger } from '~server/utils/log';
import type { TimedCache } from '~server/ws/cache';
// This is a stupid workaround for how the Remix import context works
// Even though they run in the same Node instance, they have different
// contexts which means importing this in the app code will not work
// because it will be a different instance of the module.
//
// Instead we can rely on globalThis to share the module between the
// different contexts and use some helper functions to make it easier.
// As a part of this global module, we also define all our singletons
// here in order to avoid polluting the global scope and instead just using
// the `__headplane_server_context` object.
// Shape of the shared global state: the parsed configuration plus all
// lazily-initialized server singletons.
interface ServerContext {
	config: HeadplaneConfig;
	singletons: ServerSingletons;
}

// Registry of process-wide singletons stored on globalThis.
interface ServerSingletons {
	// undici Agent used for Headscale API requests (may carry a custom CA)
	api_agent: Agent;
	// Process-wide logger (debug channel enabled via configuration)
	logger: Logger;
	// openid-client configuration for the OIDC flow
	oidc_client: Configuration;
	// Connected agent WebSockets keyed by tailnet ID
	ws_agents: Map<string, WebSocket>;
	// TTL cache of per-node HostInfo reported by agents
	ws_agent_data: TimedCache<HostInfo>;
	// Fan-out helper that requests fresh data for a list of node IDs
	ws_fetch_data: (nodeList: string[]) => Promise<void>;
}
// These declarations are separate to prevent the Remix context
// from modifying the globalThis object and causing issues with
// the server context.
declare namespace globalThis {
	// Partially-initialized view: fields start out null/{} until the
	// loaders below populate them.
	let __headplane_server_context: {
		[K in keyof ServerContext]: ServerContext[K] | null | object;
	};
}

// We need to check if the context is already initialized and set a default
// value. This is fine as a side-effect since it's just setting up a framework
// for the object to get modified later.
if (!globalThis.__headplane_server_context) {
	globalThis.__headplane_server_context = {
		config: null,
		singletons: {},
	};
}

// Fully-initialized view used by the accessors below; by the time they
// run, hp_setConfig()/hp_setSingleton() are expected to have populated it.
declare global {
	const __headplane_server_context: ServerContext;
}
/**
 * Returns the currently loaded Headplane configuration.
 * Assumes hp_setConfig() has already populated the shared context.
 */
export function hp_getConfig(): HeadplaneConfig {
	const ctx = __headplane_server_context;
	return ctx.config;
}
/** Stores the (re)loaded configuration on the shared global context. */
export function hp_setConfig(config: HeadplaneConfig): void {
	const ctx = __headplane_server_context;
	ctx.config = config;
}
/**
 * Looks up a required singleton from the shared server context.
 * @throws Error when the singleton has not been initialized yet.
 */
export function hp_getSingleton<T extends keyof ServerSingletons>(
	key: T,
): ServerSingletons[T] {
	const { singletons } = __headplane_server_context;
	if (singletons[key]) {
		return singletons[key];
	}

	throw new Error(`Singleton ${key} not initialized`);
}
/**
 * Looks up a singleton without asserting it exists; returns undefined
 * when the singleton has not (yet) been initialized.
 */
export function hp_getSingletonUnsafe<T extends keyof ServerSingletons>(
	key: T,
): ServerSingletons[T] | undefined {
	const { singletons } = __headplane_server_context;
	return singletons[key];
}
/**
 * Registers (or replaces) a singleton on the shared server context.
 *
 * The generic is now keyed on the singleton NAME so the value type is
 * tied to the key. The previous signature only constrained the value to
 * the union of all singleton types, which allowed e.g. storing a Logger
 * under 'api_agent' and needed an `as` cast to compile at all.
 */
export function hp_setSingleton<T extends keyof ServerSingletons>(
	key: T,
	value: ServerSingletons[T],
): void {
	__headplane_server_context.singletons[key] = value;
}

View File

@ -1,252 +0,0 @@
import { constants, access, readFile } from 'node:fs/promises';
import { env } from 'node:process';
import { type } from 'arktype';
import dotenv from 'dotenv';
import { Agent } from 'undici';
import { parseDocument } from 'yaml';
import { testOidc } from '~/utils/oidc';
import log, { hp_loadLogger } from '~server/utils/log';
import mutex from '~server/utils/mutex';
import { hp_setConfig, hp_setSingleton } from './global';
import { HeadplaneConfig, coalesceConfig, validateConfig } from './parser';
// Accepts the common truthy strings ('1', 'true', 'yes', 'on', any case);
// everything else — including undefined — coerces to false.
const envBool = type('string | undefined').pipe((v) => {
	return ['1', 'true', 'yes', 'on'].includes(v?.toLowerCase() ?? '');
});

// Environment variables that control the loader itself (not the config);
// unknown keys are rejected.
const rootEnvs = type({
	HEADPLANE_DEBUG_LOG: envBool,
	HEADPLANE_LOAD_ENV_OVERRIDES: envBool,
	HEADPLANE_CONFIG_PATH: 'string | undefined',
}).onDeepUndeclaredKey('reject');

// Fallback config location when HEADPLANE_CONFIG_PATH is unset
const HEADPLANE_DEFAULT_CONFIG_PATH = '/etc/headplane/config.yaml';

// Guards hp_loadConfig() against concurrent reloads
const runtimeLock = mutex();
// hp_loadConfig should ONLY be called when we explicitly need to reload
// the configuration. This should be done when the configuration file
// changes and we ignore environment variable changes.
//
// TODO: File watching for hp_loadConfig()
/**
 * (Re)loads the Headplane configuration from disk, applies optional
 * environment-variable overrides, wires the Headscale API agent and
 * publishes the result on the global server context.
 * Exits the process on a fatal configuration error.
 */
export async function hp_loadConfig() {
	// acquire() is promise-based: it MUST be awaited, otherwise concurrent
	// reloads all proceed immediately and the lock is a no-op.
	await runtimeLock.acquire();
	let path = HEADPLANE_DEFAULT_CONFIG_PATH;
	const envs = rootEnvs({
		HEADPLANE_DEBUG_LOG: env.HEADPLANE_DEBUG_LOG,
		HEADPLANE_CONFIG_PATH: env.HEADPLANE_CONFIG_PATH,
		HEADPLANE_LOAD_ENV_OVERRIDES: env.HEADPLANE_LOAD_ENV_OVERRIDES,
	});

	if (envs instanceof type.errors) {
		log.error('CFGX', 'Error parsing environment variables:');
		for (const [number, error] of envs.entries()) {
			log.error('CFGX', ` (${number}): ${error.toString()}`);
		}

		// Release before bailing out so a later reload attempt can run;
		// the original returned here with the lock still held.
		runtimeLock.release();
		return;
	}

	// Load our debug based logger before ANYTHING
	await hp_loadLogger(envs.HEADPLANE_DEBUG_LOG);
	if (envs.HEADPLANE_CONFIG_PATH) {
		path = envs.HEADPLANE_CONFIG_PATH;
	}

	await validateConfigPath(path);
	const rawConfig = await loadConfigFile(path);
	if (!rawConfig) {
		log.error('CFGX', 'Failed to load Headplane configuration file');
		process.exit(1);
	}

	let config = validateConfig({
		...rawConfig,
		debug: envs.HEADPLANE_DEBUG_LOG,
	});

	if (config && envs.HEADPLANE_LOAD_ENV_OVERRIDES) {
		log.info('CFGX', 'Loading a .env file if one exists');
		dotenv.config();
		log.info(
			'CFGX',
			'Loading environment variables to override the configuration',
		);

		config = coalesceEnv(config);
	}

	if (!config) {
		runtimeLock.release();
		log.error('CFGX', 'Fatal error encountered with configuration');
		process.exit(1);
	}

	// OIDC Related Checks
	if (config.oidc) {
		if (!config.oidc.client_secret && !config.oidc.client_secret_path) {
			log.error('CFGX', 'OIDC configuration is missing a secret, disabling');
			log.error(
				'CFGX',
				'Please specify either `oidc.client_secret` or `oidc.client_secret_path`',
			);
		}

		if (config.oidc?.strict_validation) {
			const result = await testOidc(config.oidc);
			if (!result) {
				log.error('CFGX', 'OIDC configuration failed validation, disabling');
			}
		}
	}

	if (config.headscale.tls_cert_path) {
		log.debug('CFGX', 'Attempting to load supplied Headscale TLS cert');
		try {
			const data = await readFile(config.headscale.tls_cert_path, 'utf8');
			log.info('CFGX', 'Headscale TLS cert loaded successfully');
			hp_setSingleton(
				'api_agent',
				new Agent({
					connect: {
						ca: data.trim(),
					},
				}),
			);
		} catch (error) {
			// NOTE(review): on failure no 'api_agent' singleton is registered
			// at all, so later hp_getSingleton('api_agent') calls would throw
			// — confirm whether a default Agent should be set here instead.
			log.error('CFGX', 'Failed to load Headscale TLS cert');
			log.debug('CFGX', 'Error Details: %o', error);
		}
	} else {
		hp_setSingleton('api_agent', new Agent());
	}

	hp_setConfig(config);
	runtimeLock.release();
}
/**
 * Checks that the configuration file exists and is readable.
 * Logs the outcome and returns true on success, false otherwise.
 */
async function validateConfigPath(path: string) {
	log.debug('CFGX', `Validating Headplane configuration file at ${path}`);
	try {
		await access(path, constants.F_OK | constants.R_OK);
	} catch (e) {
		log.error('CFGX', `Headplane configuration not readable at ${path}`);
		log.error('CFGX', `${e}`);
		return false;
	}

	log.info('CFGX', `Headplane configuration found at ${path}`);
	return true;
}
/**
 * Reads and YAML-parses the configuration file.
 * Returns the parsed document as a plain object, or undefined when the
 * file cannot be read or contains parse errors (warnings are tolerated
 * and only logged).
 */
async function loadConfigFile(path: string) {
	log.debug('CFGX', `Loading Headplane configuration file at ${path}`);
	try {
		const raw = await readFile(path, 'utf8');
		const document = parseDocument(raw);
		const { errors, warnings } = document;

		if (errors.length > 0) {
			log.error(
				'CFGX',
				`Error parsing Headplane configuration file at ${path}`,
			);
			for (const error of errors) {
				log.error('CFGX', ` ${error.toString()}`);
			}

			return;
		}

		if (warnings.length > 0) {
			log.warn(
				'CFGX',
				`Warnings parsing Headplane configuration file at ${path}`,
			);
			for (const warning of warnings) {
				log.warn('CFGX', ` ${warning.toString()}`);
			}
		}

		return document.toJSON() as unknown;
	} catch (e) {
		log.error('CFGX', `Error reading Headplane configuration file at ${path}`);
		log.error('CFGX', `${e}`);
		return;
	}
}
/**
 * Builds a partial configuration from HEADPLANE_* environment variables
 * (double underscores nest keys: HEADPLANE_SERVER__PORT -> server.port)
 * and deep-merges it over the loaded configuration.
 * Returns undefined when the collected overrides fail validation.
 */
function coalesceEnv(config: HeadplaneConfig) {
	const envConfig: Record<string, unknown> = {};
	const rootKeys: string[] = rootEnvs.props.map((prop) => prop.key);

	// Typescript is still insanely stupid at nullish filtering
	const vars = Object.entries(env).filter(([key, value]) => {
		if (!value) {
			return false;
		}

		if (!key.startsWith('HEADPLANE_')) {
			return false;
		}

		// Filter out the rootEnv configurations
		if (rootKeys.includes(key)) {
			return false;
		}

		return true;
	}) as [string, string][];

	log.debug('CFGX', `Coalescing ${vars.length} environment variables`);
	for (const [key, value] of vars) {
		const configPath = key.replace('HEADPLANE_', '').toLowerCase().split('__');
		// Mask the value in logs; repeat() replaces the needless
		// fill-an-array-and-join construction of the original.
		log.debug('CFGX', ` ${key}=${'*'.repeat(value.length)}`);

		// Walk/create nested objects for every path segment but the last
		let current = envConfig;
		while (configPath.length > 1) {
			const path = configPath.shift() as string;
			if (!(path in current)) {
				current[path] = {};
			}

			current = current[path] as Record<string, unknown>;
		}

		current[configPath[0]] = value;
	}

	const toMerge = coalesceConfig(envConfig);
	if (!toMerge) {
		return;
	}

	// Deep merge the environment variables into the configuration
	// This will overwrite any existing values in the configuration
	return deepMerge(config, toMerge);
}
// Recursive partial: every property (at every depth) becomes optional.
type DeepPartial<T> =
	| {
			[P in keyof T]?: DeepPartial<T[P]>;
	  }
	| undefined;

/**
 * Recursively merges `source` into `target`, returning a new object
 * (top level is copied; `target` itself is not mutated).
 *
 * Fixes over the original:
 * - `typeof null === 'object'`, so null values used to be spread/recursed
 *   into; null is now treated as an explicit leaf override.
 * - Arrays were spread into plain index-keyed objects when both sides
 *   were arrays; arrays now replace wholesale, as leaf values.
 */
function deepMerge<T>(target: T, source: DeepPartial<T>): T {
	if (typeof source !== 'object') {
		// Primitives (and undefined) replace wholesale — matches the
		// original behavior for non-object sources.
		return source as T;
	}
	if (source === null) {
		// No overrides to apply; keep the target as-is.
		return target;
	}
	if (typeof target !== 'object' || target === null || Array.isArray(source)) {
		// Nothing sensible to merge into, or the override is an array:
		// arrays are leaf values (spreading one would turn it into a
		// plain object with numeric string keys).
		return source as unknown as T;
	}

	const result = { ...target } as T;
	const src = source as Record<string, unknown>;
	const dst = result as Record<string, unknown>;
	for (const key of Object.keys(src)) {
		const val = src[key];
		if (val === undefined) {
			continue;
		}

		if (val !== null && typeof val === 'object' && !Array.isArray(val)) {
			dst[key] = deepMerge(dst[key], val as DeepPartial<unknown>);
			continue;
		}

		dst[key] = val;
	}

	return result;
}

View File

@ -1,119 +0,0 @@
import { type } from 'arktype';
import log from '~server/utils/log';
// Inferred TypeScript type of the full, validated configuration
export type HeadplaneConfig = typeof headplaneConfig.infer;

// Coerces a string or boolean to boolean via Boolean().
// NOTE(review): Boolean('false') is true — presumably falsy values are
// expected to arrive as ''/absent; confirm against the env-override path.
const stringToBool = type('string | boolean').pipe((v) => Boolean(v));

// headplane's own HTTP server settings
const serverConfig = type({
	host: 'string.ip',
	port: type('string | number.integer').pipe((v) => Number(v)),
	// Exactly 32 characters (session cookie signing secret)
	cookie_secret: '32 <= string <= 32',
	cookie_secure: stringToBool,
	agent: type({
		authkey: 'string = ""',
		ttl: 'number.integer = 180000', // Default to 3 minutes
		cache_path: 'string = "/var/lib/headplane/agent_cache.json"',
	})
		.onDeepUndeclaredKey('reject')
		.default(() => ({
			authkey: '',
			ttl: 180000,
			cache_path: '/var/lib/headplane/agent_cache.json',
		})),
});

// OIDC login settings; either client_secret or client_secret_path must be
// supplied (checked by the loader, not by this schema)
const oidcConfig = type({
	issuer: 'string.url',
	client_id: 'string',
	client_secret: 'string?',
	client_secret_path: 'string?',
	token_endpoint_auth_method:
		'"client_secret_basic" | "client_secret_post" | "client_secret_jwt"',
	redirect_uri: 'string.url?',
	disable_api_key_login: stringToBool,
	headscale_api_key: 'string',
	strict_validation: stringToBool.default(true),
}).onDeepUndeclaredKey('reject');

// Connection to the Headscale instance; trailing slash on `url` is stripped
const headscaleConfig = type({
	url: type('string.url').pipe((v) => (v.endsWith('/') ? v.slice(0, -1) : v)),
	tls_cert_path: 'string?',
	public_url: 'string.url?',
	config_path: 'string?',
	config_strict: stringToBool,
}).onDeepUndeclaredKey('reject');

// Optional integration backends for locating/managing Headscale
const dockerConfig = type({
	enabled: stringToBool,
	container_name: 'string',
	socket: 'string = "unix:///var/run/docker.sock"',
});

const kubernetesConfig = type({
	enabled: stringToBool,
	pod_name: 'string',
	validate_manifest: stringToBool,
});

const procConfig = type({
	enabled: stringToBool,
});

const integrationConfig = type({
	'docker?': dockerConfig,
	'kubernetes?': kubernetesConfig,
	'proc?': procConfig,
}).onDeepUndeclaredKey('reject');

// Full (strict) schema used for the on-disk configuration file
const headplaneConfig = type({
	debug: stringToBool,
	server: serverConfig,
	'oidc?': oidcConfig,
	'integration?': integrationConfig,
	headscale: headscaleConfig,
}).onDeepUndeclaredKey('reject');

// Partial variants used to validate environment-variable overrides,
// which may legitimately set only a subset of keys
const partialIntegrationConfig = type({
	'docker?': dockerConfig.partial(),
	'kubernetes?': kubernetesConfig.partial(),
	'proc?': procConfig.partial(),
}).partial();

const partialHeadplaneConfig = type({
	debug: stringToBool,
	server: serverConfig.partial(),
	'oidc?': oidcConfig.partial(),
	'integration?': partialIntegrationConfig,
	headscale: headscaleConfig.partial(),
}).partial();
/**
 * Validates a full configuration object against the strict schema.
 * Returns the normalized config, or undefined after logging every
 * validation error.
 */
export function validateConfig(config: unknown) {
	log.debug('CFGX', 'Validating Headplane configuration...');
	const result = headplaneConfig(config);
	if (!(result instanceof type.errors)) {
		log.debug('CFGX', 'Headplane configuration is valid.');
		return result;
	}

	log.error('CFGX', 'Error parsing Headplane configuration:');
	for (const [number, error] of result.entries()) {
		log.error('CFGX', ` (${number}): ${error.toString()}`);
	}

	return;
}
/**
 * Validates environment-variable overrides against the partial schema.
 * Returns the normalized partial config, or undefined after logging
 * every validation error.
 */
export function coalesceConfig(config: unknown) {
	log.debug('CFGX', 'Validating coalescing vars for configuration...');
	const result = partialHeadplaneConfig(config);
	if (!(result instanceof type.errors)) {
		log.debug('CFGX', 'Coalescing variables is valid.');
		return result;
	}

	log.error('CFGX', 'Error parsing variables:');
	for (const [number, error] of result.entries()) {
		log.error('CFGX', ` (${number}): ${error.toString()}`);
	}

	return;
}

View File

@ -1,7 +0,0 @@
import { createRequestHandler } from 'react-router';
// Development-mode request handler. Lives in its own file so Vite's
// ssrLoadModule can import the virtual server build with hot reload.
export default createRequestHandler(
	// @ts-expect-error: React Router Vite plugin
	() => import('virtual:react-router/server-build'),
	'development',
);

View File

@ -1,56 +0,0 @@
import { type ViteDevServer, createServer } from 'vite';
import log from '~server/utils/log';
// TODO: Remove env.NODE_ENV
// Module-level handle so the HMR hooks below can close the dev server
let server: ViteDevServer | undefined;

// Boots a middleware-mode Vite dev server and SSR-loads the dev request
// handler; returns both so the listener can run Vite middleware first.
export async function loadDevtools() {
	log.info('DEVX', 'Starting Vite Development server');
	process.env.NODE_ENV = 'development';

	// This is loading the ROOT vite.config.ts
	server = await createServer({
		server: {
			middlewareMode: true,
		},
	});

	// We can't just do ssrLoadModule for virtual:react-router/server-build
	// because for hot reload to work server side it needs to be imported
	// using builtin import in its own file.
	const handler = await server.ssrLoadModule('./server/dev/dev-handler.ts');
	return {
		server,
		handler: handler.default,
	};
}
/**
 * Invokes the dev request handler and, on failure, rewrites the SSR
 * stack trace through Vite so it points at original source files.
 * The error is logged and rethrown so callers still see the failure.
 */
export async function stacksafeTry(
	devtools: {
		server: ViteDevServer;
		handler: (req: Request, context: unknown) => Promise<Response>;
	},
	req: Request,
	context: unknown,
) {
	try {
		return await devtools.handler(req, context);
	} catch (error) {
		log.error('DEVX', 'Error in request handler', error);
		// instanceof already implies a non-null object
		if (error instanceof Error) {
			devtools.server.ssrFixStacktrace(error);
		}

		throw error;
	}
}
// Close the dev server on Vite full reloads / module disposal so the
// old instance does not keep its watchers and middleware alive.
if (import.meta.hot) {
	import.meta.hot.on('vite:beforeFullReload', () => {
		server?.close();
	});

	import.meta.hot.dispose(() => {
		server?.close();
	});
}

View File

@ -1,52 +0,0 @@
import { constants, access } from 'node:fs/promises';
import { createServer } from 'node:http';
import { WebSocketServer } from 'ws';
import { hp_getConfig } from '~server/context/global';
import { hp_loadConfig } from '~server/context/loader';
import { listener } from '~server/listener';
import { hp_loadAgentCache } from '~server/ws/data';
import { initWebsocket } from '~server/ws/socket';
import log from './utils/log';
log.info('SRVX', 'Running Node.js %s', process.versions.node);

// Sanity-check that dependencies were installed before doing anything else
try {
	await access('./node_modules/react-router', constants.F_OK | constants.R_OK);
	log.info('SRVX', 'Found dependencies');
} catch (error) {
	log.error('SRVX', 'No dependencies found. Please run `npm install`');
	console.error(error);
	process.exit(1);
}

// Load configuration before the HTTP server starts taking requests
await hp_loadConfig();
const server = createServer(listener);
const context = hp_getConfig();

// The agent WebSocket endpoint is only enabled when an authkey is configured
if (context.server.agent.authkey.length > 0) {
	const ws = new WebSocketServer({ server });
	initWebsocket(ws, context.server.agent.authkey);
	await hp_loadAgentCache(
		context.server.agent.ttl,
		context.server.agent.cache_path,
	);
}

server.listen(context.server.port, context.server.host, () => {
	log.info(
		'SRVX',
		'Running on %s:%s',
		context.server.host,
		context.server.port,
	);
});

// Shut the HTTP server down cleanly across Vite HMR reloads
if (import.meta.hot) {
	import.meta.hot.on('vite:beforeFullReload', () => {
		server.close();
	});

	import.meta.hot.dispose(() => {
		server.close();
	});
}

View File

@ -1,149 +0,0 @@
import { createReadStream, existsSync, statSync } from 'node:fs';
import { type RequestListener } from 'node:http';
import { join, resolve } from 'node:path';
import {
createReadableStreamFromReadable,
writeReadableStreamToWritable,
} from '@react-router/node';
import mime from 'mime/lite';
import type { RequestHandler } from 'react-router';
import type { ViteDevServer } from 'vite';
import appContext from '~server/context/app';
import { loadDevtools, stacksafeTry } from '~server/dev/hot-server';
import prodBuild from '~server/prod-handler';
import { hp_loadConfig } from './context/loader';
declare global {
	// Prefix is a build-time constant
	const __hp_prefix: string;
}

// biome-ignore lint/suspicious/noExplicitAny: Who cares man
let devtools: { server: ViteDevServer; handler: any } | undefined = undefined;
let prodHandler: RequestHandler | undefined = undefined;
// Lazy-init guard: globals are loaded on the first request, not at import
let loaded = false;

// Loads config plus exactly one of the dev (Vite) or prod (built) handlers,
// selected by the build-time DEV/PROD flags
async function loadGlobals() {
	await hp_loadConfig();
	devtools = import.meta.env.DEV ? await loadDevtools() : undefined;
	prodHandler = import.meta.env.PROD ? await prodBuild() : undefined;
	loaded = true;
}

// Static client assets live alongside the server build
const baseDir = resolve(join('./build', 'client'));
/**
 * Node HTTP listener bridging raw requests into the framework handler
 * (Vite SSR in dev, the prebuilt bundle in prod), with a fast path for
 * static client assets under `${__hp_prefix}/assets/`.
 */
export const listener: RequestListener = async (req, res) => {
	// Lazily bootstrap config + framework handler on the first request
	if (!loaded) {
		await loadGlobals();
	}

	const url = new URL(`http://${req.headers.host}${req.url}`);
	// build:strip
	if (devtools) {
		// Let Vite's middleware (HMR, transformed modules) run first
		await new Promise((resolve) => {
			devtools?.server.middlewares(req, res, resolve);
		});
	}

	if (!url.pathname.startsWith(__hp_prefix)) {
		res.writeHead(404);
		res.end();
		return;
	}

	// We need to handle an issue where say we are navigating to $PREFIX
	// but Remix does not handle it without the trailing slash. This is
	// because Remix uses the URL constructor to parse the URL and it
	// will remove the trailing slash. We need to redirect to the correct
	// URL so that Remix can handle it correctly.
	if (url.pathname === __hp_prefix) {
		res.writeHead(301, {
			Location: `${__hp_prefix}/`,
		});

		res.end();
		return;
	}

	// Before we pass any requests to our Remix handler we need to check
	// if we can handle a raw file request. This is important for the
	// Remix loader to work correctly.
	//
	// To optimize this, we send them as readable streams in the node
	// response and we also set headers for aggressive caching.
	if (url.pathname.startsWith(`${__hp_prefix}/assets/`)) {
		const filePath = join(baseDir, url.pathname.slice(__hp_prefix.length));
		const stats = existsSync(filePath) ? statSync(filePath) : null;

		// Reject missing paths and directories up front. The original
		// streamed unconditionally, so a nonexistent file made
		// createReadStream emit an unhandled 'error' event and crash
		// the process.
		if (!stats?.isFile()) {
			res.writeHead(404);
			res.end();
			return;
		}

		// Build assets are cache-bust friendly so we can cache them heavily
		res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');

		// Send the file as a readable stream
		const type = mime.getType(filePath);
		res.writeHead(200, {
			'Content-Type': type || 'application/octet-stream',
		});

		createReadStream(filePath).pipe(res);
		return;
	}

	// Handling the request
	// Abort the framework request if the client disconnects early
	const controller = new AbortController();
	res.on('close', () => controller.abort());

	// Re-shape Node's header object into a fetch Headers instance
	const headers = new Headers();
	for (const [key, value] of Object.entries(req.headers)) {
		if (!value) continue;
		if (Array.isArray(value)) {
			for (const v of value) {
				headers.append(key, v);
			}

			continue;
		}

		headers.append(key, value);
	}

	const frameworkReq = new Request(url.href, {
		headers,
		method: req.method,
		signal: controller.signal,

		// If we have a body, we set the duplex and load it
		...(req.method !== 'GET' && req.method !== 'HEAD'
			? {
					body: createReadableStreamFromReadable(req),
					duplex: 'half',
				}
			: {}),
	});

	const response = devtools
		? await stacksafeTry(devtools, frameworkReq, appContext())
		: await prodHandler?.(frameworkReq, appContext());

	if (!response) {
		res.writeHead(404);
		res.end();
		return;
	}

	// Mirror the framework Response back onto the Node response
	res.statusCode = response.status;
	res.statusMessage = response.statusText;
	for (const [key, value] of response.headers.entries()) {
		res.appendHeader(key, value);
	}

	if (response.body) {
		await writeReadableStreamToWritable(response.body, res);
	}

	res.end();
};

View File

@ -1,26 +0,0 @@
import { constants, access } from 'node:fs/promises';
import { join, resolve } from 'node:path';
import { createRequestHandler } from 'react-router';
import log from '~server/utils/log';
/**
 * Locates the production server build on disk and wraps it in a
 * React Router request handler. Exits the process when no build exists.
 */
export default async function () {
	const buildPath = process.env.BUILD_PATH ?? './build';
	const serverDir = resolve(join(buildPath, 'server'));
	try {
		await access(serverDir, constants.F_OK | constants.R_OK);
	} catch (error) {
		log.error('SRVX', 'No build found. Please refer to the documentation');
		log.error(
			'SRVX',
			'https://github.com/tale/headplane/blob/main/docs/integration/Native.md',
		);

		console.error(error);
		process.exit(1);
	}

	log.info('SRVX', 'Using build directory %s', resolve(buildPath));

	// @vite-ignore
	const build = await import(resolve(join(serverDir, 'index.js')));
	return createRequestHandler(build, 'production');
}

View File

@ -1,61 +0,0 @@
import {
hp_getSingleton,
hp_getSingletonUnsafe,
hp_setSingleton,
} from '~server/context/global';
// Contract for the process-wide logger: printf-style messages tagged
// with a short category code (e.g. 'SRVX', 'CFGX', 'CACH').
export interface Logger {
	info: (category: string, message: string, ...args: unknown[]) => void;
	warn: (category: string, message: string, ...args: unknown[]) => void;
	error: (category: string, message: string, ...args: unknown[]) => void;
	debug: (category: string, message: string, ...args: unknown[]) => void;
}
/**
 * Builds the process-wide logger, enabling the debug channel when
 * requested, and registers it as the 'logger' singleton.
 */
export function hp_loadLogger(debug: boolean) {
	const logger = { ...log };
	if (debug) {
		// Replace the no-op debug sink with a real one
		logger.debug = (category: string, message: string, ...args: unknown[]) => {
			defaultLog('DEBG', category, message, ...args);
		};

		logger.info('CFGX', 'Debug logging enabled');
		logger.info(
			'CFGX',
			'This is very verbose and should only be used for debugging purposes',
		);
		logger.info(
			'CFGX',
			'If you run this in production, your storage COULD fill up quickly',
		);
	}

	hp_setSingleton('logger', logger);
}
/**
 * Shared console sink: "<ISO timestamp> (<LEVEL>) [<category>] <message>".
 * Extra args are forwarded to console.log for printf-style formatting.
 */
function defaultLog(
	level: string,
	category: string,
	message: string,
	...args: unknown[]
) {
	const timestamp = new Date().toISOString();
	const line = `${timestamp} (${level}) [${category}] ${message}`;
	console.log(line, ...args);
}
// Default logger implementation; debug is a no-op until hp_loadLogger()
// builds a replacement with a real debug sink.
const log = {
	info: (category: string, message: string, ...args: unknown[]) => {
		defaultLog('INFO', category, message, ...args);
	},
	warn: (category: string, message: string, ...args: unknown[]) => {
		defaultLog('WARN', category, message, ...args);
	},
	error: (category: string, message: string, ...args: unknown[]) => {
		defaultLog('ERRO', category, message, ...args);
	},
	// Intentionally discards output; swapped for a real sink in debug mode
	debug: (category: string, message: string, ...args: unknown[]) => {},
};

// Prefer the registered singleton (set by hp_loadLogger) so every module
// shares one logger; fall back to the default before initialization.
export default hp_getSingletonUnsafe('logger') ?? log;

View File

@ -1,32 +0,0 @@
/**
 * Minimal promise-based mutual-exclusion primitive.
 * acquire() resolves immediately when the lock is free, otherwise the
 * caller is queued; release() wakes the oldest waiter (FIFO) or unlocks
 * when nobody is waiting.
 */
class Mutex {
	private locked = false;
	private queue: (() => void)[] = [];

	constructor(locked: boolean) {
		this.locked = locked;
	}

	/** Resolves once the caller holds the lock. */
	acquire() {
		return new Promise<void>((resolve) => {
			if (this.locked) {
				// Busy: park the waiter until release() hands the lock over
				this.queue.push(resolve);
				return;
			}

			this.locked = true;
			resolve();
		});
	}

	/** Hands the lock to the next waiter, or unlocks when none remain. */
	release() {
		const next = this.queue.shift();
		if (next) {
			// Lock stays held; ownership transfers directly to the waiter
			next();
			return;
		}

		this.locked = false;
	}
}

/** Factory for a Mutex, optionally created in the locked state. */
export default function mutex(locked = false) {
	return new Mutex(locked);
}

View File

@ -1,76 +0,0 @@
import { createRequire } from 'node:module';
import { defineConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
import { devDependencies } from '../package.json';
// Route prefix baked into the server build; must not end with a slash
const prefix = process.env.__INTERNAL_PREFIX || '/admin';
if (prefix.endsWith('/')) {
	throw new Error('Prefix must not end with a slash');
}

// Used below to pin the bundled 'ws' package to its resolved path
const require = createRequire(import.meta.url);
export default defineConfig({
	define: {
		// Exposed to the server code as the global __hp_prefix constant
		__hp_prefix: JSON.stringify(prefix),
	},
	resolve: {
		preserveSymlinks: true,
		// Map bare Node builtin specifiers onto the node: protocol so the
		// server bundle resolves them correctly
		alias: {
			buffer: 'node:buffer',
			crypto: 'node:crypto',
			events: 'node:events',
			fs: 'node:fs',
			net: 'node:net',
			http: 'node:http',
			https: 'node:https',
			os: 'node:os',
			path: 'node:path',
			stream: 'node:stream',
			tls: 'node:tls',
			url: 'node:url',
			zlib: 'node:zlib',
			ws: require.resolve('ws'),
		},
	},
	plugins: [tsconfigPaths()],
	build: {
		minify: false,
		target: 'node18',
		// lib: {
		// entry: 'server/entry.ts',
		// formats: ['es'],
		// },
		rollupOptions: {
			input: 'server/entry.ts',
			treeshake: {
				moduleSideEffects: false,
			},
			output: {
				// Single-file server entry, made executable via the shebang
				entryFileNames: 'server.js',
				dir: 'build/headplane',
				banner: '#!/usr/bin/env node\n',
			},
			// We are selecting a list of dependencies we want to include
			// We are only including our production dependencies
			external: (id) => {
				if (id.startsWith('node:')) {
					return true;
				}

				if (id === 'ws') {
					return true;
				}

				// Currently ALL node_modules are externalized; the commented
				// code below would restrict that to devDependencies only
				const match = id.match(/node_modules\/([^/]+)/);
				if (match) {
					return true;
					// const dep = match[1];
					// if ((devDependencies as Record<string, string>)[dep]) {
					// return true;
					// }
				}
			},
		},
	},
});

View File

@ -1,126 +0,0 @@
import { createHash } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { type } from 'arktype';
import log from '~server/utils/log';
import mutex from '~server/utils/mutex';
// On-disk layout of the persisted cache: an array of entries where
// `expires` is an optional epoch-millisecond timestamp.
const diskSchema = type({
	key: 'string',
	value: 'unknown',
	expires: 'number?',
}).array();
// A persistent HashMap with a TTL for each key
export class TimedCache<V> {
	private _cache = new Map<string, V>();
	private _timings = new Map<string, number>();

	// TTL (ms) applied when set() is called without an explicit ttl
	private defaultTTL: number;
	private filePath: string;
	private writeLock = mutex();

	// Last flush ID is essentially a hash of the flush contents
	// Prevents unnecessary flushing if nothing has changed
	private lastFlushId = '';

	constructor(defaultTTL: number, filePath: string) {
		this.defaultTTL = defaultTTL;
		this.filePath = filePath;

		// Load the cache from disk and then queue flushes every 10 seconds
		this.load().then(() => {
			setInterval(() => this.flush(), 10000);
		});
	}

	/** Stores a value that expires `ttl` milliseconds from now. */
	set(key: string, value: V, ttl: number = this.defaultTTL) {
		this._cache.set(key, value);
		this._timings.set(key, Date.now() + ttl);
	}

	/**
	 * Returns the live value for `key`, evicting (and returning undefined)
	 * when the entry has expired. Uses has() rather than a truthiness check
	 * so falsy cached values (0, '', false) remain retrievable — the
	 * original treated them as missing.
	 */
	get(key: string): V | undefined {
		if (!this._cache.has(key)) {
			return;
		}

		const expires = this._timings.get(key);
		if (!expires || expires < Date.now()) {
			// Expired (or missing timing info): evict eagerly
			this._cache.delete(key);
			this._timings.delete(key);
			return;
		}

		return this._cache.get(key);
	}

	// Map into a Record without any TTLs
	toJSON() {
		const result: Record<string, V> = {};
		for (const [key, value] of this._cache.entries()) {
			result[key] = value;
		}

		return result;
	}

	// WARNING: This function expects that this.filePath is NOT ENOENT
	private async load() {
		const data = await readFile(this.filePath, 'utf-8');
		let diskData: unknown;
		try {
			diskData = JSON.parse(data);
		} catch (e) {
			log.error('CACH', 'Failed to load cache at %s', this.filePath);
			return;
		}

		const cacheData = diskSchema(diskData);
		if (cacheData instanceof type.errors) {
			log.error('CACH', 'Failed to load cache at %s', this.filePath);
			// Fixed log tag: was 'CACHE', inconsistent with every other call
			log.debug('CACH', 'Error details: %s', cacheData.toString());

			// Skip loading the cache (it should be overwritten soon)
			return;
		}

		// Iterate the VALIDATED data — the original looped over the raw
		// disk data, bypassing the schema it had just checked.
		for (const { key, value, expires } of cacheData) {
			this._cache.set(key, value as V);
			// Entries persisted without an expiry get a fresh default TTL
			// instead of an undefined timing (which get() treats as expired)
			this._timings.set(key, expires ?? Date.now() + this.defaultTTL);
		}

		log.info('CACH', 'Loaded cache from %s', this.filePath);
	}

	private async flush() {
		// acquire() is promise-based and must be awaited; the original
		// fired it and continued, making the write lock a no-op.
		await this.writeLock.acquire();
		try {
			const data = Array.from(this._cache.entries()).map(([key, value]) => {
				return { key, value, expires: this._timings.get(key) };
			});

			if (data.length === 0) {
				return;
			}

			// Calculate the hash of the data
			const dumpData = JSON.stringify(data);
			const sha = createHash('sha256').update(dumpData).digest('hex');
			if (sha === this.lastFlushId) {
				return;
			}

			await writeFile(this.filePath, dumpData, 'utf-8');
			this.lastFlushId = sha;
			log.debug('CACH', 'Flushed cache to %s', this.filePath);
		} finally {
			// Release even when writeFile throws so flushing never deadlocks
			this.writeLock.release();
		}
	}
}

View File

@ -1,62 +0,0 @@
import { open } from 'node:fs/promises';
import type { HostInfo } from '~/types';
import {
hp_getSingleton,
hp_getSingletonUnsafe,
hp_setSingleton,
} from '~server/context/global';
import log from '~server/utils/log';
import { TimedCache } from './cache';
/**
 * Ensures the agent cache file is usable and registers the
 * 'ws_agent_data' TimedCache singleton. When the file cannot be
 * created/opened, agent data caching is skipped entirely.
 */
export async function hp_loadAgentCache(defaultTTL: number, filepath: string) {
	log.debug('CACH', `Loading agent cache from ${filepath}`);
	try {
		// 'a' creates the file when missing WITHOUT truncating it. The
		// original opened with 'w', which wiped the persisted cache on
		// every boot before TimedCache could read it back.
		const handle = await open(filepath, 'a');
		log.info('CACH', `Using agent cache file at ${filepath}`);
		await handle.close();
	} catch (e) {
		log.info('CACH', `Agent cache file not found at ${filepath}`);
		log.debug('CACH', 'Error details: %o', e);
		return;
	}

	const cache = new TimedCache<HostInfo>(defaultTTL, filepath);
	hp_setSingleton('ws_agent_data', cache);
}
/**
 * Broadcasts a data request for the given node IDs to every connected
 * agent and caches the HostInfo payloads that come back. Waits (up to
 * 3 seconds per agent) so data is available on the first request.
 */
export async function hp_agentRequest(nodeList: string[]) {
	// Request to all connected agents (we can have multiple)
	// Luckily we can parse all the data at once through message parsing
	// and then overlapping cache entries will be overwritten by time
	const agents = hp_getSingleton('ws_agents');
	const cache = hp_getSingletonUnsafe('ws_agent_data');

	// Deduplicate the list of nodes
	const NodeIDs = [...new Set(nodeList)];
	for (const node of NodeIDs) {
		// for..of instead of .map: this is logging only; the original
		// built and discarded an array of undefineds
		log.debug('CACH', 'Requesting agent data for', node);
	}

	// Await so that data loads on first request without racing
	// Since we do agent.once() we NEED to wait for it to finish
	await Promise.allSettled(
		[...agents].map(async ([id, agent]) => {
			agent.send(JSON.stringify({ NodeIDs }));
			await new Promise<void>((resolve) => {
				// Just as a safety measure, we set a maximum timeout of 3 seconds
				setTimeout(() => resolve(), 3000);
				agent.once('message', (data) => {
					// Agents authenticate on connect, but a malformed payload
					// must not throw inside the event handler and crash the
					// process — the original parsed without a guard.
					try {
						const parsed = JSON.parse(data.toString());
						log.debug('CACH', 'Received agent data from %s', id);
						for (const [node, info] of Object.entries<HostInfo>(parsed)) {
							cache?.set(node, info);
							log.debug('CACH', 'Cached %s', node);
						}
					} catch (e) {
						log.error('CACH', 'Invalid agent payload from %s: %o', id, e);
					}

					resolve();
				});
			});
		}),
	);
}

View File

@ -1,58 +0,0 @@
import WebSocket, { WebSocketServer } from 'ws';
import { hp_setSingleton } from '~server/context/global';
import log from '~server/utils/log';
import { hp_agentRequest } from './data';
// Wires up the agent WebSocket endpoint: authenticates incoming agents,
// tracks them by tailnet ID, keeps connections alive with pings, and
// registers the shared agent map + fetch helper as singletons.
export function initWebsocket(server: WebSocketServer, authKey: string) {
	log.info('SRVX', 'Starting a WebSocket server for agent connections');
	const agents = new Map<string, WebSocket>();
	hp_setSingleton('ws_agents', agents);
	hp_setSingleton('ws_fetch_data', hp_agentRequest);

	server.on('connection', (ws, req) => {
		// Agents must identify themselves with a tailnet ID header
		const tailnetID = req.headers['x-headplane-tailnet-id'];
		if (!tailnetID || typeof tailnetID !== 'string') {
			log.warn(
				'SRVX',
				'Rejecting an agent WebSocket connection without a tailnet ID',
			);

			// 1008 = policy violation
			ws.close(1008, 'ERR_INVALID_TAILNET_ID');
			return;
		}

		// Shared-secret auth: the agent presents the configured authkey
		if (req.headers.authorization !== `Bearer ${authKey}`) {
			log.warn('SRVX', 'Rejecting an unauthorized WebSocket connection');
			if (req.socket.remoteAddress) {
				log.warn('SRVX', 'Agent source IP: %s', req.socket.remoteAddress);
			}

			ws.close(1008, 'ERR_UNAUTHORIZED');
			return;
		}

		// NOTE(review): a second agent with the same tailnet ID silently
		// replaces the first in this map — confirm that is intended.
		agents.set(tailnetID, ws);

		// Keep-alive: ping every 30s until the socket leaves OPEN state
		const pinger = setInterval(() => {
			if (ws.readyState !== WebSocket.OPEN) {
				clearInterval(pinger);
				return;
			}

			ws.ping();
		}, 30000);

		ws.on('close', () => {
			clearInterval(pinger);
			agents.delete(tailnetID);
		});

		ws.on('error', (error) => {
			clearInterval(pinger);
			log.error('SRVX', 'Agent WebSocket error: %s', error);
			log.debug('SRVX', 'Error details: %o', error);
			log.error('SRVX', 'Closing agent WebSocket connection');

			// 1011 = unexpected server error
			ws.close(1011, 'ERR_INTERNAL_ERROR');
		});
	});

	return server;
}