chore(release): automatic release v1.47.0

This commit is contained in:
homarr-releases[bot]
2025-12-19 19:34:14 +00:00
committed by GitHub
313 changed files with 3972 additions and 2527 deletions

View File

@@ -33,6 +33,9 @@ body:
options:
# The below comment is used to insert a new version with on-release.yml
#NEXT_VERSION#
- 1.46.0
- 1.45.3
- 1.45.2
- 1.45.1
- 1.45.0
- 1.44.0

View File

@@ -5,7 +5,7 @@ inputs:
description: Digest of Docker image to use
required: true
architecture:
description: Name of architecture, will be used to create directories (e.g. amd64, arm64)
description: Name of architecture, will be used to build artifact content (amd64 or arm64)
required: true
release-tag:
description: Tag of the release to which the artifact will be attached
@@ -19,25 +19,46 @@ inputs:
runs:
using: "composite"
steps:
- name: Prebuilt debian dependencies
uses: homarr-labs/homarr/.github/actions/prebuilt-debian@dev
id: prebuilt-debian
with:
architecture: ${{ inputs.architecture }}
- name: Start docker container for ${{ inputs.architecture }}
run: |
docker run --name homarr \
-e "SECRET_ENCRYPTION_KEY=0000000000000000000000000000000000000000000000000000000000000000" \
--detach --rm ${{ inputs.digest }}
shell: bash
- name: Extract build from ${{ inputs.architecture }} container
- name: Prepare extraction
run: |
docker exec homarr cp /etc/nginx/templates/nginx.conf /app && \
docker exec homarr tar -czf extraction.tar.gz -C /app . && \
mkdir -p ${{ runner.temp }}/extraction/${{ inputs.architecture }} && \
docker cp homarr:/app/extraction.tar.gz ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-${{ inputs.architecture }}.tar.gz
mkdir -p ${{ runner.temp }}/extraction/${{ inputs.architecture }}
shell: bash
- name: Extract source from ${{ inputs.architecture }} container (alpine)
run: |
docker exec homarr tar -czf extraction-alpine.tar.gz -C /app . && \
docker cp homarr:/app/extraction-alpine.tar.gz ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-alpine-${{ inputs.architecture }}.tar.gz && \
docker exec homarr rm /app/extraction-alpine.tar.gz
shell: bash
- name: Extract source from ${{ inputs.architecture }} container (debian)
run: |
docker cp ${{ steps.prebuilt-debian.outputs.path }}/. homarr:/app/build && \
docker cp ${{ steps.prebuilt-debian.outputs.path }}/. homarr:/app/node_modules/better-sqlite3/build/Release && \
docker exec homarr tar -czf extraction-debian.tar.gz -C /app . && \
docker cp homarr:/app/extraction-debian.tar.gz ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-debian-${{ inputs.architecture }}.tar.gz
shell: bash
- name: Stop ${{ inputs.architecture }} container
if: always()
run: docker container remove --force --volumes homarr
shell: bash
- name: Add build archive to release
- name: Add build archive to release (alpine)
env:
GH_TOKEN: ${{ inputs.token }}
run: gh release upload --repo ${{ inputs.repository }} ${{ inputs.release-tag }} ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-${{ inputs.architecture }}.tar.gz --clobber
run: gh release upload --repo ${{ inputs.repository }} ${{ inputs.release-tag }} ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-alpine-${{ inputs.architecture }}.tar.gz --clobber
shell: bash
- name: Add build archive to release (debian)
env:
GH_TOKEN: ${{ inputs.token }}
run: gh release upload --repo ${{ inputs.repository }} ${{ inputs.release-tag }} ${{ runner.temp }}/extraction/${{ inputs.architecture }}/build-debian-${{ inputs.architecture }}.tar.gz --clobber
shell: bash

View File

@@ -0,0 +1,42 @@
name: Prebuilt dependencies for debian
description: Provides prebuilt dependencies for debian based docker images
inputs:
architecture:
description: Name of architecture, will be used to build docker image (e.g. amd64, arm64)
required: true
outputs:
path:
description: Path to extracted prebuilt dependencies
value: ${{ runner.temp }}/prebuilts
runs:
using: "composite"
steps:
- uses: actions/checkout@v6
with:
persist-credentials: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build docker image for ${{ inputs.architecture }}
id: build
uses: docker/build-push-action@v6
with:
push: false
load: true
context: ./deployments/prebuilt-debian
platforms: linux/${{ inputs.architecture }}
tags: prebuilt-debian
- name: Start docker container for ${{ inputs.architecture }}
run: |
docker run --name prebuilt-debian \
--detach --rm prebuilt-debian
shell: bash
- name: Extract prebuilt dependencies from ${{ inputs.architecture }} container
run: |
mkdir -p ${{ runner.temp }}/prebuilts && \
docker cp prebuilt-debian:/app/node_modules/better-sqlite3/build/Release/better_sqlite3.node ${{ runner.temp }}/prebuilts/better_sqlite3.node
shell: bash
- name: Stop ${{ inputs.architecture }} container
if: always()
run: docker container remove --force --volumes prebuilt-debian
shell: bash

View File

@@ -8,6 +8,7 @@ permissions: {}
jobs:
approve-automatic-prs:
runs-on: ubuntu-latest
timeout-minutes: 2
if: github.actor_id == 158783068 || github.actor_id == 190541745 || github.actor_id == 210161987 # Id of renovate bot and crowdin bot see https://api.github.com/users/homarr-renovate%5Bbot%5D and https://api.github.com/users/homarr-crowdin%5Bbot%5D and https://api.github.com/users/homarr-update-contributors%5Bbot%5D
steps:
- name: Checkout code

View File

@@ -23,6 +23,7 @@ env:
jobs:
lint:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
@@ -38,6 +39,7 @@ jobs:
format:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
@@ -49,6 +51,7 @@ jobs:
typecheck:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
@@ -60,6 +63,7 @@ jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
@@ -77,6 +81,7 @@ jobs:
e2e:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- uses: actions/checkout@v6
@@ -101,6 +106,7 @@ jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
- name: Setup

View File

@@ -13,7 +13,8 @@ permissions:
jobs:
validate-pull-request-title:
runs-on: ubuntu-latest
timeout-minutes: 1
steps:
- uses: amannn/action-semantic-pull-request@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -10,6 +10,7 @@ permissions:
jobs:
download-crowdin-translations:
timeout-minutes: 5
runs-on: ubuntu-latest
steps:

View File

@@ -14,6 +14,7 @@ jobs:
# Don't run this action if the downloaded translations are being pushed
if: "!contains(github.event.head_commit.message, 'chore(lang)')"
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout

View File

@@ -32,6 +32,7 @@ jobs:
release:
name: Create tag and release
runs-on: ubuntu-latest
timeout-minutes: 5
env:
SKIP_RELEASE: ${{ github.event_name == 'workflow_dispatch' || github.ref_name == 'dev' }}
outputs:
@@ -64,7 +65,7 @@ jobs:
- uses: actions/setup-node@v6
if: env.SKIP_RELEASE == 'false'
with:
node-version: 24.11.1
node-version: 24.12.0
cache: "pnpm"
- run: npm i -g pnpm
if: env.SKIP_RELEASE == 'false'
@@ -112,6 +113,7 @@ jobs:
name: Build docker image for amd64
needs: release
runs-on: ubuntu-latest
timeout-minutes: 15
outputs:
digest: ${{ steps.build.outputs.digest }}
steps:
@@ -150,6 +152,7 @@ jobs:
name: Build docker image for arm64
needs: release
runs-on: ubuntu-24.04-arm
timeout-minutes: 20
outputs:
digest: ${{ steps.build.outputs.digest }}
steps:
@@ -188,6 +191,7 @@ jobs:
name: Extract amd64 asset from docker image
needs: [release, build-amd64]
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- name: Extract amd64
if: needs.release.outputs.skipped == 'false'
@@ -203,6 +207,7 @@ jobs:
name: Extract arm64 asset from docker image
needs: [release, build-arm64]
runs-on: ubuntu-24.04-arm
timeout-minutes: 2
steps:
- name: Extract arm64
if: needs.release.outputs.skipped == 'false'
@@ -217,6 +222,7 @@ jobs:
name: Complete deployment and notify
needs: [release, build-amd64, build-arm64, extract-asset-amd64, extract-asset-arm64]
runs-on: ubuntu-latest
timeout-minutes: 5
env:
NEXT_VERSION: ${{ needs.release.outputs.version }}
DEPLOY_LATEST: ${{ github.ref_name == 'main' }}

View File

@@ -18,6 +18,7 @@ permissions:
jobs:
create-and-merge-pr:
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- name: Discord notification
if: ${{ github.events.inputs.send-notifications }}

View File

@@ -0,0 +1,45 @@
name: "[Deployments] Validate prebuilt debian dependencies"
permissions:
contents: read
on:
pull_request:
branches: ["*"]
paths: [".github/actions/prebuilt-debian/**", "deployments/prebuilt-debian/**"]
jobs:
prebuilt-debian-validate-amd64:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v6
- name: Validate prebuilt dependencies for amd64
id: validate-amd64
uses: ./.github/actions/prebuilt-debian
with:
architecture: amd64
- name: Check extracted files for amd64
run: |
if [ ! -f "${{ steps.validate-amd64.outputs.path }}/better_sqlite3.node" ]; then
echo "better_sqlite3.node not found for amd64!"
exit 1
fi
prebuilt-debian-validate-arm64:
runs-on: ubuntu-24.04-arm
timeout-minutes: 5
steps:
- uses: actions/checkout@v6
- name: Validate prebuilt dependencies for arm64
id: validate-arm64
uses: ./.github/actions/prebuilt-debian
with:
architecture: arm64
- name: Check extracted files for arm64
run: |
if [ ! -f "${{ steps.validate-arm64.outputs.path }}/better_sqlite3.node" ]; then
echo "better_sqlite3.node not found for arm64!"
exit 1
fi

View File

@@ -11,6 +11,7 @@ on:
jobs:
renovate-validate:
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- uses: actions/checkout@v6
- run: |

View File

@@ -8,6 +8,7 @@ jobs:
trigger-docs-release:
name: Trigger Documentation Release
runs-on: ubuntu-latest
timeout-minutes: 2
steps:
- name: Obtain token
id: obtainToken
@@ -39,6 +40,7 @@ jobs:
update-bug-report-template:
name: Update Bug Report Template
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Obtain token
id: obtainToken
@@ -67,7 +69,7 @@ jobs:
- name: Create Pull Request
id: create-pull-request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v8
with:
token: ${{ steps.obtainToken.outputs.token }}
branch: update-bug-report-template

View File

@@ -10,6 +10,7 @@ jobs:
if: ${{ !startsWith(github.head_ref, 'renovate/') }}
name: Skip Stability Days
runs-on: ubuntu-latest
timeout-minutes: 1
steps:
- name: Add status check
env:

View File

@@ -14,6 +14,7 @@ permissions:
jobs:
update-contributors:
runs-on: ubuntu-latest
timeout-minutes: 2
strategy:
matrix:
node-version: [22]
@@ -53,7 +54,7 @@ jobs:
- name: Create Pull Request
id: create-pull-request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v8
with:
token: ${{ steps.obtainToken.outputs.token }}
branch: update-contributors

View File

@@ -17,6 +17,7 @@ jobs:
group: update-integration
cancel-in-progress: false
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Obtain token
id: obtainToken
@@ -44,7 +45,7 @@ jobs:
- name: Create Pull Request
id: create-pull-request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v8
with:
token: ${{ steps.obtainToken.outputs.token }}
branch: update-integrations-readme

2
.nvmrc
View File

@@ -1 +1 @@
24.11.1
24.12.0

View File

@@ -1 +1,9 @@
# ── CODEOWNERS ───────────────────────────────────────
* @homarr-labs/maintainers
# Exempt Renovate-managed files (no owners)
package.json @homarr-labs/none
package-lock.json @homarr-labs/none
pnpm-lock.yaml @homarr-labs/none
Dockerfile @homarr-labs/none
docker-compose.yml @homarr-labs/none

View File

@@ -1,4 +1,4 @@
FROM node:24.11.1-alpine AS base
FROM node:24.12.0-alpine AS base
FROM base AS builder
RUN apk add --no-cache libc6-compat

View File

@@ -1,8 +1,8 @@
// Importing env files here to validate on build
import "@homarr/auth/env";
import "@homarr/db/env";
import "@homarr/core/infrastructure/db/env";
import "@homarr/common/env";
import "@homarr/log/env";
import "@homarr/core/infrastructure/logs/env";
import "@homarr/docker/env";
import type { NextConfig } from "next";

View File

@@ -23,7 +23,6 @@
"@homarr/api": "workspace:^0.1.0",
"@homarr/auth": "workspace:^0.1.0",
"@homarr/boards": "workspace:^0.1.0",
"@homarr/certificates": "workspace:^0.1.0",
"@homarr/common": "workspace:^0.1.0",
"@homarr/core": "workspace:^0.1.0",
"@homarr/cron-job-status": "workspace:^0.1.0",
@@ -36,7 +35,6 @@
"@homarr/icons": "workspace:^0.1.0",
"@homarr/image-proxy": "workspace:^0.1.0",
"@homarr/integrations": "workspace:^0.1.0",
"@homarr/log": "workspace:^",
"@homarr/modals": "workspace:^0.1.0",
"@homarr/modals-collection": "workspace:^0.1.0",
"@homarr/notifications": "workspace:^0.1.0",
@@ -50,21 +48,21 @@
"@homarr/ui": "workspace:^0.1.0",
"@homarr/validation": "workspace:^0.1.0",
"@homarr/widgets": "workspace:^0.1.0",
"@mantine/colors-generator": "^8.3.9",
"@mantine/core": "^8.3.9",
"@mantine/dropzone": "^8.3.9",
"@mantine/hooks": "^8.3.9",
"@mantine/modals": "^8.3.9",
"@mantine/tiptap": "^8.3.9",
"@mantine/colors-generator": "^8.3.10",
"@mantine/core": "^8.3.10",
"@mantine/dropzone": "^8.3.10",
"@mantine/hooks": "^8.3.10",
"@mantine/modals": "^8.3.10",
"@mantine/tiptap": "^8.3.10",
"@million/lint": "1.0.14",
"@tabler/icons-react": "^3.35.0",
"@tanstack/react-query": "^5.90.12",
"@tanstack/react-query-devtools": "^5.91.1",
"@tanstack/react-query-next-experimental": "^5.91.0",
"@trpc/client": "^11.7.2",
"@trpc/next": "^11.7.2",
"@trpc/react-query": "^11.7.2",
"@trpc/server": "^11.7.2",
"@trpc/client": "^11.8.0",
"@trpc/next": "^11.8.0",
"@trpc/react-query": "^11.8.0",
"@trpc/server": "^11.8.0",
"@xterm/addon-canvas": "^0.7.0",
"@xterm/addon-fit": "0.10.0",
"@xterm/xterm": "^5.5.0",
@@ -75,19 +73,19 @@
"dotenv": "^17.2.3",
"flag-icons": "^7.5.0",
"glob": "^13.0.0",
"isomorphic-dompurify": "^2.33.0",
"jotai": "^2.15.2",
"isomorphic-dompurify": "^2.34.0",
"jotai": "^2.16.0",
"mantine-react-table": "2.0.0-beta.9",
"next": "16.0.10",
"postcss-preset-mantine": "^1.18.0",
"prismjs": "^1.30.0",
"react": "19.2.1",
"react-dom": "19.2.1",
"react": "19.2.3",
"react-dom": "19.2.3",
"react-error-boundary": "^6.0.0",
"react-simple-code-editor": "^0.14.1",
"sass": "^1.94.2",
"sass": "^1.96.0",
"superjson": "2.2.6",
"swagger-ui-react": "^5.30.3",
"swagger-ui-react": "^5.31.0",
"use-deep-compare-effect": "^1.8.1",
"zod": "^4.1.13"
},
@@ -96,13 +94,13 @@
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"@types/chroma-js": "3.1.2",
"@types/node": "^24.10.1",
"@types/node": "^24.10.4",
"@types/prismjs": "^1.26.5",
"@types/react": "19.2.7",
"@types/react-dom": "19.2.3",
"@types/swagger-ui-react": "^5.18.0",
"concurrently": "^9.2.1",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"node-loader": "^2.1.0",
"prettier": "^3.7.4",
"typescript": "^5.9.3"

View File

@@ -11,8 +11,9 @@ import { IntegrationProvider } from "@homarr/auth/client";
import { auth } from "@homarr/auth/next";
import { getIntegrationsWithPermissionsAsync } from "@homarr/auth/server";
import { isNullOrWhitespace } from "@homarr/common";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import type { WidgetKind } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { getI18n } from "@homarr/translation/server";
import { prefetchForKindAsync } from "@homarr/widgets/prefetch";
@@ -22,6 +23,8 @@ import type { Board, Item } from "../_types";
import { DynamicClientBoard } from "./_dynamic-client";
import { BoardContentHeaderActions } from "./_header-actions";
const logger = createLogger({ module: "createBoardContentPage" });
export type Params = Record<string, unknown>;
interface Props<TParams extends Params> {
@@ -57,7 +60,13 @@ export const createBoardContentPage = <TParams extends Record<string, unknown>>(
for (const [kind, items] of itemsMap) {
await prefetchForKindAsync(kind, queryClient, items).catch((error) => {
logger.error(new Error("Failed to prefetch widget", { cause: error }));
logger.error(
new ErrorWithMetadata(
"Failed to prefetch widget",
{ widgetKind: kind, itemCount: items.length },
{ cause: error },
),
);
});
}

View File

@@ -44,7 +44,7 @@ export const BoardContentHeaderActions = () => {
const { hasChangeAccess } = useBoardPermissions(board);
if (!hasChangeAccess) {
return null; // Hide actions for user without access
return <SelectBoardsMenu />;
}
return (

View File

@@ -6,7 +6,7 @@ import { TRPCError } from "@trpc/server";
import { auth } from "@homarr/auth/next";
import { BoardProvider } from "@homarr/boards/context";
import { EditModeProvider } from "@homarr/boards/edit-mode";
import { logger } from "@homarr/log";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { MainHeader } from "~/components/layout/header";
import { BoardLogoWithTitle } from "~/components/layout/logo/board-logo";
@@ -18,6 +18,8 @@ import { CustomCss } from "./(content)/_custom-css";
import { BoardReadyProvider } from "./(content)/_ready-context";
import { BoardMantineProvider } from "./(content)/_theme";
const logger = createLogger({ module: "createBoardLayout" });
interface CreateBoardLayoutProps<TParams extends Params> {
headerActions: JSX.Element;
getInitialBoardAsync: (params: TParams) => Promise<Board>;

View File

@@ -16,7 +16,7 @@ import {
import { IconCertificateOff } from "@tabler/icons-react";
import { auth } from "@homarr/auth/next";
import { getTrustedCertificateHostnamesAsync } from "@homarr/certificates/server";
import { getTrustedCertificateHostnamesAsync } from "@homarr/core/infrastructure/certificates";
import { getI18n } from "@homarr/translation/server";
import { Link } from "@homarr/ui";

View File

@@ -5,8 +5,8 @@ import { IconAlertTriangle, IconCertificate, IconCertificateOff } from "@tabler/
import dayjs from "dayjs";
import { auth } from "@homarr/auth/next";
import { loadCustomRootCertificatesAsync } from "@homarr/certificates/server";
import { getMantineColor } from "@homarr/common";
import { loadCustomRootCertificatesAsync } from "@homarr/core/infrastructure/certificates";
import type { SupportedLanguage } from "@homarr/translation";
import { getI18n } from "@homarr/translation/server";
import { Link } from "@homarr/ui";

View File

@@ -2,8 +2,8 @@
import { Select } from "@mantine/core";
import type { LogLevel } from "@homarr/log/constants";
import { logLevelConfiguration, logLevels } from "@homarr/log/constants";
import type { LogLevel } from "@homarr/core/infrastructure/logs/constants";
import { logLevelConfiguration, logLevels } from "@homarr/core/infrastructure/logs/constants";
import { useI18n } from "@homarr/translation/client";
import { useLogContext } from "./log-context";

View File

@@ -3,8 +3,8 @@
import type { PropsWithChildren } from "react";
import { createContext, useContext, useMemo, useState } from "react";
import type { LogLevel } from "@homarr/log/constants";
import { logLevels } from "@homarr/log/constants";
import type { LogLevel } from "@homarr/core/infrastructure/logs/constants";
import { logLevels } from "@homarr/core/infrastructure/logs/constants";
const LogContext = createContext<{
level: LogLevel;

View File

@@ -7,7 +7,7 @@ import "@xterm/xterm/css/xterm.css";
import { notFound } from "next/navigation";
import { auth } from "@homarr/auth/next";
import { env } from "@homarr/log/env";
import { logsEnv } from "@homarr/core/infrastructure/logs/env";
import { DynamicBreadcrumb } from "~/components/navigation/dynamic-breadcrumb";
import { fullHeightWithoutHeaderAndFooter } from "~/constants";
@@ -35,7 +35,7 @@ export default async function LogsManagementPage() {
}
return (
<LogContextProvider defaultLevel={env.LOG_LEVEL}>
<LogContextProvider defaultLevel={logsEnv.LEVEL}>
<Group justify="space-between" align="center" wrap="nowrap">
<DynamicBreadcrumb />
<LogLevelSelection />

View File

@@ -6,9 +6,12 @@ import { appRouter, createTRPCContext } from "@homarr/api";
import type { Session } from "@homarr/auth";
import { hashPasswordAsync } from "@homarr/auth";
import { createSessionAsync } from "@homarr/auth/server";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { db, eq } from "@homarr/db";
import { apiKeys } from "@homarr/db/schema";
import { logger } from "@homarr/log";
const logger = createLogger({ module: "trpcOpenApiRoute" });
const handlerAsync = async (req: NextRequest) => {
const apiKeyHeaderValue = req.headers.get("ApiKey");
@@ -27,7 +30,7 @@ const handlerAsync = async (req: NextRequest) => {
router: appRouter,
createContext: () => createTRPCContext({ session, headers: req.headers }),
onError({ error, path, type }) {
logger.error(new Error(`tRPC Error with ${type} on '${path}'`, { cause: error.cause }));
logger.error(new ErrorWithMetadata("tRPC Error occured", { path, type }, { cause: error }));
},
});
};
@@ -48,9 +51,10 @@ const getSessionOrDefaultFromHeadersAsync = async (
const [apiKeyId, apiKey] = apiKeyHeaderValue.split(".");
if (!apiKeyId || !apiKey) {
logger.warn(
`An attempt to authenticate over API has failed due to invalid API key format ip='${ipAdress}' userAgent='${userAgent}'`,
);
logger.warn("An attempt to authenticate over API has failed due to invalid API key format", {
ipAdress,
userAgent,
});
return null;
}
@@ -74,18 +78,21 @@ const getSessionOrDefaultFromHeadersAsync = async (
});
if (!apiKeyFromDb) {
logger.warn(`An attempt to authenticate over API has failed ip='${ipAdress}' userAgent='${userAgent}'`);
logger.warn("An attempt to authenticate over API has failed", { ipAdress, userAgent });
return null;
}
const hashedApiKey = await hashPasswordAsync(apiKey, apiKeyFromDb.salt);
if (apiKeyFromDb.apiKey !== hashedApiKey) {
logger.warn(`An attempt to authenticate over API has failed ip='${ipAdress}' userAgent='${userAgent}'`);
logger.warn("An attempt to authenticate over API has failed", { ipAdress, userAgent });
return null;
}
logger.info(`Read session from API request and found user ${apiKeyFromDb.user.name} (${apiKeyFromDb.user.id})`);
logger.info("Read session from API request and found user", {
name: apiKeyFromDb.user.name,
id: apiKeyFromDb.user.id,
});
return await createSessionAsync(db, apiKeyFromDb.user);
};

View File

@@ -1,8 +1,10 @@
import { NextRequest } from "next/server";
import { createHandlersAsync } from "@homarr/auth";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { SupportedAuthProvider } from "@homarr/definitions";
import { logger } from "@homarr/log";
const logger = createLogger({ module: "nextAuthRoute" });
export const GET = async (req: NextRequest) => {
const { handlers } = await createHandlersAsync(extractProvider(req), isSecureCookieEnabled(req));

View File

@@ -1,13 +1,16 @@
import { performance } from "perf_hooks";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { db } from "@homarr/db";
import { logger } from "@homarr/log";
import { handshakeAsync } from "@homarr/redis";
const logger = createLogger({ module: "healthLiveRoute" });
export async function GET() {
const timeBeforeHealthCheck = performance.now();
const response = await executeAndAggregateAllHealthChecksAsync();
logger.info(`Completed healthcheck after ${performance.now() - timeBeforeHealthCheck}ms`);
logger.info("Completed healthcheck", { elapsed: `${performance.now() - timeBeforeHealthCheck}ms` });
if (response.status === "healthy") {
return new Response(JSON.stringify(response), {
@@ -73,7 +76,7 @@ const executeHealthCheckSafelyAsync = async (
};
} catch (error) {
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
logger.error(`Healthcheck '${name}' has failed: ${error}`);
logger.error(new ErrorWithMetadata("Healthcheck failed", { name }, { cause: error }));
return {
status: "unhealthy",
values: {

View File

@@ -3,7 +3,10 @@ import { fetchRequestHandler } from "@trpc/server/adapters/fetch";
import { appRouter, createTRPCContext } from "@homarr/api";
import { trpcPath } from "@homarr/api/shared";
import { auth } from "@homarr/auth/next";
import { logger } from "@homarr/log";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
const logger = createLogger({ module: "trpcRoute" });
/**
* Configure basic CORS headers
@@ -31,7 +34,7 @@ const handler = auth(async (req) => {
req,
createContext: () => createTRPCContext({ session: req.auth, headers: req.headers }),
onError({ error, path, type }) {
logger.error(new Error(`tRPC Error with ${type} on '${path}'`, { cause: error.cause }));
logger.error(new ErrorWithMetadata("tRPC Error occured", { path, type }, { cause: error }));
},
});

View File

@@ -3,7 +3,9 @@ import "server-only";
import { notFound, redirect } from "next/navigation";
import { TRPCError } from "@trpc/server";
import { logger } from "@homarr/log";
import { createLogger } from "@homarr/core/infrastructure/logs";
const logger = createLogger({ module: "trpcCatchError" });
export const catchTrpcNotFound = (err: unknown) => {
if (err instanceof TRPCError && err.code === "NOT_FOUND") {

View File

@@ -23,6 +23,7 @@
"@homarr/analytics": "workspace:^0.1.0",
"@homarr/auth": "workspace:^0.1.0",
"@homarr/common": "workspace:^0.1.0",
"@homarr/core": "workspace:^",
"@homarr/cron-job-api": "workspace:^0.1.0",
"@homarr/cron-jobs": "workspace:^0.1.0",
"@homarr/cron-jobs-core": "workspace:^0.1.0",
@@ -30,7 +31,6 @@
"@homarr/definitions": "workspace:^0.1.0",
"@homarr/icons": "workspace:^0.1.0",
"@homarr/integrations": "workspace:^0.1.0",
"@homarr/log": "workspace:^",
"@homarr/ping": "workspace:^0.1.0",
"@homarr/redis": "workspace:^0.1.0",
"@homarr/request-handler": "workspace:^0.1.0",
@@ -47,10 +47,10 @@
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"@types/node": "^24.10.1",
"@types/node": "^24.10.4",
"dotenv-cli": "^11.0.0",
"esbuild": "^0.27.1",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"prettier": "^3.7.4",
"tsx": "4.20.4",
"typescript": "^5.9.3"

View File

@@ -1,11 +1,13 @@
import { schedule, validate as validateCron } from "node-cron";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { IJobManager } from "@homarr/cron-job-api";
import type { jobGroup as cronJobGroup, JobGroupKeys } from "@homarr/cron-jobs";
import type { Database, InferInsertModel } from "@homarr/db";
import { eq } from "@homarr/db";
import { cronJobConfigurations } from "@homarr/db/schema";
import { logger } from "@homarr/log";
const logger = createLogger({ module: "jobManager" });
export class JobManager implements IJobManager {
constructor(
@@ -23,7 +25,7 @@ export class JobManager implements IJobManager {
await this.jobGroup.stopAsync(name);
}
public async updateIntervalAsync(name: JobGroupKeys, cron: string): Promise<void> {
logger.info(`Updating cron job interval name="${name}" expression="${cron}"`);
logger.info("Updating cron job interval", { name, expression: cron });
const job = this.jobGroup.getJobRegistry().get(name);
if (!job) throw new Error(`Job ${name} not found`);
if (!validateCron(cron)) {
@@ -38,22 +40,22 @@ export class JobManager implements IJobManager {
name,
}),
);
logger.info(`Cron job interval updated name="${name}" expression="${cron}"`);
logger.info("Cron job interval updated", { name, expression: cron });
}
public async disableAsync(name: JobGroupKeys): Promise<void> {
logger.info(`Disabling cron job name="${name}"`);
logger.info("Disabling cron job", { name });
const job = this.jobGroup.getJobRegistry().get(name);
if (!job) throw new Error(`Job ${name} not found`);
await this.updateConfigurationAsync(name, { isEnabled: false });
await this.jobGroup.stopAsync(name);
logger.info(`Cron job disabled name="${name}"`);
logger.info("Cron job disabled", { name });
}
public async enableAsync(name: JobGroupKeys): Promise<void> {
logger.info(`Enabling cron job name="${name}"`);
logger.info("Enabling cron job", { name });
await this.updateConfigurationAsync(name, { isEnabled: true });
await this.jobGroup.startAsync(name);
logger.info(`Cron job enabled name="${name}"`);
logger.info("Cron job enabled", { name });
}
private async updateConfigurationAsync(
@@ -64,9 +66,11 @@ export class JobManager implements IJobManager {
where: (table, { eq }) => eq(table.name, name),
});
logger.debug(
`Updating cron job configuration name="${name}" configuration="${JSON.stringify(configuration)}" exists="${Boolean(existingConfig)}"`,
);
logger.debug("Updating cron job configuration", {
name,
configuration: JSON.stringify(configuration),
exists: Boolean(existingConfig),
});
if (existingConfig) {
await this.db
@@ -74,7 +78,10 @@ export class JobManager implements IJobManager {
// prevent updating the name, as it is the primary key
.set({ ...configuration, name: undefined })
.where(eq(cronJobConfigurations.name, name));
logger.debug(`Cron job configuration updated name="${name}" configuration="${JSON.stringify(configuration)}"`);
logger.debug("Cron job configuration updated", {
name,
configuration: JSON.stringify(configuration),
});
return;
}
@@ -86,7 +93,10 @@ export class JobManager implements IJobManager {
cronExpression: configuration.cronExpression ?? job.cronExpression,
isEnabled: configuration.isEnabled ?? true,
});
logger.debug(`Cron job configuration updated name="${name}" configuration="${JSON.stringify(configuration)}"`);
logger.debug("Cron job configuration updated", {
name,
configuration: JSON.stringify(configuration),
});
}
public async getAllAsync(): Promise<

View File

@@ -5,16 +5,19 @@ import type { FastifyTRPCPluginOptions } from "@trpc/server/adapters/fastify";
import { fastifyTRPCPlugin } from "@trpc/server/adapters/fastify";
import fastify from "fastify";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import type { JobRouter } from "@homarr/cron-job-api";
import { jobRouter } from "@homarr/cron-job-api";
import { CRON_JOB_API_KEY_HEADER, CRON_JOB_API_PATH, CRON_JOB_API_PORT } from "@homarr/cron-job-api/constants";
import { jobGroup } from "@homarr/cron-jobs";
import { db } from "@homarr/db";
import { logger } from "@homarr/log";
import { JobManager } from "./job-manager";
import { onStartAsync } from "./on-start";
const logger = createLogger({ module: "tasksMain" });
const server = fastify({
maxParamLength: 5000,
});
@@ -27,7 +30,7 @@ server.register(fastifyTRPCPlugin, {
apiKey: req.headers[CRON_JOB_API_KEY_HEADER] as string | undefined,
}),
onError({ path, error }) {
logger.error(new Error(`Error in tasks tRPC handler path="${path}"`, { cause: error }));
logger.error(new ErrorWithMetadata("Error in tasks tRPC handler", { path }, { cause: error }));
},
} satisfies FastifyTRPCPluginOptions<JobRouter>["trpcOptions"],
});
@@ -39,9 +42,11 @@ void (async () => {
try {
await server.listen({ port: CRON_JOB_API_PORT });
logger.info(`Tasks web server started successfully port="${CRON_JOB_API_PORT}"`);
logger.info("Tasks web server started successfully", { port: CRON_JOB_API_PORT });
} catch (err) {
logger.error(new Error(`Failed to start tasks web server port="${CRON_JOB_API_PORT}"`, { cause: err }));
logger.error(
new ErrorWithMetadata("Failed to start tasks web server", { port: CRON_JOB_API_PORT }, { cause: err }),
);
process.exit(1);
}
})();

View File

@@ -1,7 +1,7 @@
import { logger } from "@homarr/log";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { updateCheckerRequestHandler } from "@homarr/request-handler/update-checker";
const localLogger = logger.child({ module: "invalidateUpdateCheckerCache" });
const logger = createLogger({ module: "invalidateUpdateCheckerCache" });
/**
* Invalidates the update checker cache on startup to ensure fresh data.
@@ -11,8 +11,8 @@ export async function invalidateUpdateCheckerCacheAsync() {
try {
const handler = updateCheckerRequestHandler.handler({});
await handler.invalidateAsync();
localLogger.debug("Update checker cache invalidated");
logger.debug("Update checker cache invalidated");
} catch (error) {
localLogger.error(new Error("Failed to invalidate update checker cache", { cause: error }));
logger.error(new Error("Failed to invalidate update checker cache", { cause: error }));
}
}

View File

@@ -1,10 +1,10 @@
import { env } from "@homarr/auth/env";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { db, eq, inArray } from "@homarr/db";
import { sessions, users } from "@homarr/db/schema";
import { supportedAuthProviders } from "@homarr/definitions";
import { logger } from "@homarr/log";
const localLogger = logger.child({ module: "sessionCleanup" });
const logger = createLogger({ module: "sessionCleanup" });
/**
* Deletes sessions for users that have inactive auth providers.
@@ -29,11 +29,13 @@ export async function cleanupSessionsAsync() {
await db.delete(sessions).where(inArray(sessions.userId, userIds));
if (sessionsWithInactiveProviders.length > 0) {
localLogger.info(`Deleted sessions for inactive providers count=${userIds.length}`);
logger.info("Deleted sessions for inactive providers", {
count: userIds.length,
});
} else {
localLogger.debug("No sessions to delete");
logger.debug("No sessions to delete");
}
} catch (error) {
localLogger.error(new Error("Failed to clean up sessions", { cause: error }));
logger.error(new Error("Failed to clean up sessions", { cause: error }));
}
}

View File

@@ -1,6 +1,5 @@
import { setGlobalDispatcher } from "undici";
import { LoggingAgent } from "@homarr/common/server";
import { UndiciHttpAgent } from "@homarr/core/infrastructure/http";
const agent = new LoggingAgent();
setGlobalDispatcher(agent);
setGlobalDispatcher(new UndiciHttpAgent());

View File

@@ -20,9 +20,9 @@
"@homarr/api": "workspace:^0.1.0",
"@homarr/auth": "workspace:^0.1.0",
"@homarr/common": "workspace:^0.1.0",
"@homarr/core": "workspace:^",
"@homarr/db": "workspace:^0.1.0",
"@homarr/definitions": "workspace:^0.1.0",
"@homarr/log": "workspace:^",
"@homarr/redis": "workspace:^0.1.0",
"@homarr/validation": "workspace:^0.1.0",
"dotenv": "^17.2.3",
@@ -35,7 +35,7 @@
"@homarr/tsconfig": "workspace:^0.1.0",
"@types/ws": "^8.18.1",
"esbuild": "^0.27.1",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"prettier": "^3.7.4",
"typescript": "^5.9.3"
}

View File

@@ -4,8 +4,10 @@ import { WebSocketServer } from "ws";
import { appRouter, createTRPCContext } from "@homarr/api/websocket";
import { getSessionFromToken, sessionTokenCookieName } from "@homarr/auth";
import { parseCookies } from "@homarr/common";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { db } from "@homarr/db";
import { logger } from "@homarr/log";
const logger = createLogger({ module: "websocketMain" });
const wss = new WebSocketServer({
port: 3001,

View File

@@ -0,0 +1,6 @@
FROM node:24.12.0-trixie AS base
WORKDIR /app
COPY package.json .
COPY pnpm-lock.yaml .
RUN corepack enable pnpm && pnpm install --frozen-lockfile
CMD ["sleep", "60s"]

View File

@@ -0,0 +1,17 @@
{
"name": "homarr-prebuilt-debian",
"private": true,
"dependencies": {
"better-sqlite3": "^12.5.0"
},
"packageManager": "pnpm@10.25.0",
"engines": {
"node": ">=24.12.0",
"pnpm": ">=10.25.0"
},
"pnpm": {
"onlyBuiltDependencies": [
"better-sqlite3"
]
}
}

286
deployments/prebuilt-debian/pnpm-lock.yaml generated Normal file
View File

@@ -0,0 +1,286 @@
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
better-sqlite3:
specifier: ^12.5.0
version: 12.5.0
packages:
base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
better-sqlite3@12.5.0:
resolution: {integrity: sha512-WwCZ/5Diz7rsF29o27o0Gcc1Du+l7Zsv7SYtVPG0X3G/uUI1LqdxrQI7c9Hs2FWpqXXERjW9hp6g3/tH7DlVKg==}
engines: {node: 20.x || 22.x || 23.x || 24.x || 25.x}
bindings@1.5.0:
resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==}
bl@4.1.0:
resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==}
buffer@5.7.1:
resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
chownr@1.1.4:
resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==}
decompress-response@6.0.0:
resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==}
engines: {node: '>=10'}
deep-extend@0.6.0:
resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==}
engines: {node: '>=4.0.0'}
detect-libc@2.1.2:
resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==}
engines: {node: '>=8'}
end-of-stream@1.4.5:
resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==}
expand-template@2.0.3:
resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==}
engines: {node: '>=6'}
file-uri-to-path@1.0.0:
resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==}
fs-constants@1.0.0:
resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}
github-from-package@0.0.0:
resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
ieee754@1.2.1:
resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
inherits@2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
ini@1.3.8:
resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==}
mimic-response@3.1.0:
resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==}
engines: {node: '>=10'}
minimist@1.2.8:
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
mkdirp-classic@0.5.3:
resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}
napi-build-utils@2.0.0:
resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==}
node-abi@3.85.0:
resolution: {integrity: sha512-zsFhmbkAzwhTft6nd3VxcG0cvJsT70rL+BIGHWVq5fi6MwGrHwzqKaxXE+Hl2GmnGItnDKPPkO5/LQqjVkIdFg==}
engines: {node: '>=10'}
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
prebuild-install@7.1.3:
resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==}
engines: {node: '>=10'}
hasBin: true
pump@3.0.3:
resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==}
rc@1.2.8:
resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==}
hasBin: true
readable-stream@3.6.2:
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
engines: {node: '>= 6'}
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
simple-concat@1.0.1:
resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
simple-get@4.0.1:
resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==}
string_decoder@1.3.0:
resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
strip-json-comments@2.0.1:
resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==}
engines: {node: '>=0.10.0'}
tar-fs@2.1.4:
resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==}
tar-stream@2.2.0:
resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
engines: {node: '>=6'}
tunnel-agent@0.6.0:
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
util-deprecate@1.0.2:
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
snapshots:
base64-js@1.5.1: {}
better-sqlite3@12.5.0:
dependencies:
bindings: 1.5.0
prebuild-install: 7.1.3
bindings@1.5.0:
dependencies:
file-uri-to-path: 1.0.0
bl@4.1.0:
dependencies:
buffer: 5.7.1
inherits: 2.0.4
readable-stream: 3.6.2
buffer@5.7.1:
dependencies:
base64-js: 1.5.1
ieee754: 1.2.1
chownr@1.1.4: {}
decompress-response@6.0.0:
dependencies:
mimic-response: 3.1.0
deep-extend@0.6.0: {}
detect-libc@2.1.2: {}
end-of-stream@1.4.5:
dependencies:
once: 1.4.0
expand-template@2.0.3: {}
file-uri-to-path@1.0.0: {}
fs-constants@1.0.0: {}
github-from-package@0.0.0: {}
ieee754@1.2.1: {}
inherits@2.0.4: {}
ini@1.3.8: {}
mimic-response@3.1.0: {}
minimist@1.2.8: {}
mkdirp-classic@0.5.3: {}
napi-build-utils@2.0.0: {}
node-abi@3.85.0:
dependencies:
semver: 7.7.3
once@1.4.0:
dependencies:
wrappy: 1.0.2
prebuild-install@7.1.3:
dependencies:
detect-libc: 2.1.2
expand-template: 2.0.3
github-from-package: 0.0.0
minimist: 1.2.8
mkdirp-classic: 0.5.3
napi-build-utils: 2.0.0
node-abi: 3.85.0
pump: 3.0.3
rc: 1.2.8
simple-get: 4.0.1
tar-fs: 2.1.4
tunnel-agent: 0.6.0
pump@3.0.3:
dependencies:
end-of-stream: 1.4.5
once: 1.4.0
rc@1.2.8:
dependencies:
deep-extend: 0.6.0
ini: 1.3.8
minimist: 1.2.8
strip-json-comments: 2.0.1
readable-stream@3.6.2:
dependencies:
inherits: 2.0.4
string_decoder: 1.3.0
util-deprecate: 1.0.2
safe-buffer@5.2.1: {}
semver@7.7.3: {}
simple-concat@1.0.1: {}
simple-get@4.0.1:
dependencies:
decompress-response: 6.0.0
once: 1.4.0
simple-concat: 1.0.1
string_decoder@1.3.0:
dependencies:
safe-buffer: 5.2.1
strip-json-comments@2.0.1: {}
tar-fs@2.1.4:
dependencies:
chownr: 1.1.4
mkdirp-classic: 0.5.3
pump: 3.0.3
tar-stream: 2.2.0
tar-stream@2.2.0:
dependencies:
bl: 4.1.0
end-of-stream: 1.4.5
fs-constants: 1.0.0
inherits: 2.0.4
readable-stream: 3.6.2
tunnel-agent@0.6.0:
dependencies:
safe-buffer: 5.2.1
util-deprecate@1.0.2: {}
wrappy@1.0.2: {}

View File

@@ -5,6 +5,7 @@ import Database from "better-sqlite3";
import { BetterSQLite3Database, drizzle } from "drizzle-orm/better-sqlite3";
import { migrate } from "drizzle-orm/better-sqlite3/migrator";
import { DB_CASING } from "../../packages/core/src/infrastructure/db/constants";
import * as sqliteSchema from "../../packages/db/schema/sqlite";
export const createSqliteDbFileAsync = async () => {
@@ -16,7 +17,7 @@ export const createSqliteDbFileAsync = async () => {
const connection = new Database(localDbUrl);
const db = drizzle(connection, {
schema: sqliteSchema,
casing: "snake_case",
casing: DB_CASING,
});
await migrate(db, {

View File

@@ -40,29 +40,29 @@
"@semantic-release/commit-analyzer": "^13.0.1",
"@semantic-release/git": "^10.0.1",
"@semantic-release/github": "^12.0.2",
"@semantic-release/npm": "^13.1.2",
"@semantic-release/npm": "^13.1.3",
"@semantic-release/release-notes-generator": "^14.1.0",
"@testcontainers/redis": "^11.9.0",
"@testcontainers/redis": "^11.10.0",
"@turbo/gen": "^2.6.3",
"@vitejs/plugin-react": "^5.1.1",
"@vitejs/plugin-react": "^5.1.2",
"@vitest/coverage-v8": "^4.0.15",
"@vitest/ui": "^4.0.15",
"conventional-changelog-conventionalcommits": "^9.1.0",
"cross-env": "^10.1.0",
"jsdom": "^27.2.0",
"jsdom": "^27.3.0",
"json5": "^2.2.3",
"prettier": "^3.7.4",
"semantic-release": "^25.0.2",
"testcontainers": "^11.9.0",
"testcontainers": "^11.10.0",
"turbo": "^2.6.3",
"typescript": "^5.9.3",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^4.0.15"
},
"packageManager": "pnpm@10.24.0",
"packageManager": "pnpm@10.25.0",
"engines": {
"node": ">=24.11.1",
"pnpm": ">=10.24.0"
"node": ">=24.12.0",
"pnpm": ">=10.25.0"
},
"pnpm": {
"onlyBuiltDependencies": [
@@ -84,18 +84,18 @@
"brace-expansion@>=1.0.0 <=1.1.11": ">=4.0.1",
"esbuild@<=0.24.2": ">=0.27.1",
"form-data@>=4.0.0 <4.0.4": ">=4.0.5",
"hono@<4.6.5": ">=4.10.7",
"hono@<4.6.5": ">=4.11.0",
"linkifyjs@<4.3.2": ">=4.3.2",
"nanoid@>=4.0.0 <5.0.9": ">=5.1.6",
"prismjs@<1.30.0": ">=1.30.0",
"proxmox-api>undici": "7.16.0",
"react-is": "^19.2.1",
"react-is": "^19.2.3",
"rollup@>=4.0.0 <4.22.4": ">=4.53.3",
"sha.js@<=2.4.11": ">=2.4.12",
"tar-fs@>=3.0.0 <3.0.9": ">=3.1.1",
"tar-fs@>=2.0.0 <2.1.3": ">=3.1.1",
"tmp@<=0.2.3": ">=0.2.5",
"vite@>=5.0.0 <=5.4.18": ">=7.2.6"
"vite@>=5.0.0 <=5.4.18": ">=7.2.7"
},
"patchedDependencies": {
"@types/node-unifi": "patches/@types__node-unifi.patch",

View File

@@ -22,8 +22,8 @@
},
"prettier": "@homarr/prettier-config",
"dependencies": {
"@homarr/core": "workspace:^0.1.0",
"@homarr/db": "workspace:^0.1.0",
"@homarr/log": "workspace:^0.1.0",
"@homarr/server-settings": "workspace:^0.1.0",
"@umami/node": "^0.4.0",
"superjson": "2.2.6"
@@ -32,7 +32,7 @@
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"typescript": "^5.9.3"
}
}

View File

@@ -1,15 +1,17 @@
import type { UmamiEventData } from "@umami/node";
import { Umami } from "@umami/node";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { count, db } from "@homarr/db";
import { getServerSettingByKeyAsync } from "@homarr/db/queries";
import { integrations, items, users } from "@homarr/db/schema";
import { logger } from "@homarr/log";
import type { defaultServerSettings } from "@homarr/server-settings";
import { Stopwatch } from "../../common/src";
import { UMAMI_HOST_URL, UMAMI_WEBSITE_ID } from "./constants";
const logger = createLogger({ module: "analytics" });
export const sendServerAnalyticsAsync = async () => {
const stopWatch = new Stopwatch();
const analyticsSettings = await getServerSettingByKeyAsync(db, "analytics");

View File

@@ -22,7 +22,6 @@
"prettier": "@homarr/prettier-config",
"dependencies": {
"@homarr/auth": "workspace:^0.1.0",
"@homarr/certificates": "workspace:^0.1.0",
"@homarr/common": "workspace:^0.1.0",
"@homarr/core": "workspace:^0.1.0",
"@homarr/cron-job-api": "workspace:^0.1.0",
@@ -33,7 +32,6 @@
"@homarr/docker": "workspace:^0.1.0",
"@homarr/icons": "workspace:^0.1.0",
"@homarr/integrations": "workspace:^0.1.0",
"@homarr/log": "workspace:^",
"@homarr/old-import": "workspace:^0.1.0",
"@homarr/old-schema": "workspace:^0.1.0",
"@homarr/ping": "workspace:^0.1.0",
@@ -44,14 +42,14 @@
"@homarr/validation": "workspace:^0.1.0",
"@kubernetes/client-node": "^1.4.0",
"@tanstack/react-query": "^5.90.12",
"@trpc/client": "^11.7.2",
"@trpc/react-query": "^11.7.2",
"@trpc/server": "^11.7.2",
"@trpc/tanstack-react-query": "^11.7.2",
"@trpc/client": "^11.8.0",
"@trpc/react-query": "^11.8.0",
"@trpc/server": "^11.8.0",
"@trpc/tanstack-react-query": "^11.8.0",
"lodash.clonedeep": "^4.5.0",
"next": "16.0.10",
"react": "19.2.1",
"react-dom": "19.2.1",
"react": "19.2.3",
"react-dom": "19.2.3",
"superjson": "2.2.6",
"trpc-to-openapi": "^3.1.0",
"zod": "^4.1.13"
@@ -60,7 +58,7 @@
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"prettier": "^3.7.4",
"typescript": "^5.9.3"
}

View File

@@ -3,14 +3,19 @@ import { TRPCError } from "@trpc/server";
import { zfd } from "zod-form-data";
import { z } from "zod/v4";
import { addCustomRootCertificateAsync, removeCustomRootCertificateAsync } from "@homarr/certificates/server";
import {
addCustomRootCertificateAsync,
removeCustomRootCertificateAsync,
} from "@homarr/core/infrastructure/certificates";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { and, eq } from "@homarr/db";
import { trustedCertificateHostnames } from "@homarr/db/schema";
import { logger } from "@homarr/log";
import { certificateValidFileNameSchema, checkCertificateFile } from "@homarr/validation/certificates";
import { createTRPCRouter, permissionRequiredProcedure } from "../../trpc";
const logger = createLogger({ module: "certificateRouter" });
export const certificateRouter = createTRPCRouter({
addCertificate: permissionRequiredProcedure
.requiresPermission("admin")

View File

@@ -1,14 +1,16 @@
import { observable } from "@trpc/server/observable";
import z from "zod/v4";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { cronExpressionSchema, jobGroupKeys, jobNameSchema } from "@homarr/cron-job-api";
import { cronJobApi } from "@homarr/cron-job-api/client";
import type { TaskStatus } from "@homarr/cron-job-status";
import { createCronJobStatusChannel } from "@homarr/cron-job-status";
import { logger } from "@homarr/log";
import { createTRPCRouter, permissionRequiredProcedure } from "../trpc";
const logger = createLogger({ module: "cronJobsRouter" });
export const cronJobsRouter = createTRPCRouter({
triggerJob: permissionRequiredProcedure
.requiresPermission("admin")

View File

@@ -3,6 +3,7 @@ import { z } from "zod/v4";
import { createId, objectEntries } from "@homarr/common";
import { decryptSecret, encryptSecret } from "@homarr/common/server";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { Database } from "@homarr/db";
import { and, asc, eq, handleTransactionsAsync, inArray, like, or } from "@homarr/db";
import {
@@ -26,7 +27,6 @@ import {
integrationSecretKindObject,
} from "@homarr/definitions";
import { createIntegrationAsync } from "@homarr/integrations";
import { logger } from "@homarr/log";
import { byIdSchema } from "@homarr/validation/common";
import {
integrationCreateSchema,
@@ -40,6 +40,8 @@ import { throwIfActionForbiddenAsync } from "./integration-access";
import { MissingSecretError, testConnectionAsync } from "./integration-test-connection";
import { mapTestConnectionError } from "./map-test-connection-error";
const logger = createLogger({ module: "integrationRouter" });
export const integrationRouter = createTRPCRouter({
all: publicProcedure.query(async ({ ctx }) => {
const groupsOfCurrentUser = await ctx.db.query.groupMembers.findMany({

View File

@@ -1,9 +1,12 @@
import { decryptSecret } from "@homarr/common/server";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import type { Integration } from "@homarr/db/schema";
import type { IntegrationKind, IntegrationSecretKind } from "@homarr/definitions";
import { getAllSecretKindOptions } from "@homarr/definitions";
import { createIntegrationAsync } from "@homarr/integrations";
import { logger } from "@homarr/log";
const logger = createLogger({ module: "integrationTestConnection" });
type FormIntegration = Omit<Integration, "appId"> & {
secrets: {
@@ -35,8 +38,13 @@ export const testConnectionAsync = async (
};
} catch (error) {
logger.warn(
new Error(
`Failed to decrypt secret from database integration="${integration.name}" secretKind="${secret.kind}"`,
new ErrorWithMetadata(
"Failed to decrypt secret from database",
{
integrationName: integration.name,
integrationKind: integration.kind,
secretKind: secret.kind,
},
{ cause: error },
),
);

View File

@@ -2,7 +2,6 @@ import type { V1NodeList, VersionInfo } from "@kubernetes/client-node";
import { TRPCError } from "@trpc/server";
import type { ClusterResourceCount, KubernetesCluster } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -129,7 +128,6 @@ export const clusterRouter = createTRPCRouter({
],
};
} catch (error) {
logger.error("Unable to retrieve cluster", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes cluster",
@@ -165,7 +163,6 @@ export const clusterRouter = createTRPCRouter({
{ label: "volumes", count: volumes.items.length },
];
} catch (error) {
logger.error("Unable to retrieve cluster resource counts", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes resources count",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesBaseResource } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -25,7 +24,6 @@ export const configMapsRouter = createTRPCRouter({
};
});
} catch (error) {
logger.error("Unable to retrieve configMaps", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes ConfigMaps",

View File

@@ -2,7 +2,6 @@ import type { V1HTTPIngressPath, V1Ingress, V1IngressRule } from "@kubernetes/cl
import { TRPCError } from "@trpc/server";
import type { KubernetesIngress, KubernetesIngressPath, KubernetesIngressRuleAndPath } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -43,7 +42,6 @@ export const ingressesRouter = createTRPCRouter({
return ingresses.items.map(mapIngress);
} catch (error) {
logger.error("Unable to retrieve ingresses", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes ingresses",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesNamespace, KubernetesNamespaceState } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -25,7 +24,6 @@ export const namespacesRouter = createTRPCRouter({
} satisfies KubernetesNamespace;
});
} catch (error) {
logger.error("Unable to retrieve namespaces", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes namespaces",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesNode, KubernetesNodeState } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -57,7 +56,6 @@ export const nodesRouter = createTRPCRouter({
};
});
} catch (error) {
logger.error("Unable to retrieve nodes", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes nodes",

View File

@@ -2,13 +2,15 @@ import type { KubeConfig, V1OwnerReference } from "@kubernetes/client-node";
import { AppsV1Api } from "@kubernetes/client-node";
import { TRPCError } from "@trpc/server";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { KubernetesPod } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
import { KubernetesClient } from "../kubernetes-client";
const logger = createLogger({ module: "podsRouter" });
export const podsRouter = createTRPCRouter({
getPods: permissionRequiredProcedure
.requiresPermission("admin")
@@ -55,7 +57,6 @@ export const podsRouter = createTRPCRouter({
return pods;
} catch (error) {
logger.error("Unable to retrieve pods", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes pods",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesSecret } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -25,7 +24,6 @@ export const secretsRouter = createTRPCRouter({
};
});
} catch (error) {
logger.error("Unable to retrieve secrets", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes secrets",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesService } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -29,7 +28,6 @@ export const servicesRouter = createTRPCRouter({
};
});
} catch (error) {
logger.error("Unable to retrieve services", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes services",

View File

@@ -1,7 +1,6 @@
import { TRPCError } from "@trpc/server";
import type { KubernetesVolume } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { kubernetesMiddleware } from "../../../middlewares/kubernetes";
import { createTRPCRouter, permissionRequiredProcedure } from "../../../trpc";
@@ -31,7 +30,6 @@ export const volumesRouter = createTRPCRouter({
};
});
} catch (error) {
logger.error("Unable to retrieve volumes", error);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: "An error occurred while fetching Kubernetes Volumes",

View File

@@ -1,6 +1,6 @@
import { z } from "zod/v4";
import { fetchWithTimeout } from "@homarr/common";
import { fetchWithTimeoutAsync } from "@homarr/core/infrastructure/http/timeout";
import { createTRPCRouter, publicProcedure } from "../trpc";
@@ -36,7 +36,7 @@ export const locationRouter = createTRPCRouter({
.input(locationSearchCityInput)
.output(locationSearchCityOutput)
.query(async ({ input }) => {
const res = await fetchWithTimeout(`https://geocoding-api.open-meteo.com/v1/search?name=${input.query}`);
const res = await fetchWithTimeoutAsync(`https://geocoding-api.open-meteo.com/v1/search?name=${input.query}`);
return (await res.json()) as z.infer<typeof locationSearchCityOutput>;
}),
});

View File

@@ -1,14 +1,16 @@
import { observable } from "@trpc/server/observable";
import z from "zod/v4";
import { logger } from "@homarr/log";
import { logLevels } from "@homarr/log/constants";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { logLevels } from "@homarr/core/infrastructure/logs/constants";
import type { LoggerMessage } from "@homarr/redis";
import { loggingChannel } from "@homarr/redis";
import { zodEnumFromArray } from "@homarr/validation/enums";
import { createTRPCRouter, permissionRequiredProcedure } from "../trpc";
const logger = createLogger({ module: "logRouter" });
export const logRouter = createTRPCRouter({
subscribe: permissionRequiredProcedure
.requiresPermission("other-view-logs")

View File

@@ -2,11 +2,11 @@ import { TRPCError } from "@trpc/server";
import { z } from "zod/v4";
import { createId } from "@homarr/common";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { asc, eq, like } from "@homarr/db";
import { getServerSettingByKeyAsync, updateServerSettingByKeyAsync } from "@homarr/db/queries";
import { searchEngines, users } from "@homarr/db/schema";
import { createIntegrationAsync } from "@homarr/integrations";
import { logger } from "@homarr/log";
import { byIdSchema, paginatedSchema, searchSchema } from "@homarr/validation/common";
import { searchEngineEditSchema, searchEngineManageSchema } from "@homarr/validation/search-engine";
import { mediaRequestOptionsSchema, mediaRequestRequestSchema } from "@homarr/validation/widgets/media-request";
@@ -14,6 +14,8 @@ import { mediaRequestOptionsSchema, mediaRequestRequestSchema } from "@homarr/va
import { createOneIntegrationMiddleware } from "../../middlewares/integration";
import { createTRPCRouter, permissionRequiredProcedure, protectedProcedure, publicProcedure } from "../../trpc";
const logger = createLogger({ module: "searchEngineRouter" });
export const searchEngineRouter = createTRPCRouter({
getPaginated: protectedProcedure.input(paginatedSchema).query(async ({ input, ctx }) => {
const whereQuery = input.search ? like(searchEngines.name, `%${input.search.trim()}%`) : undefined;

View File

@@ -1,8 +1,10 @@
import { logger } from "@homarr/log";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { updateCheckerRequestHandler } from "@homarr/request-handler/update-checker";
import { createTRPCRouter, permissionRequiredProcedure } from "../trpc";
const logger = createLogger({ module: "updateCheckerRouter" });
export const updateCheckerRouter = createTRPCRouter({
getAvailableUpdates: permissionRequiredProcedure.requiresPermission("admin").query(async () => {
try {

View File

@@ -3,6 +3,7 @@ import { z } from "zod/v4";
import { createSaltAsync, hashPasswordAsync } from "@homarr/auth";
import { createId } from "@homarr/common";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { Database } from "@homarr/db";
import { and, eq, like } from "@homarr/db";
import { getMaxGroupPositionAsync } from "@homarr/db/queries";
@@ -10,7 +11,6 @@ import { boards, groupMembers, groupPermissions, groups, invites, users } from "
import { selectUserSchema } from "@homarr/db/validationSchemas";
import { credentialsAdminGroup } from "@homarr/definitions";
import type { SupportedAuthProvider } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { byIdSchema } from "@homarr/validation/common";
import type { userBaseCreateSchema } from "@homarr/validation/user";
import {
@@ -39,6 +39,8 @@ import { throwIfCredentialsDisabled } from "./invite/checks";
import { nextOnboardingStepAsync } from "./onboard/onboard-queries";
import { changeSearchPreferencesAsync, changeSearchPreferencesInputSchema } from "./user/change-search-preferences";
const logger = createLogger({ module: "userRouter" });
export const userRouter = createTRPCRouter({
initUser: onboardingProcedure
.requiresStep("user")
@@ -364,9 +366,11 @@ export const userRouter = createTRPCRouter({
// Admins can change the password of other users without providing the previous password
const isPreviousPasswordRequired = ctx.session.user.id === input.userId;
logger.info(
`User ${user.id} is changing password for user ${input.userId}, previous password is required: ${isPreviousPasswordRequired}`,
);
logger.info("Changing user password", {
actorId: ctx.session.user.id,
targetUserId: input.userId,
previousPasswordRequired: isPreviousPasswordRequired,
});
if (isPreviousPasswordRequired) {
const previousPasswordHash = await hashPasswordAsync(input.previousPassword, dbUser.salt ?? "");

View File

@@ -4,7 +4,6 @@ import { observable } from "@trpc/server/observable";
import { getIntegrationKindsByCategory } from "@homarr/definitions";
import { createIntegrationAsync } from "@homarr/integrations";
import type { Indexer } from "@homarr/integrations/types";
import { logger } from "@homarr/log";
import { indexerManagerRequestHandler } from "@homarr/request-handler/indexer-manager";
import type { IntegrationAction } from "../../middlewares/integration";
@@ -61,10 +60,10 @@ export const indexerManagerRouter = createTRPCRouter({
ctx.integrations.map(async (integration) => {
const client = await createIntegrationAsync(integration);
await client.testAllAsync().catch((err) => {
logger.error("indexer-manager router - ", err);
throw new TRPCError({
code: "INTERNAL_SERVER_ERROR",
message: `Failed to test all indexers for ${integration.name} (${integration.id})`,
cause: err,
});
});
}),

View File

@@ -14,12 +14,14 @@ import { ZodError } from "zod/v4";
import type { Session } from "@homarr/auth";
import { FlattenError } from "@homarr/common";
import { userAgent } from "@homarr/common/server";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { db } from "@homarr/db";
import type { GroupPermissionKey, OnboardingStep } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { getOnboardingOrFallbackAsync } from "./router/onboard/onboard-queries";
const logger = createLogger({ module: "trpc" });
/**
* 1. CONTEXT
*
@@ -36,7 +38,7 @@ export const createTRPCContext = (opts: { headers: Headers; session: Session | n
const session = opts.session;
const source = opts.headers.get("x-trpc-source") ?? "unknown";
logger.info(`tRPC request from ${source} by user '${session?.user.name} (${session?.user.id})'`, session?.user);
logger.info("Received tRPC request", { source, userId: session?.user.id, userName: session?.user.name });
return {
session,

View File

@@ -3,9 +3,9 @@ import { cookies } from "next/headers";
import NextAuth from "next-auth";
import Credentials from "next-auth/providers/credentials";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { db } from "@homarr/db";
import type { SupportedAuthProvider } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { createAdapter } from "./adapter";
import { createSessionCallback } from "./callbacks";
@@ -18,6 +18,8 @@ import { OidcProvider } from "./providers/oidc/oidc-provider";
import { createRedirectUri } from "./redirect";
import { expireDateAfter, generateSessionToken, sessionTokenCookieName } from "./session";
const logger = createLogger({ module: "authConfiguration" });
// See why it's unknown in the [...nextauth]/route.ts file
export const createConfiguration = (
provider: SupportedAuthProvider | "unknown",

View File

@@ -2,15 +2,17 @@ import { cookies } from "next/headers";
import dayjs from "dayjs";
import type { NextAuthConfig } from "next-auth";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { and, eq, inArray } from "@homarr/db";
import type { Database } from "@homarr/db";
import { groupMembers, groups, users } from "@homarr/db/schema";
import { colorSchemeCookieKey, everyoneGroup } from "@homarr/definitions";
import { logger } from "@homarr/log";
import { env } from "./env";
import { extractProfileName } from "./providers/oidc/oidc-provider";
const logger = createLogger({ module: "authEvents" });
export const createSignInEventHandler = (db: Database): Exclude<NextAuthConfig["events"], undefined>["signIn"] => {
return async ({ user, profile }) => {
logger.debug(`SignIn EventHandler for user: ${JSON.stringify(user)} . profile: ${JSON.stringify(profile)}`);
@@ -43,9 +45,11 @@ export const createSignInEventHandler = (db: Database): Exclude<NextAuthConfig["
if (dbUser.name !== user.name) {
await db.update(users).set({ name: user.name }).where(eq(users.id, user.id));
logger.info(
`Username for user of credentials provider has changed. user=${user.id} old=${dbUser.name} new=${user.name}`,
);
logger.info("Username for user of credentials provider has changed.", {
userId: user.id,
oldName: dbUser.name,
newName: user.name,
});
}
if (profile) {
@@ -56,9 +60,11 @@ export const createSignInEventHandler = (db: Database): Exclude<NextAuthConfig["
if (dbUser.name !== profileUsername) {
await db.update(users).set({ name: profileUsername }).where(eq(users.id, user.id));
logger.info(
`Username for user of oidc provider has changed. user=${user.id} old='${dbUser.name}' new='${profileUsername}'`,
);
logger.info("Username for user of oidc provider has changed.", {
userId: user.id,
oldName: dbUser.name,
newName: profileUsername,
});
}
if (
@@ -67,11 +73,13 @@ export const createSignInEventHandler = (db: Database): Exclude<NextAuthConfig["
!dbUser.image?.startsWith("data:")
) {
await db.update(users).set({ image: profile.picture }).where(eq(users.id, user.id));
logger.info(`Profile picture for user of oidc provider has changed. user=${user.id}'`);
logger.info("Profile picture for user of oidc provider has changed.", {
userId: user.id,
});
}
}
logger.info(`User '${dbUser.name}' logged in at ${dayjs().format()}`);
logger.info("User logged in", { userId: user.id, userName: dbUser.name, timestamp: dayjs().format() });
// We use a cookie as localStorage is not shared with server (otherwise flickering would occur)
(await cookies()).set(colorSchemeCookieKey, dbUser.colorScheme, {
@@ -96,7 +104,7 @@ const addUserToEveryoneGroupIfNotMemberAsync = async (db: Database, userId: stri
userId,
groupId: dbEveryoneGroup.id,
});
logger.info(`Added user to everyone group. user=${userId}`);
logger.info("Added user to everyone group.", { userId });
}
};
@@ -118,9 +126,10 @@ const synchronizeGroupsWithExternalForUserAsync = async (db: Database, userId: s
);
if (missingExternalGroupsForUser.length > 0) {
logger.debug(
`Homarr does not have the user in certain groups. user=${userId} count=${missingExternalGroupsForUser.length}`,
);
logger.debug("Homarr does not have the user in certain groups.", {
user: userId,
count: missingExternalGroupsForUser.length,
});
const groupIds = await db.query.groups.findMany({
columns: {
@@ -129,7 +138,10 @@ const synchronizeGroupsWithExternalForUserAsync = async (db: Database, userId: s
where: inArray(groups.name, missingExternalGroupsForUser),
});
logger.debug(`Homarr has found groups in the database user is not in. user=${userId} count=${groupIds.length}`);
logger.debug("Homarr has found groups in the database user is not in.", {
user: userId,
count: groupIds.length,
});
if (groupIds.length > 0) {
await db.insert(groupMembers).values(
@@ -139,9 +151,9 @@ const synchronizeGroupsWithExternalForUserAsync = async (db: Database, userId: s
})),
);
logger.info(`Added user to groups successfully. user=${userId} count=${groupIds.length}`);
logger.info("Added user to groups successfully.", { user: userId, count: groupIds.length });
} else {
logger.debug(`User is already in all groups of Homarr. user=${userId}`);
logger.debug("User is already in all groups of Homarr.", { user: userId });
}
}
@@ -154,9 +166,10 @@ const synchronizeGroupsWithExternalForUserAsync = async (db: Database, userId: s
);
if (groupsUserIsNoLongerMemberOfExternally.length > 0) {
logger.debug(
`Homarr has the user in certain groups that LDAP does not have. user=${userId} count=${groupsUserIsNoLongerMemberOfExternally.length}`,
);
logger.debug("Homarr has the user in certain groups that LDAP does not have.", {
user: userId,
count: groupsUserIsNoLongerMemberOfExternally.length,
});
await db.delete(groupMembers).where(
and(
@@ -168,8 +181,9 @@ const synchronizeGroupsWithExternalForUserAsync = async (db: Database, userId: s
),
);
logger.info(
`Removed user from groups successfully. user=${userId} count=${groupsUserIsNoLongerMemberOfExternally.length}`,
);
logger.info("Removed user from groups successfully.", {
user: userId,
count: groupsUserIsNoLongerMemberOfExternally.length,
});
}
};

View File

@@ -25,20 +25,18 @@
"dependencies": {
"@auth/core": "^0.41.1",
"@auth/drizzle-adapter": "^1.11.1",
"@homarr/certificates": "workspace:^0.1.0",
"@homarr/common": "workspace:^0.1.0",
"@homarr/core": "workspace:^0.1.0",
"@homarr/db": "workspace:^0.1.0",
"@homarr/definitions": "workspace:^0.1.0",
"@homarr/log": "workspace:^0.1.0",
"@homarr/validation": "workspace:^0.1.0",
"bcrypt": "^6.0.0",
"cookies": "^0.9.1",
"ldapts": "8.0.14",
"ldapts": "8.0.23",
"next": "16.0.10",
"next-auth": "5.0.0-beta.30",
"react": "19.2.1",
"react-dom": "19.2.1",
"react": "19.2.3",
"react-dom": "19.2.3",
"zod": "^4.1.13"
},
"devDependencies": {
@@ -47,7 +45,7 @@
"@homarr/tsconfig": "workspace:^0.1.0",
"@types/bcrypt": "6.0.0",
"@types/cookies": "0.9.2",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"prettier": "^3.7.4",
"typescript": "^5.9.3"
}

View File

@@ -1,12 +1,14 @@
import bcrypt from "bcrypt";
import type { z } from "zod/v4";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { Database } from "@homarr/db";
import { and, eq } from "@homarr/db";
import { users } from "@homarr/db/schema";
import { logger } from "@homarr/log";
import type { userSignInSchema } from "@homarr/validation/user";
const logger = createLogger({ module: "basicAuthorization" });
export const authorizeWithBasicCredentialsAsync = async (
db: Database,
credentials: z.infer<typeof userSignInSchema>,
@@ -16,19 +18,19 @@ export const authorizeWithBasicCredentialsAsync = async (
});
if (!user?.password) {
logger.info(`user ${credentials.name} was not found`);
logger.info("User not found", { userName: credentials.name });
return null;
}
logger.info(`user ${user.name} is trying to log in. checking password...`);
logger.info("User is trying to log in. Checking password...", { userName: user.name });
const isValidPassword = await bcrypt.compare(credentials.password, user.password);
if (!isValidPassword) {
logger.warn(`password for user ${user.name} was incorrect`);
logger.warn("Password for user was incorrect", { userName: user.name });
return null;
}
logger.info(`user ${user.name} successfully authorized`);
logger.info("User successfully authorized", { userName: user.name });
return {
id: user.id,

View File

@@ -1,21 +1,23 @@
import { CredentialsSignin } from "@auth/core/errors";
import { z } from "zod/v4";
import { createId, extractErrorMessage } from "@homarr/common";
import { createId } from "@homarr/common";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { Database, InferInsertModel } from "@homarr/db";
import { and, eq } from "@homarr/db";
import { users } from "@homarr/db/schema";
import { logger } from "@homarr/log";
import type { ldapSignInSchema } from "@homarr/validation/user";
import { env } from "../../../env";
import { LdapClient } from "../ldap-client";
const logger = createLogger({ module: "ldapAuthorization" });
export const authorizeWithLdapCredentialsAsync = async (
db: Database,
credentials: z.infer<typeof ldapSignInSchema>,
) => {
logger.info(`user ${credentials.name} is trying to log in using LDAP. Connecting to LDAP server...`);
logger.info("User is trying to log in using LDAP. Connecting to LDAP server...", { userName: credentials.name });
const client = new LdapClient();
await client
.bindAsync({
@@ -23,8 +25,7 @@ export const authorizeWithLdapCredentialsAsync = async (
password: env.AUTH_LDAP_BIND_PASSWORD,
})
.catch((error) => {
logger.error(`Failed to connect to LDAP server ${extractErrorMessage(error)}`);
throw new CredentialsSignin();
throw new CredentialsSignin("Failed to connect to LDAP server", { cause: error });
});
logger.info("Connected to LDAP server. Searching for user...");
@@ -48,21 +49,21 @@ export const authorizeWithLdapCredentialsAsync = async (
});
if (!ldapUser) {
logger.warn(`User ${credentials.name} not found in LDAP`);
throw new CredentialsSignin();
throw new CredentialsSignin(`User not found in LDAP username="${credentials.name}"`);
}
// Validate email
const mailResult = await z.string().email().safeParseAsync(ldapUser[env.AUTH_LDAP_USER_MAIL_ATTRIBUTE]);
if (!mailResult.success) {
logger.error(
`User ${credentials.name} found but with invalid or non-existing Email. Not Supported: "${ldapUser[env.AUTH_LDAP_USER_MAIL_ATTRIBUTE]}"`,
);
throw new CredentialsSignin();
logger.error("User found in LDAP but with invalid or non-existing Email", {
userName: credentials.name,
emailValue: ldapUser[env.AUTH_LDAP_USER_MAIL_ATTRIBUTE],
});
throw new CredentialsSignin("User found in LDAP but with invalid or non-existing Email");
}
logger.info(`User ${credentials.name} found in LDAP. Logging in...`);
logger.info("User found in LDAP. Logging in...", { userName: credentials.name });
// Bind with user credentials to check if the password is correct
const userClient = new LdapClient();
@@ -72,12 +73,12 @@ export const authorizeWithLdapCredentialsAsync = async (
password: credentials.password,
})
.catch(() => {
logger.warn(`Wrong credentials for user ${credentials.name}`);
logger.warn("Wrong credentials for user", { userName: credentials.name });
throw new CredentialsSignin();
});
await userClient.disconnectAsync();
logger.info(`User ${credentials.name} logged in successfully, retrieving user groups...`);
logger.info("User credentials are correct. Retrieving user groups...", { userName: credentials.name });
const userGroups = await client
.searchAsync({
@@ -93,7 +94,7 @@ export const authorizeWithLdapCredentialsAsync = async (
})
.then((entries) => entries.map((entry) => entry.cn).filter((group): group is string => group !== undefined));
logger.info(`Found ${userGroups.length} groups for user ${credentials.name}.`);
logger.info("User groups retrieved", { userName: credentials.name, groups: userGroups.length });
await client.disconnectAsync();
@@ -111,7 +112,7 @@ export const authorizeWithLdapCredentialsAsync = async (
});
if (!user) {
logger.info(`User ${credentials.name} not found in the database. Creating...`);
logger.info("User not found in the database. Creating...", { userName: credentials.name });
const insertUser = {
id: createId(),
@@ -126,7 +127,7 @@ export const authorizeWithLdapCredentialsAsync = async (
user = insertUser;
logger.info(`User ${credentials.name} created successfully.`);
logger.info("User created successfully", { userName: credentials.name });
}
return {

View File

@@ -3,7 +3,7 @@ import type { OIDCConfig } from "@auth/core/providers";
import type { Profile } from "@auth/core/types";
import { customFetch } from "next-auth";
import { fetchWithTrustedCertificatesAsync } from "@homarr/certificates/server";
import { fetchWithTrustedCertificatesAsync } from "@homarr/core/infrastructure/http";
import { env } from "../../env";
import { createRedirectUri } from "../../redirect";

View File

@@ -25,14 +25,14 @@
"prettier": "@homarr/prettier-config",
"dependencies": {
"@homarr/api": "workspace:^0.1.0",
"react": "19.2.1",
"react-dom": "19.2.1"
"react": "19.2.3",
"react-dom": "19.2.3"
},
"devDependencies": {
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"typescript": "^5.9.3"
}
}

View File

@@ -1,9 +0,0 @@
import baseConfig from "@homarr/eslint-config/base";

/**
 * Flat ESLint configuration for this package: the shared Homarr base config
 * with an (empty) package-local ignore list in front.
 *
 * @type {import('typescript-eslint').Config}
 */
export default [{ ignores: [] }, ...baseConfig];

View File

@@ -1,36 +0,0 @@
{
"name": "@homarr/certificates",
"version": "0.1.0",
"private": true,
"license": "Apache-2.0",
"type": "module",
"exports": {
"./server": "./src/server.ts"
},
"typesVersions": {
"*": {
"*": [
"src/*"
]
}
},
"scripts": {
"clean": "rm -rf .turbo node_modules",
"format": "prettier --check . --ignore-path ../../.gitignore",
"lint": "eslint",
"typecheck": "tsc --noEmit"
},
"prettier": "@homarr/prettier-config",
"dependencies": {
"@homarr/common": "workspace:^0.1.0",
"@homarr/db": "workspace:^0.1.0",
"undici": "7.16.0"
},
"devDependencies": {
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"typescript": "^5.9.3"
}
}

View File

@@ -1,141 +0,0 @@
import { X509Certificate } from "node:crypto";
import fsSync from "node:fs";
import fs from "node:fs/promises";
import type { AgentOptions } from "node:https";
import { Agent as HttpsAgent } from "node:https";
import path from "node:path";
import { checkServerIdentity, rootCertificates } from "node:tls";
import axios from "axios";
import type { RequestInfo, RequestInit, Response } from "undici";
import { fetch } from "undici";
import { env } from "@homarr/common/env";
import { LoggingAgent } from "@homarr/common/server";
import type { InferSelectModel } from "@homarr/db";
import { db } from "@homarr/db";
import type { trustedCertificateHostnames } from "@homarr/db/schema";
// Resolves the directory that holds user-provided root certificates.
// Outside production the folder is only used when LOCAL_CERTIFICATE_PATH is
// set; in production it falls back to the bundled appdata location.
const getCertificateFolder = () => {
  const localPath = process.env.LOCAL_CERTIFICATE_PATH;
  if (env.NODE_ENV !== "production") return localPath;
  return localPath ?? path.join("/appdata", "trusted-certificates");
};
export const loadCustomRootCertificatesAsync = async () => {
const folder = getCertificateFolder();
if (!folder) {
return [];
}
if (!fsSync.existsSync(folder)) {
await fs.mkdir(folder, { recursive: true });
}
const dirContent = await fs.readdir(folder);
return await Promise.all(
dirContent
.filter((file) => file.endsWith(".crt") || file.endsWith(".pem"))
.map(async (file) => ({
content: await fs.readFile(path.join(folder, file), "utf8"),
fileName: file,
})),
);
};
export const removeCustomRootCertificateAsync = async (fileName: string) => {
const folder = getCertificateFolder();
if (!folder) {
return null;
}
const existingFiles = await fs.readdir(folder, { withFileTypes: true });
if (!existingFiles.some((file) => file.isFile() && file.name === fileName)) {
throw new Error(`File ${fileName} does not exist`);
}
const fullPath = path.join(folder, fileName);
const content = await fs.readFile(fullPath, "utf8");
await fs.rm(fullPath);
try {
return new X509Certificate(content);
} catch {
return null;
}
};
/**
 * Writes a custom root certificate into the certificate folder.
 *
 * @param fileName plain file name (no directory components) to store the certificate under
 * @param content  PEM content to write
 * @throws Error when no certificate folder is configured or the file name is not a plain name
 */
export const addCustomRootCertificateAsync = async (fileName: string, content: string) => {
  const folder = getCertificateFolder();
  if (!folder) {
    throw new Error(
      "When you want to use custom certificates locally you need to set LOCAL_CERTIFICATE_PATH to an absolute path",
    );
  }

  // Previously only "/" was rejected; that missed "\" (Windows separators)
  // and the "." / ".." segments. Require the name to be a plain basename so
  // the write can never escape the certificate folder.
  if (
    fileName !== path.basename(fileName) ||
    fileName.includes("\\") ||
    fileName === "." ||
    fileName === ".."
  ) {
    throw new Error("Invalid file name");
  }

  await fs.writeFile(path.join(folder, fileName), content);
};
/** Loads every hostname entry for which a certificate thumbprint was explicitly trusted. */
export const getTrustedCertificateHostnamesAsync = () => db.query.trustedCertificateHostnames.findMany();
/** Combines Node's built-in root certificates with all custom ones from the certificate folder. */
export const getAllTrustedCertificatesAsync = async () => {
  const customCertificates = await loadCustomRootCertificatesAsync();
  return [...rootCertificates, ...customCertificates.map(({ content }) => content)];
};
/**
 * Builds a checkServerIdentity function that accepts a peer certificate when
 * its SHA-256 fingerprint is explicitly trusted for the given hostname, and
 * otherwise falls back to Node's default identity check.
 */
export const createCustomCheckServerIdentity = (
  trustedHostnames: InferSelectModel<typeof trustedCertificateHostnames>[],
): typeof checkServerIdentity => {
  return (hostname, peerCertificate) => {
    const isExplicitlyTrusted = trustedHostnames.some(
      (entry) => entry.thumbprint === peerCertificate.fingerprint256 && entry.hostname === hostname,
    );
    // Returning undefined signals a successful identity check.
    if (isExplicitlyTrusted) return undefined;
    return checkServerIdentity(hostname, peerCertificate);
  };
};
/**
 * Creates a LoggingAgent whose TLS connect options trust both the built-in
 * and the custom root certificates, unless an explicit override is given.
 */
export const createCertificateAgentAsync = async (override?: {
  ca: string | string[];
  checkServerIdentity: typeof checkServerIdentity;
}) => {
  // `??` short-circuits, so the certificate lookups only run without an override.
  const connectOptions = override ?? {
    ca: await getAllTrustedCertificatesAsync(),
    checkServerIdentity: createCustomCheckServerIdentity(await getTrustedCertificateHostnamesAsync()),
  };
  return new LoggingAgent({ connect: connectOptions });
};
/** Creates a node:https Agent with the same trust configuration as the logging agent. */
export const createHttpsAgentAsync = async (override?: Pick<AgentOptions, "ca" | "checkServerIdentity">) => {
  const agentOptions = override ?? {
    ca: await getAllTrustedCertificatesAsync(),
    checkServerIdentity: createCustomCheckServerIdentity(await getTrustedCertificateHostnamesAsync()),
  };
  return new HttpsAgent(agentOptions);
};
/** Creates an axios instance whose https agent trusts the custom root certificates. */
export const createAxiosCertificateInstanceAsync = async (
  override?: Pick<AgentOptions, "ca" | "checkServerIdentity">,
) => {
  const httpsAgent = await createHttpsAgentAsync(override);
  return axios.create({ httpsAgent });
};
/** Like undici fetch, but dispatched through the trusted-certificates agent. */
export const fetchWithTrustedCertificatesAsync = async (url: RequestInfo, options?: RequestInit): Promise<Response> => {
  const dispatcher = await createCertificateAgentAsync();
  // dispatcher is set after the spread so it always wins over caller options.
  return fetch(url, { ...options, dispatcher });
};

View File

@@ -1,8 +0,0 @@
{
"extends": "@homarr/tsconfig/base.json",
"compilerOptions": {
"tsBuildInfoFile": "node_modules/.cache/tsbuildinfo.json"
},
"include": ["*.ts", "src"],
"exclude": ["node_modules"]
}

View File

@@ -35,7 +35,7 @@
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"esbuild": "^0.27.1",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"typescript": "^5.9.3"
}
}

View File

@@ -24,12 +24,10 @@ export const env = createEnv({
message: `SECRET_ENCRYPTION_KEY must only contain hex characters${errorSuffix}`,
}),
NO_EXTERNAL_CONNECTION: createBooleanSchema(false),
ENABLE_DNS_CACHING: createBooleanSchema(false),
},
runtimeEnv: {
SECRET_ENCRYPTION_KEY: process.env.SECRET_ENCRYPTION_KEY,
NODE_ENV: process.env.NODE_ENV,
NO_EXTERNAL_CONNECTION: process.env.NO_EXTERNAL_CONNECTION,
ENABLE_DNS_CACHING: process.env.ENABLE_DNS_CACHING,
},
});

View File

@@ -8,7 +8,6 @@
".": "./index.ts",
"./types": "./src/types.ts",
"./server": "./src/server.ts",
"./init-dns": "./src/dns.ts",
"./client": "./src/client.ts",
"./env": "./env.ts"
},
@@ -28,14 +27,12 @@
"prettier": "@homarr/prettier-config",
"dependencies": {
"@homarr/core": "workspace:^0.1.0",
"@homarr/log": "workspace:^0.1.0",
"@paralleldrive/cuid2": "^3.1.0",
"dayjs": "^1.11.19",
"dns-caching": "^0.2.7",
"next": "16.0.10",
"octokit": "^5.0.5",
"react": "19.2.1",
"react-dom": "19.2.1",
"react": "19.2.3",
"react-dom": "19.2.3",
"undici": "7.16.0",
"zod": "^4.1.13",
"zod-validation-error": "^5.0.0"
@@ -44,7 +41,7 @@
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"eslint": "^9.39.2",
"typescript": "^5.9.3"
}
}

View File

@@ -1,7 +1,5 @@
import { AxiosError } from "axios";
import { logger } from "@homarr/log";
import type { AnyRequestError } from "../request-error";
import { RequestError } from "../request-error";
import { ResponseError } from "../response-error";
@@ -9,11 +7,15 @@ import { matchErrorCode } from "./fetch-http-error-handler";
import { HttpErrorHandler } from "./http-error-handler";
export class AxiosHttpErrorHandler extends HttpErrorHandler {
constructor() {
super("axios");
}
handleRequestError(error: unknown): AnyRequestError | undefined {
if (!(error instanceof AxiosError)) return undefined;
if (error.code === undefined) return undefined;
logger.debug("Received Axios request error", {
this.logRequestError({
code: error.code,
message: error.message,
});
@@ -28,8 +30,7 @@ export class AxiosHttpErrorHandler extends HttpErrorHandler {
handleResponseError(error: unknown): ResponseError | undefined {
if (!(error instanceof AxiosError)) return undefined;
if (error.response === undefined) return undefined;
logger.debug("Received Axios response error", {
this.logResponseError({
status: error.response.status,
url: error.response.config.url,
message: error.message,

View File

@@ -1,5 +1,3 @@
import { logger } from "@homarr/log";
import { objectEntries } from "../../../object";
import type { Modify } from "../../../types";
import type { AnyRequestError, AnyRequestErrorInput, RequestErrorCode, RequestErrorReason } from "../request-error";
@@ -9,13 +7,13 @@ import { HttpErrorHandler } from "./http-error-handler";
export class FetchHttpErrorHandler extends HttpErrorHandler {
constructor(private type = "undici") {
super();
super(type);
}
handleRequestError(error: unknown): AnyRequestError | undefined {
if (!isTypeErrorWithCode(error)) return undefined;
logger.debug(`Received ${this.type} request error`, {
this.logRequestError({
code: error.cause.code,
});

View File

@@ -1,7 +1,24 @@
import type { ILogger } from "@homarr/core/infrastructure/logs";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { AnyRequestError } from "../request-error";
import type { ResponseError } from "../response-error";
/**
 * Base class for HTTP error handlers that normalize library-specific errors
 * (e.g. axios, octokit — see the concrete subclasses passing their `type`)
 * into the shared AnyRequestError / ResponseError types.
 */
export abstract class HttpErrorHandler {
  // Logger scoped to the concrete handler through its `type` tag.
  protected logger: ILogger;
  constructor(type: string) {
    this.logger = createLogger({ module: "httpErrorHandler", type });
  }
  // Debug-logs a failed request (no response received); `code` is the transport-level error code.
  protected logRequestError<T extends { code: string }>(metadata: T) {
    this.logger.debug("Received request error", metadata);
  }
  // Debug-logs an error response; `url` may be undefined when the library does not expose it.
  protected logResponseError<T extends { status: number; url: string | undefined }>(metadata: T) {
    this.logger.debug("Received response error", metadata);
  }
  /** Returns a normalized request error when `error` is recognized by this handler, otherwise undefined. */
  abstract handleRequestError(error: unknown): AnyRequestError | undefined;
  /** Returns a normalized response error when `error` is recognized by this handler, otherwise undefined. */
  abstract handleResponseError(error: unknown): ResponseError | undefined;
}

View File

@@ -1,7 +1,5 @@
import { FetchError } from "node-fetch";
import { logger } from "@homarr/log";
import { RequestError } from "../request-error";
import type { AnyRequestError } from "../request-error";
import type { ResponseError } from "../response-error";
@@ -15,14 +13,14 @@ import { HttpErrorHandler } from "./http-error-handler";
*/
export class NodeFetchHttpErrorHandler extends HttpErrorHandler {
constructor(private type = "node-fetch") {
super();
super(type);
}
handleRequestError(error: unknown): AnyRequestError | undefined {
if (!(error instanceof FetchError)) return undefined;
if (error.code === undefined) return undefined;
logger.debug(`Received ${this.type} request error`, {
this.logRequestError({
code: error.code,
message: error.message,
});

View File

@@ -5,6 +5,10 @@ import { ResponseError } from "../response-error";
import { HttpErrorHandler } from "./http-error-handler";
export class OctokitHttpErrorHandler extends HttpErrorHandler {
constructor() {
super("octokit");
}
/**
* I wasn't able to get a request error triggered. Therefore we ignore them for now
* and just forward them as unknown errors
@@ -16,6 +20,11 @@ export class OctokitHttpErrorHandler extends HttpErrorHandler {
handleResponseError(error: unknown): ResponseError | undefined {
if (!(error instanceof OctokitRequestError)) return undefined;
this.logResponseError({
status: error.status,
url: error.response?.url,
});
return new ResponseError({
status: error.status,
url: error.response?.url,

View File

@@ -1,7 +1,5 @@
import { FetchError } from "ofetch";
import { logger } from "@homarr/log";
import type { AnyRequestError } from "../request-error";
import { ResponseError } from "../response-error";
import { FetchHttpErrorHandler } from "./fetch-http-error-handler";
@@ -14,6 +12,10 @@ import { HttpErrorHandler } from "./http-error-handler";
* It is for example used within the ctrl packages like qbittorrent, deluge, transmission, etc.
*/
export class OFetchHttpErrorHandler extends HttpErrorHandler {
constructor() {
super("ofetch");
}
handleRequestError(error: unknown): AnyRequestError | undefined {
if (!(error instanceof FetchError)) return undefined;
if (!(error.cause instanceof TypeError)) return undefined;
@@ -28,7 +30,7 @@ export class OFetchHttpErrorHandler extends HttpErrorHandler {
if (!(error instanceof FetchError)) return undefined;
if (error.response === undefined) return undefined;
logger.debug("Received ofetch response error", {
this.logResponseError({
status: error.response.status,
url: error.response.url,
});

View File

@@ -1,11 +1,13 @@
import { logger } from "@homarr/log";
import type { AnyRequestError } from "../request-error";
import { ResponseError } from "../response-error";
import { HttpErrorHandler } from "./http-error-handler";
import { NodeFetchHttpErrorHandler } from "./node-fetch-http-error-handler";
export class TsdavHttpErrorHandler extends HttpErrorHandler {
constructor() {
super("tsdav");
}
handleRequestError(error: unknown): AnyRequestError | undefined {
return new NodeFetchHttpErrorHandler("tsdav").handleRequestError(error);
}
@@ -16,8 +18,9 @@ export class TsdavHttpErrorHandler extends HttpErrorHandler {
// https://github.com/natelindev/tsdav/blob/bf33f04b1884694d685ee6f2b43fe9354b12d167/src/account.ts#L86
if (error.message !== "Invalid credentials") return undefined;
logger.debug("Received tsdav response error", {
this.logResponseError({
status: 401,
url: undefined,
});
return new ResponseError({ status: 401, url: "?" });

View File

@@ -1,13 +1,15 @@
import { logger } from "@homarr/log";
import { ParseError } from "../parse-error";
import { ParseErrorHandler } from "./parse-error-handler";
export class JsonParseErrorHandler extends ParseErrorHandler {
constructor() {
super("json");
}
handleParseError(error: unknown): ParseError | undefined {
if (!(error instanceof SyntaxError)) return undefined;
logger.debug("Received JSON parse error", {
this.logParseError({
message: error.message,
});

View File

@@ -1,5 +1,17 @@
import type { ILogger } from "@homarr/core/infrastructure/logs";
import { createLogger } from "@homarr/core/infrastructure/logs";
import type { ParseError } from "../parse-error";
/**
 * Base class for parse error handlers that normalize library-specific parse
 * failures (e.g. "json", "zod" — see the concrete subclasses passing their
 * `type`) into the shared ParseError type.
 */
export abstract class ParseErrorHandler {
  // Logger scoped to the concrete handler through its `type` tag.
  protected logger: ILogger;
  constructor(type: string) {
    this.logger = createLogger({ module: "parseErrorHandler", type });
  }
  // Debug-logs that a parse error was received, with optional metadata.
  protected logParseError(metadata?: Record<string, unknown>) {
    this.logger.debug("Received parse error", metadata);
  }
  /** Returns a ParseError when `error` is recognized by this handler, otherwise undefined. */
  abstract handleParseError(error: unknown): ParseError | undefined;
}

View File

@@ -1,12 +1,14 @@
import { fromError } from "zod-validation-error";
import { ZodError } from "zod/v4";
import { logger } from "@homarr/log";
import { ParseError } from "../parse-error";
import { ParseErrorHandler } from "./parse-error-handler";
export class ZodParseErrorHandler extends ParseErrorHandler {
constructor() {
super("zod");
}
handleParseError(error: unknown): ParseError | undefined {
if (!(error instanceof ZodError)) return undefined;
@@ -17,7 +19,7 @@ export class ZodParseErrorHandler extends ParseErrorHandler {
prefix: null,
}).toString();
logger.debug("Received Zod parse error");
this.logParseError();
return new ParseError(message, { cause: error });
}

View File

@@ -1,39 +0,0 @@
import type { Dispatcher } from "undici";
import { Agent } from "undici";
import { logger } from "@homarr/log";
// The below import statement initializes dns-caching
import "./dns";
/**
 * Undici Agent that debug-logs every dispatched request — with likely
 * sensitive URL parts redacted — before delegating to Agent.dispatch.
 *
 * NOTE: the previous explicit constructor only forwarded its arguments to
 * Agent, which the implicit constructor already does, so it was removed.
 */
export class LoggingAgent extends Agent {
  dispatch(options: Dispatcher.DispatchOptions, handler: Dispatcher.DispatchHandler): boolean {
    // Redact long path segments (>= 32 chars) that are likely keys or tokens;
    // query strings (starting with "?") are handled separately below.
    const path = options.path
      .split("/")
      .map((segment) => (segment.length >= 32 && !segment.startsWith("?") ? "REDACTED" : segment))
      .join("/");
    const url = new URL(`${options.origin as string}${path}`);
    // The below code should prevent sensitive data from being logged as
    // some integrations use query parameters for auth.
    url.searchParams.forEach((value, key) => {
      if (value === "") return; // Skip empty values
      if (/^-?\d{1,12}$/.test(value)) return; // Skip small numbers
      if (value === "true" || value === "false") return; // Skip boolean values
      if (/^[a-zA-Z]{1,12}$/.test(value)) return; // Skip short strings
      if (/^\d{4}-\d{2}-\d{2}$/.test(value)) return; // Skip dates
      if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/.test(value)) return; // Skip date times
      url.searchParams.set(key, "REDACTED");
    });
    logger.debug(
      `Dispatching request ${url.toString().replaceAll("=&", "&")} (${Object.keys(options.headers ?? {}).length} headers)`,
    );
    return super.dispatch(options, handler);
  }
}

View File

@@ -1,16 +0,0 @@
/**
 * Drop-in replacement for fetch that aborts the request after 10 seconds.
 * https://stackoverflow.com/questions/46946380/fetch-api-request-timeout
 *
 * NOTE(review): requestInit is spread after `signal`, so a caller-provided
 * signal replaces the timeout signal — presumably intentional; confirm.
 *
 * @param param0 fetch arguments
 * @returns fetch response
 */
export const fetchWithTimeout = (...[url, requestInit]: Parameters<typeof fetch>) => {
  const abortController = new AbortController();
  const timeoutHandle = setTimeout(() => abortController.abort(), 10_000);
  const response = fetch(url, { signal: abortController.signal, ...requestInit });
  return response.finally(() => clearTimeout(timeoutHandle));
};

View File

@@ -9,7 +9,6 @@ export * from "./id";
export * from "./url";
export * from "./number";
export * from "./error";
export * from "./fetch-with-timeout";
export * from "./theme";
export * from "./function";
export * from "./id";

View File

@@ -1,5 +1,4 @@
export * from "./security";
export * from "./encryption";
export * from "./user-agent";
export * from "./fetch-agent";
export * from "./errors";

View File

@@ -7,7 +7,20 @@
"exports": {
"./infrastructure/redis": "./src/infrastructure/redis/client.ts",
"./infrastructure/env": "./src/infrastructure/env/index.ts",
".": "./src/index.ts"
"./infrastructure/logs": "./src/infrastructure/logs/index.ts",
"./infrastructure/logs/constants": "./src/infrastructure/logs/constants.ts",
"./infrastructure/logs/env": "./src/infrastructure/logs/env.ts",
"./infrastructure/logs/error": "./src/infrastructure/logs/error.ts",
"./infrastructure/db": "./src/infrastructure/db/index.ts",
"./infrastructure/db/env": "./src/infrastructure/db/env.ts",
"./infrastructure/db/constants": "./src/infrastructure/db/constants.ts",
"./infrastructure/certificates": "./src/infrastructure/certificates/index.ts",
"./infrastructure/certificates/hostnames/db/sqlite": "./src/infrastructure/certificates/hostnames/db/sqlite.ts",
"./infrastructure/certificates/hostnames/db/mysql": "./src/infrastructure/certificates/hostnames/db/mysql.ts",
"./infrastructure/certificates/hostnames/db/postgresql": "./src/infrastructure/certificates/hostnames/db/postgresql.ts",
"./infrastructure/dns/init": "./src/infrastructure/dns/init.ts",
"./infrastructure/http": "./src/infrastructure/http/index.ts",
"./infrastructure/http/timeout": "./src/infrastructure/http/timeout.ts"
},
"typesVersions": {
"*": {
@@ -25,14 +38,23 @@
"prettier": "@homarr/prettier-config",
"dependencies": {
"@t3-oss/env-nextjs": "^0.13.8",
"better-sqlite3": "^12.5.0",
"dns-caching": "^0.2.9",
"drizzle-orm": "^0.45.1",
"ioredis": "5.8.2",
"mysql2": "3.15.3",
"pg": "^8.16.3",
"superjson": "2.2.6",
"winston": "3.19.0",
"zod": "^4.1.13"
},
"devDependencies": {
"@homarr/eslint-config": "workspace:^0.2.0",
"@homarr/prettier-config": "workspace:^0.1.0",
"@homarr/tsconfig": "workspace:^0.1.0",
"eslint": "^9.39.1",
"@types/better-sqlite3": "7.6.13",
"@types/pg": "^8.16.0",
"eslint": "^9.39.2",
"typescript": "^5.9.3"
}
}

View File

@@ -0,0 +1,74 @@
import { X509Certificate } from "node:crypto";
import fsSync from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import { rootCertificates } from "node:tls";
/**
 * Resolves the folder holding custom trusted certificates.
 * Outside production this is only the LOCAL_CERTIFICATE_PATH override (may be
 * undefined); in production it falls back to /appdata/trusted-certificates.
 */
const getCertificateFolder = () => {
  const configuredPath = process.env.LOCAL_CERTIFICATE_PATH;
  if (process.env.NODE_ENV === "production") {
    return configuredPath ?? path.join("/appdata", "trusted-certificates");
  }
  return configuredPath;
};
/**
 * Loads all custom root certificates (*.crt / *.pem files) from the configured
 * certificate folder, creating the folder if it does not exist yet.
 * @returns array of { content, fileName } entries; empty when no folder is configured
 */
export const loadCustomRootCertificatesAsync = async () => {
  const folder = getCertificateFolder();
  if (!folder) {
    return [];
  }
  // `recursive: true` makes mkdir a no-op when the folder already exists,
  // replacing the previous blocking existsSync check (and its check-then-create
  // race) with a single non-blocking call.
  await fs.mkdir(folder, { recursive: true });
  const dirContent = await fs.readdir(folder);
  return await Promise.all(
    dirContent
      .filter((file) => file.endsWith(".crt") || file.endsWith(".pem"))
      .map(async (file) => ({
        content: await fs.readFile(path.join(folder, file), "utf8"),
        fileName: file,
      })),
  );
};
/**
 * Combines Node's built-in root certificates with the custom ones loaded from
 * the certificate folder.
 * @returns PEM strings of all trusted certificates
 */
export const getAllTrustedCertificatesAsync = async () => {
  const custom = await loadCustomRootCertificatesAsync();
  return [...rootCertificates, ...custom.map(({ content }) => content)];
};
/**
 * Deletes a custom root certificate file and returns its parsed X509
 * representation; returns null when no certificate folder is configured or the
 * removed file was not a parseable certificate.
 * @param fileName name of the file inside the certificate folder
 * @throws Error when the file does not exist in the folder
 */
export const removeCustomRootCertificateAsync = async (fileName: string) => {
  const folder = getCertificateFolder();
  if (!folder) {
    return null;
  }
  // Only names actually listed by readdir can be removed, which also rules out
  // path traversal through separators in fileName.
  const entries = await fs.readdir(folder, { withFileTypes: true });
  const fileExists = entries.some((entry) => entry.isFile() && entry.name === fileName);
  if (!fileExists) {
    throw new Error(`File ${fileName} does not exist`);
  }
  const certificatePath = path.join(folder, fileName);
  const pemContent = await fs.readFile(certificatePath, "utf8");
  await fs.rm(certificatePath);
  try {
    return new X509Certificate(pemContent);
  } catch {
    return null;
  }
};
/**
 * Writes a custom root certificate into the certificate folder.
 * @param fileName name of the certificate file; must be a bare file name
 * @param content PEM content to write
 * @throws Error when no folder is configured or the file name is unsafe
 */
export const addCustomRootCertificateAsync = async (fileName: string, content: string) => {
  const folder = getCertificateFolder();
  if (!folder) {
    throw new Error(
      "When you want to use custom certificates locally you need to set LOCAL_CERTIFICATE_PATH to an absolute path",
    );
  }
  // The original only rejected "/", leaving ".." and Windows "\" separators
  // usable for path traversal out of the certificate folder. Reject anything
  // that is not a plain file name.
  if (
    fileName.includes("/") ||
    fileName.includes("\\") ||
    fileName.includes("..") ||
    path.basename(fileName) !== fileName
  ) {
    throw new Error("Invalid file name");
  }
  await fs.writeFile(path.join(folder, fileName), content);
};

Some files were not shown because too many files have changed in this diff Show More