Replace entire codebase with homarr-labs/homarr

This commit is contained in:
Thomas Camlong
2026-01-15 21:54:44 +01:00
parent c5bc3b1559
commit 4fdd1fe351
4666 changed files with 409577 additions and 147434 deletions
+21
View File
@@ -0,0 +1,21 @@
import { sendServerAnalyticsAsync } from "@homarr/analytics";
import { env } from "@homarr/common/env";
import { EVERY_WEEK } from "@homarr/cron-jobs-core/expressions";
import { db } from "@homarr/db";
import { getServerSettingByKeyAsync } from "@homarr/db/queries";
import { createCronJob } from "../lib";
/**
 * Weekly job that sends server analytics via sendServerAnalyticsAsync.
 * Runs once on startup and cannot be triggered manually; it is a no-op
 * when external connections are disabled or general analytics is off.
 */
export const analyticsJob = createCronJob("analytics", EVERY_WEEK, {
  runOnStart: true,
  preventManualExecution: true,
}).withCallback(async () => {
  // Never send anything when the instance runs without external connectivity.
  if (env.NO_EXTERNAL_CONNECTION) return;

  // Respect the analytics opt-in stored in the server settings.
  const { enableGeneral } = await getServerSettingByKeyAsync(db, "analytics");
  if (enableGeneral) {
    await sendServerAnalyticsAsync();
  }
});
+31
View File
@@ -0,0 +1,31 @@
import SuperJSON from "superjson";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { items } from "@homarr/db/schema";
import { dockerContainersRequestHandler } from "@homarr/request-handler/docker";
import type { WidgetComponentProps } from "../../../widgets";
import { createCronJob } from "../lib";
const logger = createLogger({ module: "dockerJobs" });

/**
 * Refreshes the cached data of every dockerContainers widget item once per minute.
 * Failures of individual items are logged and never abort the remaining items.
 */
export const dockerContainersJob = createCronJob("dockerContainers", EVERY_MINUTE).withCallback(async () => {
  // All board items backed by the dockerContainers widget.
  const containerItems = await db.query.items.findMany({
    where: eq(items.kind, "dockerContainers"),
  });

  const refreshTasks = containerItems.map(async (containerItem) => {
    try {
      const widgetOptions = SuperJSON.parse<WidgetComponentProps<"dockerContainers">["options"]>(
        containerItem.options,
      );
      await dockerContainersRequestHandler
        .handler(widgetOptions)
        .getCachedOrUpdatedDataAsync({ forceUpdate: true });
    } catch (error) {
      logger.error(
        new ErrorWithMetadata("Failed to update Docker container status", { item: containerItem }, { cause: error }),
      );
    }
  });

  // allSettled: one failing item must not stop the others from refreshing.
  await Promise.allSettled(refreshTasks);
});
@@ -0,0 +1,174 @@
import { createId, splitToNChunks, Stopwatch } from "@homarr/common";
import { env } from "@homarr/common/env";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { EVERY_WEEK } from "@homarr/cron-jobs-core/expressions";
import type { InferInsertModel } from "@homarr/db";
import { db, handleTransactionsAsync, inArray, sql } from "@homarr/db";
import { iconRepositories, icons } from "@homarr/db/schema";
import { fetchIconsAsync } from "@homarr/icons";
import { createCronJob } from "../lib";
const logger = createLogger({ module: "iconsUpdaterJobs" });

/**
 * Weekly job that mirrors the remote icon repositories into the local database.
 *
 * It fetches all repository icon groups, inserts repositories and icons that are
 * new, and removes icons/repositories that are no longer reported remotely.
 * Icons are keyed by `<iconRepositoryId>.<checksum>` so the same icon may exist
 * in different repositories. Writes run through handleTransactionsAsync, which
 * provides an async path and a sync path (presumably for SQLite — see comments).
 */
export const iconsUpdaterJob = createCronJob("iconsUpdater", EVERY_WEEK, {
  runOnStart: true,
  // NOTE(review): 10s looks optimistic for a full icon sync — confirm against real runs.
  expectedMaximumDurationInMillis: 10 * 1000,
}).withCallback(async () => {
  // Never reach out to the icon repositories when external connections are disabled.
  if (env.NO_EXTERNAL_CONNECTION) return;
  logger.info("Updating icon repository cache...");
  const stopWatch = new Stopwatch();
  const repositoryIconGroups = await fetchIconsAsync();
  // Total number of icons across all fetched repository groups (for logging only).
  const countIcons = repositoryIconGroups
    .map((group) => group.icons.length)
    .reduce((partialSum, arrayLength) => partialSum + arrayLength, 0);
  logger.info("Fetched icons from repositories", {
    repositoryCount: repositoryIconGroups.length,
    iconCount: countIcons,
    duration: stopWatch.getElapsedInHumanWords(),
  });
  const databaseIconRepositories = await db.query.iconRepositories.findMany({
    with: {
      icons: true,
    },
  });
  // "<repositoryId>.<checksum>" keys of icons that still exist remotely and must be kept.
  const skippedChecksums: `${string}.${string}`[] = [];
  let countDeleted = 0;
  let countInserted = 0;
  logger.info("Updating icons in database...");
  stopWatch.reset();
  const newIconRepositories: InferInsertModel<typeof iconRepositories>[] = [];
  const newIcons: InferInsertModel<typeof icons>[] = [];
  const allDbIcons = databaseIconRepositories.flatMap((group) => group.icons);
  for (const repositoryIconGroup of repositoryIconGroups) {
    // Skip repositories whose fetch failed; their stored icons are preserved below.
    if (!repositoryIconGroup.success) {
      continue;
    }
    const repositoryInDb = databaseIconRepositories.find(
      (dbIconGroup) => dbIconGroup.slug === repositoryIconGroup.slug,
    );
    // Reuse the stored repository id, or mint one for a repository seen the first time.
    const iconRepositoryId: string = repositoryInDb?.id ?? createId();
    if (!repositoryInDb?.id) {
      newIconRepositories.push({
        id: iconRepositoryId,
        slug: repositoryIconGroup.slug,
      });
    }
    const dbIconsInRepository = allDbIcons.filter((icon) => icon.iconRepositoryId === iconRepositoryId);
    for (const icon of repositoryIconGroup.icons) {
      // Unchanged icon (same checksum): remember it so it is not treated as dead below.
      if (dbIconsInRepository.some((dbIcon) => dbIcon.checksum === icon.checksum)) {
        skippedChecksums.push(`${iconRepositoryId}.${icon.checksum}`);
        continue;
      }
      newIcons.push({
        id: createId(),
        checksum: icon.checksum,
        name: icon.fileNameWithExtension,
        url: icon.imageUrl,
        iconRepositoryId,
      });
      countInserted++;
    }
  }
  // Fix: icons of repositories whose fetch FAILED never reach skippedChecksums,
  // so without this guard a transient fetch failure would delete every icon of
  // that repository. Collect the ids of failed repositories and exclude their
  // icons from the dead-icon sweep.
  const failedRepositorySlugs = new Set(
    repositoryIconGroups.filter((group) => !group.success).map((group) => group.slug),
  );
  const failedRepositoryIds = new Set(
    databaseIconRepositories
      .filter((repository) => failedRepositorySlugs.has(repository.slug))
      .map((repository) => repository.id),
  );
  // Icons that were not re-reported by their (successfully fetched) repository.
  const deadIcons = allDbIcons.filter(
    (icon) =>
      !failedRepositoryIds.has(icon.iconRepositoryId) &&
      !skippedChecksums.includes(`${icon.iconRepositoryId}.${icon.checksum}`),
  );
  // Repositories that are no longer part of the fetched list at all.
  const deadIconRepositories = databaseIconRepositories.filter(
    (iconRepository) => !repositoryIconGroups.some((group) => group.slug === iconRepository.slug),
  );
  await handleTransactionsAsync(db, {
    async handleAsync(db, schema) {
      await db.transaction(async (transaction) => {
        if (newIconRepositories.length >= 1) {
          await transaction.insert(schema.iconRepositories).values(newIconRepositories);
        }
        if (newIcons.length >= 1) {
          // We only insert 5000 icons at a time to avoid SQLite limitations
          for (const chunck of splitToNChunks(newIcons, Math.ceil(newIcons.length / 5000))) {
            await transaction.insert(schema.icons).values(chunck);
          }
        }
        if (deadIcons.length >= 1) {
          await transaction.delete(schema.icons).where(
            inArray(
              // Combine iconRepositoryId and checksum to allow same icons on different repositories
              sql`concat(${icons.iconRepositoryId}, '.', ${icons.checksum})`,
              deadIcons.map((icon) => `${icon.iconRepositoryId}.${icon.checksum}`),
            ),
          );
        }
        if (deadIconRepositories.length >= 1) {
          await transaction.delete(schema.iconRepositories).where(
            inArray(
              iconRepositories.id,
              deadIconRepositories.map((iconRepository) => iconRepository.id),
            ),
          );
        }
        countDeleted += deadIcons.length;
      });
    },
    handleSync() {
      // Same write sequence as handleAsync, but using the synchronous driver API.
      db.transaction((transaction) => {
        if (newIconRepositories.length >= 1) {
          transaction.insert(iconRepositories).values(newIconRepositories).run();
        }
        if (newIcons.length >= 1) {
          // We only insert 5000 icons at a time to avoid SQLite limitations
          for (const chunck of splitToNChunks(newIcons, Math.ceil(newIcons.length / 5000))) {
            transaction.insert(icons).values(chunck).run();
          }
        }
        if (deadIcons.length >= 1) {
          transaction
            .delete(icons)
            .where(
              inArray(
                // Combine iconRepositoryId and checksum to allow same icons on different repositories
                sql`concat(${icons.iconRepositoryId}, '.', ${icons.checksum})`,
                deadIcons.map((icon) => `${icon.iconRepositoryId}.${icon.checksum}`),
              ),
            )
            .run();
        }
        if (deadIconRepositories.length >= 1) {
          transaction
            .delete(iconRepositories)
            .where(
              inArray(
                iconRepositories.id,
                deadIconRepositories.map((iconRepository) => iconRepository.id),
              ),
            )
            .run();
        }
        countDeleted += deadIcons.length;
      });
    },
  });
  logger.info("Updated icons in database", {
    duration: stopWatch.getElapsedInHumanWords(),
    added: countInserted,
    deleted: countDeleted,
  });
});
@@ -0,0 +1,15 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { dnsHoleRequestHandler } from "@homarr/request-handler/dns-hole";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/**
 * Refreshes cached DNS-hole data for the summary and controls widgets every minute.
 * Neither widget kind needs request-specific input.
 */
export const dnsHoleJob = createCronJob("dnsHole", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(dnsHoleRequestHandler.handler, {
    getInput: {
      dnsHoleSummary: () => ({}),
      dnsHoleControls: () => ({}),
    },
    widgetKinds: ["dnsHoleSummary", "dnsHoleControls"],
  }),
);
@@ -0,0 +1,16 @@
import { EVERY_5_SECONDS } from "@homarr/cron-jobs-core/expressions";
import { downloadClientRequestHandler } from "@homarr/request-handler/downloads";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/**
 * Refreshes cached download-client data every five seconds.
 * The widget's per-integration limit is forwarded as the handler's `limit` input.
 */
export const downloadsJob = createCronJob("downloads", EVERY_5_SECONDS).withCallback(
  createRequestIntegrationJobHandler(downloadClientRequestHandler.handler, {
    getInput: {
      downloads: ({ limitPerIntegration }) => ({ limit: limitPerIntegration }),
    },
    widgetKinds: ["downloads"],
  }),
);
@@ -0,0 +1,46 @@
import { EVERY_5_SECONDS, EVERY_30_SECONDS, EVERY_HOUR, EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import {
firewallCpuRequestHandler,
firewallInterfacesRequestHandler,
firewallMemoryRequestHandler,
firewallVersionRequestHandler,
} from "@homarr/request-handler/firewall";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/** Refreshes cached firewall CPU data every five seconds (firewall widget, no input). */
export const firewallCpuJob = createCronJob("firewallCpu", EVERY_5_SECONDS).withCallback(
  createRequestIntegrationJobHandler(firewallCpuRequestHandler.handler, {
    getInput: { firewall: () => ({}) },
    widgetKinds: ["firewall"],
  }),
);

/** Refreshes cached firewall memory data every minute (firewall widget, no input). */
export const firewallMemoryJob = createCronJob("firewallMemory", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(firewallMemoryRequestHandler.handler, {
    getInput: { firewall: () => ({}) },
    widgetKinds: ["firewall"],
  }),
);

/** Refreshes cached firewall interface data every thirty seconds (firewall widget, no input). */
export const firewallInterfacesJob = createCronJob("firewallInterfaces", EVERY_30_SECONDS).withCallback(
  createRequestIntegrationJobHandler(firewallInterfacesRequestHandler.handler, {
    getInput: { firewall: () => ({}) },
    widgetKinds: ["firewall"],
  }),
);

/** Refreshes cached firewall version data every hour (firewall widget, no input). */
export const firewallVersionJob = createCronJob("firewallVersion", EVERY_HOUR).withCallback(
  createRequestIntegrationJobHandler(firewallVersionRequestHandler.handler, {
    getInput: { firewall: () => ({}) },
    widgetKinds: ["firewall"],
  }),
);
@@ -0,0 +1,25 @@
import { EVERY_5_SECONDS } from "@homarr/cron-jobs-core/expressions";
import { clusterInfoRequestHandler, systemInfoRequestHandler } from "@homarr/request-handler/health-monitoring";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/**
 * Refreshes cached health-monitoring data every five seconds.
 * Routes "proxmox" and "mock" integrations to the cluster-info handler and
 * every other integration kind to the system-info handler.
 */
export const healthMonitoringJob = createCronJob("healthMonitoring", EVERY_5_SECONDS).withCallback(
  createRequestIntegrationJobHandler(
    (integration, itemOptions: Record<string, never>) => {
      const { kind } = integration;
      // Narrow on the kind so the matching handler receives the narrowed integration type.
      if (kind === "proxmox" || kind === "mock") {
        return clusterInfoRequestHandler.handler({ ...integration, kind }, itemOptions);
      }
      return systemInfoRequestHandler.handler({ ...integration, kind }, itemOptions);
    },
    {
      getInput: {
        healthMonitoring: () => ({}),
        systemResources: () => ({}),
      },
      widgetKinds: ["healthMonitoring", "systemResources"],
    },
  ),
);
@@ -0,0 +1,16 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { smartHomeEntityStateRequestHandler } from "@homarr/request-handler/smart-home-entity-state";
import { createCronJob } from "../../lib";
/**
 * Refreshes the cached state of each configured smart-home entity every minute.
 * The widget's entityId is the only handler input.
 */
export const smartHomeEntityStateJob = createCronJob("smartHomeEntityState", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(smartHomeEntityStateRequestHandler.handler, {
    getInput: {
      "smartHome-entityState": ({ entityId }) => ({ entityId }),
    },
    widgetKinds: ["smartHome-entityState"],
  }),
);
@@ -0,0 +1,14 @@
import { EVERY_5_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { indexerManagerRequestHandler } from "@homarr/request-handler/indexer-manager";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/** Refreshes cached indexer-manager data every five minutes (no handler input needed). */
export const indexerManagerJob = createCronJob("indexerManager", EVERY_5_MINUTES).withCallback(
  createRequestIntegrationJobHandler(indexerManagerRequestHandler.handler, {
    getInput: {
      indexerManager: () => ({}),
    },
    widgetKinds: ["indexerManager"],
  }),
);
@@ -0,0 +1,36 @@
import dayjs from "dayjs";
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { calendarMonthRequestHandler } from "@homarr/request-handler/calendar";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { createCronJob } from "../../lib";
/**
 * Refreshes cached calendar data every minute.
 * For each calendar widget it builds one handler input per month in the window
 * [now - filterPastMonths, now + filterFutureMonths].
 */
export const mediaOrganizerJob = createCronJob("mediaOrganizer", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(calendarMonthRequestHandler.handler, {
    widgetKinds: ["calendar"],
    getInput: {
      // Request handler will run for all specified months
      calendar: (options) => {
        const inputs = [];
        const startOffset = -Number(options.filterPastMonths);
        const endOffset = Number(options.filterFutureMonths);
        // Capture "now" once so year/month stay consistent even if the loop
        // happens to run across a month boundary.
        const now = dayjs();
        for (let offsetMonths = startOffset; offsetMonths <= endOffset; offsetMonths++) {
          // Use add(): negative offsets reach into the past, positive into the
          // future. The previous subtract() inverted past and future relative
          // to the filterPastMonths/filterFutureMonths option names.
          const target = now.add(offsetMonths, "months");
          inputs.push({
            year: target.year(),
            month: target.month(),
            releaseType: options.releaseType,
            showUnmonitored: options.showUnmonitored,
          });
        }
        return inputs;
      },
    },
  }),
);
@@ -0,0 +1,24 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { mediaRequestListRequestHandler } from "@homarr/request-handler/media-request-list";
import { mediaRequestStatsRequestHandler } from "@homarr/request-handler/media-request-stats";
import { createCronJob } from "../../lib";
/** Refreshes cached media-request statistics every minute (no handler input needed). */
export const mediaRequestStatsJob = createCronJob("mediaRequestStats", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(mediaRequestStatsRequestHandler.handler, {
    getInput: {
      "mediaRequests-requestStats": () => ({}),
    },
    widgetKinds: ["mediaRequests-requestStats"],
  }),
);

/** Refreshes the cached media-request list every minute (no handler input needed). */
export const mediaRequestListJob = createCronJob("mediaRequestList", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(mediaRequestListRequestHandler.handler, {
    getInput: {
      "mediaRequests-requestList": () => ({}),
    },
    widgetKinds: ["mediaRequests-requestList"],
  }),
);
@@ -0,0 +1,14 @@
import { EVERY_5_SECONDS } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { mediaServerRequestHandler } from "@homarr/request-handler/media-server";
import { createCronJob } from "../../lib";
/** Refreshes cached media-server data every five seconds. */
export const mediaServerJob = createCronJob("mediaServer", EVERY_5_SECONDS).withCallback(
  createRequestIntegrationJobHandler(mediaServerRequestHandler.handler, {
    getInput: {
      // Forward the widget's showOnlyPlaying flag unchanged to the handler.
      mediaServer: (options) => ({ showOnlyPlaying: options.showOnlyPlaying }),
    },
    widgetKinds: ["mediaServer"],
  }),
);
@@ -0,0 +1,14 @@
import { EVERY_5_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { mediaTranscodingRequestHandler } from "@homarr/request-handler/media-transcoding";
import { createCronJob } from "../../lib";
/** Refreshes cached media-transcoding data every five minutes. */
export const mediaTranscodingJob = createCronJob("mediaTranscoding", EVERY_5_MINUTES).withCallback(
  createRequestIntegrationJobHandler(mediaTranscodingRequestHandler.handler, {
    getInput: {
      // Always request the first page with a fixed size of ten entries.
      mediaTranscoding: () => ({ pageOffset: 0, pageSize: 10 }),
    },
    widgetKinds: ["mediaTranscoding"],
  }),
);
@@ -0,0 +1,14 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { networkControllerRequestHandler } from "@homarr/request-handler/network-controller";
import { createCronJob } from "../../lib";
/** Refreshes cached network-controller summary data every minute (no handler input needed). */
export const networkControllerJob = createCronJob("networkController", EVERY_MINUTE).withCallback(
  createRequestIntegrationJobHandler(networkControllerRequestHandler.handler, {
    getInput: {
      networkControllerSummary: () => ({}),
    },
    widgetKinds: ["networkControllerSummary"],
  }),
);
@@ -0,0 +1,14 @@
import { EVERY_5_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { createRequestIntegrationJobHandler } from "@homarr/request-handler/lib/cached-request-integration-job-handler";
import { notificationsRequestHandler } from "@homarr/request-handler/notifications";
import { createCronJob } from "../../lib";
/** Refreshes cached notifications every five minutes; widget options pass through unchanged. */
export const refreshNotificationsJob = createCronJob("refreshNotifications", EVERY_5_MINUTES).withCallback(
  createRequestIntegrationJobHandler(notificationsRequestHandler.handler, {
    getInput: {
      // The widget options already match the handler's expected input shape.
      notifications: (widgetOptions) => widgetOptions,
    },
    widgetKinds: ["notifications"],
  }),
);
@@ -0,0 +1,25 @@
import SuperJSON from "superjson";
import { EVERY_5_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { items } from "@homarr/db/schema";
import { minecraftServerStatusRequestHandler } from "@homarr/request-handler/minecraft-server-status";
import type { WidgetComponentProps } from "../../../widgets/src";
import { createCronJob } from "../lib";
export const minecraftServerStatusJob = createCronJob("minecraftServerStatus", EVERY_5_MINUTES).withCallback(
async () => {
const dbItems = await db.query.items.findMany({
where: eq(items.kind, "minecraftServerStatus"),
});
await Promise.allSettled(
dbItems.map(async (item) => {
const options = SuperJSON.parse<WidgetComponentProps<"minecraftServerStatus">["options"]>(item.options);
const innerHandler = minecraftServerStatusRequestHandler.handler(options);
await innerHandler.getCachedOrUpdatedDataAsync({ forceUpdate: true });
}),
);
},
);
+46
View File
@@ -0,0 +1,46 @@
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { db } from "@homarr/db";
import { getServerSettingByKeyAsync } from "@homarr/db/queries";
import { sendPingRequestAsync } from "@homarr/ping";
import { pingChannel, pingUrlChannel } from "@homarr/redis";
import { createCronJob } from "../lib";
const logger = createLogger({ module: "pingJobs" });

/** Drops every previously queued ping url so the next run starts from a clean slate. */
async function resetPreviousUrlsAsync(): Promise<void> {
  await pingUrlChannel.clearAsync();
  logger.info("Cleared previous ping urls");
}
/**
 * Pings every queued url once per minute, deduplicating the url list first.
 * Disabled entirely when the board settings force-disable status checks.
 */
export const pingJob = createCronJob("ping", EVERY_MINUTE, {
  beforeStart: resetPreviousUrlsAsync,
}).withCallback(async () => {
  const { forceDisableStatus } = await getServerSettingByKeyAsync(db, "board");
  if (forceDisableStatus) {
    logger.debug("Simple ping is disabled by server settings");
    return;
  }
  // Deduplicate before pinging so each url is only requested once per run.
  const uniqueUrls = new Set(await pingUrlChannel.getAllAsync());
  await Promise.allSettled(Array.from(uniqueUrls, pingAsync));
});
/** Pings a single url, logs the outcome, and publishes the result on the ping channel. */
const pingAsync = async (url: string) => {
  const pingResult = await sendPingRequestAsync(url);
  // A statusCode marks a completed request; otherwise the result carries an error.
  if (!("statusCode" in pingResult)) {
    logger.error(new ErrorWithMetadata("Executing ping failed", { url }, { cause: pingResult.error }));
  } else {
    logger.debug("Executed ping successfully", { url, statusCode: pingResult.statusCode });
  }
  // Publish regardless of outcome so subscribers always see the latest state.
  await pingChannel.publishAsync({
    url,
    ...pingResult,
  });
};
+39
View File
@@ -0,0 +1,39 @@
import SuperJSON from "superjson";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { EVERY_10_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { items } from "@homarr/db/schema";
// This import is done that way to avoid circular dependencies.
import { rssFeedsRequestHandler } from "@homarr/request-handler/rss-feeds";
import type { WidgetComponentProps } from "../../../widgets";
import { createCronJob } from "../lib";
const logger = createLogger({ module: "rssFeedsJobs" });

/**
 * Refreshes the cached posts of every configured RSS feed every ten minutes.
 * Feeds are processed sequentially; a failing feed is logged and skipped.
 */
export const rssFeedsJob = createCronJob("rssFeeds", EVERY_10_MINUTES).withCallback(async () => {
  const feedItems = await db.query.items.findMany({
    where: eq(items.kind, "rssFeed"),
  });
  // Parse every item's options up front (matches the original all-or-nothing parse).
  const allOptions = feedItems.map((feedItem) =>
    SuperJSON.parse<WidgetComponentProps<"rssFeed">["options"]>(feedItem.options),
  );
  for (const options of allOptions) {
    // Fall back to 100 posts when the widget has no numeric limit configured.
    const postLimit = typeof options.maximumAmountPosts === "number" ? options.maximumAmountPosts : 100;
    for (const url of options.feedUrls) {
      try {
        await rssFeedsRequestHandler
          .handler({ url, count: postLimit })
          .getCachedOrUpdatedDataAsync({ forceUpdate: true });
      } catch (error) {
        logger.error(new ErrorWithMetadata("Failed to update RSS feed", { url }, { cause: error }));
      }
    }
  }
});
@@ -0,0 +1,13 @@
import { EVERY_HOUR } from "@homarr/cron-jobs-core/expressions";
import { updateCheckerRequestHandler } from "@homarr/request-handler/update-checker";
import { createCronJob } from "../lib";
/** Hourly job (also run on startup) that force-refreshes the cached update-check result. */
export const updateCheckerJob = createCronJob("updateChecker", EVERY_HOUR, {
  runOnStart: true,
}).withCallback(async () => {
  // Always bypass the cache so the hourly check fetches fresh data.
  await updateCheckerRequestHandler.handler({}).getCachedOrUpdatedDataAsync({ forceUpdate: true });
});
+36
View File
@@ -0,0 +1,36 @@
import SuperJSON from "superjson";
import { createLogger } from "@homarr/core/infrastructure/logs";
import { ErrorWithMetadata } from "@homarr/core/infrastructure/logs/error";
import { EVERY_10_MINUTES } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { items } from "@homarr/db/schema";
import { weatherRequestHandler } from "@homarr/request-handler/weather";
import type { WidgetComponentProps } from "../../../widgets";
import { createCronJob } from "../lib";
const logger = createLogger({ module: "weatherJobs" });

/**
 * Refreshes cached weather data for every weather widget item every ten minutes.
 * Items are handled one after another; failures are logged per item and skipped.
 */
export const weatherJob = createCronJob("weather", EVERY_10_MINUTES).withCallback(async () => {
  const weatherItems = await db.query.items.findMany({
    where: eq(items.kind, "weather"),
  });
  // Keep the item id alongside the parsed options for error reporting.
  const parsedItems = weatherItems.map((item) => ({
    id: item.id,
    options: SuperJSON.parse<WidgetComponentProps<"weather">["options"]>(item.options),
  }));
  for (const { id, options } of parsedItems) {
    const { longitude, latitude } = options.location;
    try {
      await weatherRequestHandler
        .handler({ longitude, latitude })
        .getCachedOrUpdatedDataAsync({ forceUpdate: true });
    } catch (error) {
      logger.error(new ErrorWithMetadata("Failed to update weather", { id }, { cause: error }));
    }
  }
});