feat: add tasks page (#692)

This commit is contained in:
Manuel
2024-07-01 18:57:40 +02:00
committed by GitHub
parent 663eb0bf5b
commit 08d571ad74
43 changed files with 668 additions and 174 deletions

View File

@@ -1,3 +0,0 @@
// Re-export the queue client under its public name for consumers of this module.
export { client as queueClient } from "./queues";

View File

@@ -1,17 +0,0 @@
import { iconsUpdaterJob } from "~/jobs/icons-updater";
import { smartHomeEntityStateJob } from "~/jobs/integrations/home-assistant";
import { analyticsJob } from "./jobs/analytics";
import { pingJob } from "./jobs/ping";
import { queuesJob } from "./jobs/queue";
import { createCronJobGroup } from "./lib/jobs";
// All cron jobs of the tasks app, grouped so they can be started together
// (see the startAllAsync() call in the app entry point).
export const jobs = createCronJobGroup({
  // Add your jobs here:
  analytics: analyticsJob,
  iconsUpdater: iconsUpdaterJob,
  ping: pingJob,
  smartHomeEntityState: smartHomeEntityStateJob,
  // This job is used to process queues.
  queues: queuesJob,
});

View File

@@ -1,29 +0,0 @@
import SuperJSON from "superjson";
import { sendServerAnalyticsAsync } from "@homarr/analytics";
import { EVERY_WEEK } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { serverSettings } from "@homarr/db/schema/sqlite";
import { createCronJob } from "~/lib/jobs";
import type { defaultServerSettings } from "../../../../packages/server-settings";
/**
 * Weekly job that sends server analytics, but only when the persisted
 * "analytics" server setting exists and has enableGeneral switched on.
 */
export const analyticsJob = createCronJob("analytics", EVERY_WEEK, {
  runOnStart: true,
}).withCallback(async () => {
  const settingRow = await db.query.serverSettings.findFirst({
    where: eq(serverSettings.settingKey, "analytics"),
  });
  if (!settingRow) {
    return;
  }
  // Settings are stored SuperJSON-serialized in the value column.
  const analyticsConfig = SuperJSON.parse<(typeof defaultServerSettings)["analytics"]>(settingRow.value);
  if (analyticsConfig.enableGeneral) {
    await sendServerAnalyticsAsync();
  }
});

View File

@@ -1,97 +0,0 @@
import { Stopwatch } from "@homarr/common";
import { EVERY_WEEK } from "@homarr/cron-jobs-core/expressions";
import type { InferInsertModel } from "@homarr/db";
import { db, inArray } from "@homarr/db";
import { createId } from "@homarr/db/client";
import { iconRepositories, icons } from "@homarr/db/schema/sqlite";
import { fetchIconsAsync } from "@homarr/icons";
import { logger } from "@homarr/log";
import { createCronJob } from "~/lib/jobs";
/**
 * Weekly job that synchronizes the icon repository cache: fetches all icon
 * repositories, inserts repositories/icons that are not yet stored and deletes
 * stored icons whose checksum no longer appears in the fetched data.
 */
export const iconsUpdaterJob = createCronJob("iconsUpdater", EVERY_WEEK, {
  runOnStart: true,
}).withCallback(async () => {
  logger.info("Updating icon repository cache...");
  const stopWatch = new Stopwatch();
  const repositoryIconGroups = await fetchIconsAsync();
  const countIcons = repositoryIconGroups
    .map((group) => group.icons.length)
    .reduce((partialSum, arrayLength) => partialSum + arrayLength, 0);
  logger.info(
    `Successfully fetched ${countIcons} icons from ${repositoryIconGroups.length} repositories within ${stopWatch.getElapsedInHumanWords()}`,
  );
  const databaseIconGroups = await db.query.iconRepositories.findMany({
    with: {
      icons: true,
    },
  });
  // Hoisted lookup of checksums already stored in the database. The previous
  // implementation recomputed flatMap(...).some(...) for every fetched icon,
  // which is O(fetched * stored); a Set makes each membership test O(1).
  const existingChecksums = new Set(databaseIconGroups.flatMap((group) => group.icons).map((icon) => icon.checksum));
  // Checksums of fetched icons that already exist in the database (kept as-is).
  const skippedChecksums = new Set<string>();
  let countDeleted = 0;
  let countInserted = 0;
  logger.info("Updating icons in database...");
  stopWatch.reset();
  const newIconRepositories: InferInsertModel<typeof iconRepositories>[] = [];
  const newIcons: InferInsertModel<typeof icons>[] = [];
  for (const repositoryIconGroup of repositoryIconGroups) {
    if (!repositoryIconGroup.success) {
      continue;
    }
    const repositoryInDb = databaseIconGroups.find((dbIconGroup) => dbIconGroup.slug === repositoryIconGroup.slug);
    const repositoryIconGroupId: string = repositoryInDb?.id ?? createId();
    if (!repositoryInDb?.id) {
      newIconRepositories.push({
        id: repositoryIconGroupId,
        slug: repositoryIconGroup.slug,
      });
    }
    for (const icon of repositoryIconGroup.icons) {
      if (existingChecksums.has(icon.checksum)) {
        skippedChecksums.add(icon.checksum);
        continue;
      }
      newIcons.push({
        id: createId(),
        checksum: icon.checksum,
        name: icon.fileNameWithExtension,
        url: icon.imageUrl.href,
        iconRepositoryId: repositoryIconGroupId,
      });
      countInserted++;
    }
  }
  // Stored icons whose checksum was not seen again are considered dead.
  // NOTE(review): icons belonging to repositories whose fetch failed
  // (success === false) never reach skippedChecksums and are deleted here as
  // dead — confirm this is intended and not data loss on transient failures.
  const deadIcons = databaseIconGroups
    .flatMap((group) => group.icons)
    .filter((icon) => !skippedChecksums.has(icon.checksum));
  await db.transaction(async (transaction) => {
    if (newIconRepositories.length >= 1) {
      await transaction.insert(iconRepositories).values(newIconRepositories);
    }
    if (newIcons.length >= 1) {
      await transaction.insert(icons).values(newIcons);
    }
    if (deadIcons.length >= 1) {
      await transaction.delete(icons).where(
        inArray(
          icons.checksum,
          deadIcons.map((icon) => icon.checksum),
        ),
      );
    }
    countDeleted += deadIcons.length;
  });
  logger.info(`Updated database within ${stopWatch.getElapsedInHumanWords()} (-${countDeleted}, +${countInserted})`);
});

View File

@@ -1,64 +0,0 @@
import SuperJSON from "superjson";
import { decryptSecret } from "@homarr/common";
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { db, eq } from "@homarr/db";
import { items } from "@homarr/db/schema/sqlite";
import { HomeAssistantIntegration } from "@homarr/integrations";
import { logger } from "@homarr/log";
import { homeAssistantEntityState } from "@homarr/redis";
import type { WidgetComponentProps } from "@homarr/widgets";
import { createCronJob } from "~/lib/jobs";
/**
 * Every minute, publishes the current Home Assistant entity state for each
 * "smartHome-entityState" board item that has an integration attached.
 */
export const smartHomeEntityStateJob = createCronJob("smartHomeEntityState", EVERY_MINUTE).withCallback(async () => {
  const entityStateItems = await db.query.items.findMany({
    where: eq(items.kind, "smartHome-entityState"),
    with: {
      integrations: {
        with: {
          integration: {
            with: {
              secrets: {
                columns: {
                  kind: true,
                  value: true,
                },
              },
            },
          },
        },
      },
    },
  });
  for (const item of entityStateItems) {
    // Only the first attached integration is used; items without one are skipped.
    const integration = item.integrations[0]?.integration;
    if (!integration) {
      continue;
    }
    const options = SuperJSON.parse<WidgetComponentProps<"smartHome-entityState">["options"]>(item.options);
    const decryptedSecrets = integration.secrets.map((secret) => ({
      ...secret,
      value: decryptSecret(secret.value),
    }));
    const homeAssistant = new HomeAssistantIntegration({
      ...integration,
      decryptedSecrets,
    });
    const state = await homeAssistant.getEntityStateAsync(options.entityId);
    if (!state.success) {
      logger.error("Unable to fetch data from Home Assistant");
      continue;
    }
    await homeAssistantEntityState.publishAsync({
      entityId: options.entityId,
      state: state.data.state,
    });
  }
});

View File

@@ -1,25 +0,0 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { logger } from "@homarr/log";
import { sendPingRequestAsync } from "@homarr/ping";
import { pingChannel, pingUrlChannel } from "@homarr/redis";
import { createCronJob } from "~/lib/jobs";
/**
 * Every minute, pings each registered url once (deduplicated) and publishes
 * the result — status code or error — on the ping channel.
 */
export const pingJob = createCronJob("ping", EVERY_MINUTE).withCallback(async () => {
  const registeredUrls = await pingUrlChannel.getAllAsync();
  const uniqueUrls = new Set(registeredUrls);
  for (const url of uniqueUrls) {
    const pingResult = await sendPingRequestAsync(url);
    if (!("statusCode" in pingResult)) {
      logger.error(`Executing ping for url ${url} failed with error: ${pingResult.error}`);
    } else {
      logger.debug(`executed ping for url ${url} with status code ${pingResult.statusCode}`);
    }
    await pingChannel.publishAsync({
      url,
      ...pingResult,
    });
  }
});

View File

@@ -1,9 +0,0 @@
import { EVERY_MINUTE } from "@homarr/cron-jobs-core/expressions";
import { createCronJob } from "~/lib/jobs";
import { queueWorkerAsync } from "../lib/queue/worker";
// This job processes queues, it runs every minute.
// The callback simply delegates to the queue worker and returns its promise.
export const queuesJob = createCronJob("queues", EVERY_MINUTE).withCallback(() => queueWorkerAsync());

View File

@@ -1,21 +0,0 @@
import { createCronJobFunctions } from "@homarr/cron-jobs-core";
import type { Logger } from "@homarr/cron-jobs-core/logger";
import { logger } from "@homarr/log";
// Adapter that satisfies the cron-jobs-core Logger contract by delegating
// each level to the shared winston-backed logger from @homarr/log.
class WinstonCronJobLogger implements Logger {
  logDebug(message: string) {
    logger.debug(message);
  }
  logInfo(message: string) {
    logger.info(message);
  }
  logError(error: unknown) {
    logger.error(error);
  }
}
// Factory functions used across the tasks app to declare cron jobs and job
// groups, wired to the winston-backed cron-job logger defined in this file.
export const { createCronJob, createCronJobGroup } = createCronJobFunctions({
  logger: new WinstonCronJobLogger(),
});

View File

@@ -1,54 +0,0 @@
import { objectEntries, objectKeys } from "@homarr/common";
import type { MaybePromise } from "@homarr/common/types";
import { queueChannel } from "@homarr/redis";
import type { z } from "@homarr/validation";
import type { createQueue } from "./creator";
// Runtime representation of a queue: its name, the callback executed by the
// queue worker and the zod validator describing the callback's input.
interface Queue<TInput extends z.ZodType = z.ZodType> {
  name: string;
  callback: (input: z.infer<TInput>) => MaybePromise<void>;
  inputValidator: TInput;
}
// Map of queue name to a queue built via createQueue(...).withCallback(...).
type Queues = Record<string, ReturnType<ReturnType<typeof createQueue>["withCallback"]>>;
/**
 * Builds a typed client for the given queues plus a registry that the queue
 * worker uses to look up callbacks by name.
 *
 * The client exposes one async function per queue which enqueues an execution
 * on the redis queue channel; the optional executionDate defers processing.
 */
export const createQueueClient = <TQueues extends Queues>(queues: TQueues) => {
  const queueRegistry = new Map<string, Queue>();
  for (const [name, queue] of objectEntries(queues)) {
    // Symbol keys cannot be used as serializable queue names — skip them.
    if (typeof name !== "string") continue;
    queueRegistry.set(name, {
      name,
      callback: queue._callback,
      inputValidator: queue._input,
    });
  }
  return {
    queueRegistry,
    client: objectKeys(queues).reduce(
      (acc, name) => {
        acc[name] = async (data: z.infer<TQueues[typeof name]["_input"]>, options) => {
          if (typeof name !== "string") return;
          const queue = queueRegistry.get(name);
          if (!queue) return;
          await queueChannel.addAsync({
            name,
            data,
            // Idiomatic replacement for the previous
            // `typeof options === "object" && options.executionDate ? ... : new Date()`:
            // fall back to "now" when no explicit execution date was requested.
            executionDate: options?.executionDate ?? new Date(),
          });
        };
        return acc;
      },
      {} as {
        [key in keyof TQueues]: (
          data: z.infer<TQueues[key]["_input"]>,
          props: {
            executionDate?: Date;
          } | void,
        ) => Promise<void>;
      },
    ),
  };
};

View File

@@ -1,13 +0,0 @@
import type { MaybePromise } from "@homarr/common/types";
import type { z } from "@homarr/validation";
/**
 * Declares a queue with the given zod input schema. The returned builder's
 * withCallback attaches the handler and yields the internal queue shape
 * ({ _input, _callback }) that createQueueClient consumes.
 */
export const createQueue = <TInput extends z.ZodType>(input: TInput) => {
  const withCallback = (callback: (data: z.infer<TInput>) => MaybePromise<void>) => ({
    _input: input,
    _callback: callback,
  });
  return { withCallback };
};

View File

@@ -1,32 +0,0 @@
import { logger } from "@homarr/log";
import { queueChannel } from "@homarr/redis";
import { queueRegistry } from "~/queues";
/**
 * This function reads all the queue executions that are due and processes them.
 * Those executions are stored in the redis queue channel.
 */
export const queueWorkerAsync = async () => {
  const now = new Date();
  const executions = await queueChannel.filterAsync((item) => {
    return item.executionDate < now;
  });
  for (const execution of executions) {
    const queue = queueRegistry.get(execution.name);
    // Executions for unregistered queue names are left in the channel so they
    // can be picked up once the queue is registered.
    if (!queue) continue;
    try {
      await queue.callback(execution.data);
    } catch (err) {
      // Fixed typo in the log message: "occured" -> "occurred".
      logger.error(
        `apps/tasks/src/lib/queue/worker.ts: Error occurred when executing queue ${execution.name} with data`,
        execution.data,
        "and error:",
        err,
      );
    }
    // Executions are marked done even when the callback threw, so a failing
    // execution is not retried forever.
    await queueChannel.markAsDoneAsync(execution._id);
  }
};

View File

@@ -1,10 +1,13 @@
// This import has to be the first import in the file so that the agent is overridden before any other modules are imported.
import "./undici-log-agent-override";
import { jobs } from "./jobs";
import { registerCronJobRunner } from "@homarr/cron-job-runner";
import { jobGroup } from "@homarr/cron-jobs";
import { seedServerSettingsAsync } from "./seed-server-settings";
// App entry point: start the cron jobs, then seed persisted server settings.
void (async () => {
  // NOTE(review): both the removed `jobs` group and the added `jobGroup` /
  // registerCronJobRunner() appear here — this looks like a diff rendering
  // that merged deleted and added lines; confirm which set belongs in the
  // final file before relying on this code.
  await jobs.startAllAsync();
  registerCronJobRunner();
  await jobGroup.startAllAsync();
  await seedServerSettingsAsync();
})();

View File

@@ -1,7 +0,0 @@
import { createQueueClient } from "./lib/queue/client";
import { testQueue } from "./queues/test";
// Queue client (used to enqueue executions) and registry (used by the queue
// worker to resolve callbacks) for all queues of the tasks app.
export const { client, queueRegistry } = createQueueClient({
  // Add your queues here
  test: testQueue,
});

View File

@@ -1,11 +0,0 @@
import { z } from "@homarr/validation";
import { createQueue } from "~/lib/queue/creator";
// Example queue used to verify the queue pipeline end to end; it only logs
// the id it received.
export const testQueue = createQueue(
  z.object({
    id: z.string(),
  }),
).withCallback((data) => {
  console.log(`Test queue with id ${data.id}`);
});