Replace entire codebase with homarr-labs/homarr

This commit is contained in:
Thomas Camlong
2026-01-15 21:54:44 +01:00
parent c5bc3b1559
commit 4fdd1fe351
4666 changed files with 409577 additions and 147434 deletions

25
scripts/entrypoint.sh Normal file
View File

@@ -0,0 +1,25 @@
#!/bin/sh
# Entrypoint: optionally re-own the app and nginx paths to the requested
# PUID/PGID, then exec the container command, dropping privileges via su-exec
# when PUID is non-root. Defaults keep the container running as root.
set -e

export PUID=${PUID:-0}
export PGID=${PGID:-0}
echo "Starting with UID='$PUID', GID='$PGID'"

if [ "${PUID}" != "0" ] || [ "${PGID}" != "0" ]; then
    # The below command will change the owner of all files in the /app directory (except node_modules) to the new UID and GID
    echo "Changing owner to $PUID:$PGID, this will take about 10 seconds..."
    # Global options -mindepth/-maxdepth must precede tests such as -name,
    # otherwise GNU find prints a warning on every container start.
    find . -mindepth 1 -maxdepth 1 -name 'node_modules' -prune -o -exec chown -R "$PUID:$PGID" {} +
    # nginx needs ownership of its cache, log, runtime-state, pid and config paths.
    chown -R "$PUID:$PGID" /var/cache/nginx
    chown -R "$PUID:$PGID" /var/log/nginx
    chown -R "$PUID:$PGID" /var/lib/nginx
    chown -R "$PUID:$PGID" /run/nginx/nginx.pid
    chown -R "$PUID:$PGID" /etc/nginx
    echo "Changing owner to $PUID:$PGID, done."
fi

if [ "${PUID}" != "0" ]; then
    # Replace this shell with the command, running as the unprivileged user.
    exec su-exec "$PUID:$PGID" "$@"
else
    exec "$@"
fi

View File

@@ -1,17 +1,71 @@
#!/bin/sh
#!/usr/bin/env bash
# NOTE(review): the two shebang lines above suggest this span interleaves the
# removed (old) and added (new) sides of a diff — confirm against the real file
# before treating it as a single runnable script.
echo "Exporting hostname..."
# Make the internal NextAuth callback URL resolve to this container.
export NEXTAUTH_URL_INTERNAL="http://$HOSTNAME:${PORT:-7575}"
# Create sub directories in volume
mkdir -p /appdata/db
mkdir -p /appdata/redis
mkdir -p /appdata/trusted-certificates
echo "Migrating database..."
# NOTE(review): legacy (pre-rewrite) migration path — presumably superseded by
# the DB_DIALECT migration block below; confirm which side of the diff this is.
cd ./migrate; yarn db:migrate & PID=$!
# Wait for migration to finish
wait $PID
# Run migrations
if [ "$DB_MIGRATIONS_DISABLED" = "true" ]; then
echo "DB migrations are disabled, skipping"
else
echo "Running DB migrations"
# We disable redis logs during migration as the redis client is not yet started
DISABLE_REDIS_LOGS=true node ./db/migrations/$DB_DIALECT/migrate.cjs ./db/migrations/$DB_DIALECT
fi
## If 'default.json' does not exist in '/app/data/configs', we copy it from '/app/data/default.json'
cp -n /app/data/default.json /app/data/configs/default.json
# Auth secret is generated every time the container starts as it is required, but not used because we don't need JWTs or Mail hashing
export AUTH_SECRET=$(openssl rand -base64 32)
# Cron job API key is generated every time the container starts as it is required for communication between nextjs-api and tasks-api
export CRON_JOB_API_KEY=$(openssl rand -base64 32)
echo "Starting production server..."
# NOTE(review): this $PID is awaited at the 'wait $PID' below, which blocks the
# later service startup — likely an old-version line left in the diff; verify.
node /app/server.js & PID=$!
# Start nginx proxy
# 1. Replace the HOSTNAME in the nginx template file
# 2. Create the nginx configuration file from the template
# 3. Start the nginx server
export HOSTNAME
envsubst '${HOSTNAME}' < /etc/nginx/templates/nginx.conf > /etc/nginx/nginx.conf
# Start services in the background and store their PIDs
nginx -g 'daemon off;' &
NGINX_PID=$!
wait $PID
# Either use an already-running external Redis or start the bundled one.
if [ "$REDIS_IS_EXTERNAL" = "true" ]; then
echo "Using external Redis server at redis://$REDIS_HOST:$REDIS_PORT"
REDIS_PID=""
else
echo "Starting internal Redis server"
redis-server /app/redis.conf &
REDIS_PID=$!
fi
# Launch the three Node services (background tasks, websocket server, Next.js).
node apps/tasks/tasks.cjs &
TASKS_PID=$!
node apps/websocket/wssServer.cjs &
WSS_PID=$!
node apps/nextjs/server.js &
NEXTJS_PID=$!
# Function to handle SIGTERM and shut down services
terminate() {
echo "Received SIGTERM. Shutting down..."
kill -TERM $NGINX_PID $TASKS_PID $WSS_PID $NEXTJS_PID 2>/dev/null
wait
# kill redis-server last because of logging of other services and only if $REDIS_PID is set
if [ -n "$REDIS_PID" ]; then
kill -TERM $REDIS_PID 2>/dev/null
wait
fi
echo "Shutdown complete."
exit 0
}
# When SIGTERM (docker stop <container>) / SIGINT (ctrl+c) is received, run the terminate function
trap terminate TERM INT
# Wait for all processes
# The container's lifetime is tied to the Next.js server process.
wait $NEXTJS_PID
terminate

View File

@@ -0,0 +1,14 @@
import { readFile, writeFile } from "fs/promises";

// Appends the current release version to the version list in the bug-report
// issue template, directly below the #NEXT_VERSION# marker (the marker itself
// is kept so the next release can be appended the same way).
const replaceTemplate = "#NEXT_VERSION#";
const fileName = ".github/ISSUE_TEMPLATE/bug_report.yml";

// Validate the env var up front instead of asserting with `as string`, which
// would otherwise surface later as an opaque TypeError on `.replace`.
const nextVersion = process.env.NEXT_VERSION;
if (!nextVersion) {
  throw new Error("NEXT_VERSION environment variable must be set");
}

const content = await readFile(fileName, "utf8");
// Strip the "v" tag prefix (e.g. v1.2.3 -> 1.2.3) for the dropdown entry.
const updatedContent = content.replace(
  replaceTemplate,
  `${replaceTemplate}\n - ${nextVersion.replace("v", "")}`,
);
await writeFile(fileName, updatedContent, "utf8");

View File

@@ -0,0 +1,79 @@
import fs from "fs/promises";
// Data sources the contributor lists are aggregated from.
const sources = {
// NOTE(review): two Crowdin translation projects — presumably the legacy and
// the current project; confirm which id is which.
crowdin: [
{ projectId: 534422 },
{ projectId: 742587 },
],
// Both the original (ajnart) and current (homarr-labs) GitHub repositories.
github: [
{ slug: "ajnart", repository: "homarr" },
{ slug: "homarr-labs", repository: "homarr" },
],
};
// API tokens taken from the environment (expected to be provided by CI).
const env = {
GITHUB_TOKEN: process.env.GITHUB_TOKEN,
CROWDIN_TOKEN: process.env.CROWDIN_TOKEN,
};
/**
 * Fetches the contributors of a GitHub repository (up to 999 entries).
 *
 * Returns objects with `login`, `avatar_url` and `contributions`. Keeping
 * `contributions` is required: the caller sorts by contribution count and then
 * strips the field — the previous mapping dropped it here, so that sort was
 * comparing `undefined` values.
 */
const fetchGithubContributors = async (slug, repository) => {
  const url = `https://api.github.com/repos/${slug}/${repository}/contributors?per_page=999`;
  const options = {
    method: "GET",
    headers: {
      Authorization: `Bearer ${env.GITHUB_TOKEN}`,
      Accept: "application/vnd.github+json",
      "X-GitHub-Api-Version": "2022-11-28",
    },
  };
  const response = await fetch(url, options);
  // An error payload is an object, not an array — fail loudly instead of
  // crashing on `data.map is not a function`.
  if (!response.ok) {
    throw new Error(`GitHub contributors request for ${slug}/${repository} failed with status ${response.status}`);
  }
  const data = await response.json();
  return data.map((contributor) => ({
    login: contributor.login,
    avatar_url: contributor.avatar_url,
    contributions: contributor.contributions,
  }));
};
// Retrieves the member list of one Crowdin project and reduces every entry to
// the username and avatar URL needed for the translators page.
const fetchCrowdinMembers = async (projectId) => {
  const response = await fetch(`https://crowdin.com/api/v2/projects/${projectId}/members`, {
    method: "GET",
    headers: {
      Accept: "application/json",
      Authorization: `Bearer ${env.CROWDIN_TOKEN}`,
    },
  });
  const payload = await response.json();
  // Each top-level entry wraps the actual member record in a `data` property.
  const members = payload.data.flatMap((entry) => entry.data);
  return members.map(({ username, avatarUrl }) => ({ username, avatarUrl }));
};
// Builds an Array.prototype.filter predicate that keeps only the first element
// for each distinct key produced by `callback`.
const distinctBy = (callback) => {
  return (value, index, self) => {
    const firstIndexOfKey = self.findIndex((item) => callback(item) === callback(value));
    return firstIndexOfKey === index;
  };
};
// Accumulators for the raw (possibly duplicated across sources) entries.
const githubContributors = [];
const crowdinContributors = [];
// Collect GitHub contributors from both repositories, one request at a time.
for (const { repository, slug } of sources.github) {
githubContributors.push(...(await fetchGithubContributors(slug, repository)));
}
// Deduplicate by login, order by contribution count, strip the count, and
// drop bot accounts before writing the static data file.
// NOTE(review): fetchGithubContributors maps entries down to
// {login, avatar_url}, so `contributions` is undefined here and this sort
// compares undefined values — either keep the field in the fetch mapping or
// remove the sort; confirm intended ordering.
const distinctGithubContributors = githubContributors
.filter(distinctBy((contributor) => contributor.login))
.sort((a, b) => b.contributions - a.contributions)
.map(({ contributions, ...props }) => props)
.filter((contributor) => !contributor.login.includes("[bot]"));
await fs.writeFile("./static-data/contributors.json", JSON.stringify(distinctGithubContributors));
// Collect and deduplicate Crowdin translators, then write them out as well.
for (const { projectId } of sources.crowdin) {
crowdinContributors.push(...(await fetchCrowdinMembers(projectId)));
}
const distinctCrowdinContributors = crowdinContributors.filter(distinctBy((contributor) => contributor.username));
await fs.writeFile("./static-data/translators.json", JSON.stringify(distinctCrowdinContributors));

View File

@@ -0,0 +1,70 @@
import {readFile, writeFile} from 'fs/promises';
import {integrationDefs} from '../packages/definitions/src/integration';
// README to rewrite and the maximum number of integration cells per table row.
const FILE = 'docs/README.md';
const MAX_COLUMNS_PER_ROW = 7;
/**
 * Regenerates the integration table between the AUTO_GENERATE markers in the
 * docs README from the integration definitions package.
 */
async function updateIntegrationList() {
  const content = await readFile(FILE, 'utf8');

  // The generated section is delimited by these two HTML comments.
  const startMarker = '<!-- AUTO_GENERATE_INTEGRATION_LIST_START -->';
  const endMarker = '<!-- AUTO_GENERATE_INTEGRATION_LIST_END -->';
  const startIndex = content.indexOf(startMarker);
  const endIndex = content.indexOf(endMarker);
  if (startIndex === -1 || endIndex === -1) {
    throw new Error('Could not find markers in README.md');
  }

  // All real integrations in alphabetical order; 'Mock' is internal-only.
  const integrations = Object.values(integrationDefs)
    .filter(def => def.name !== 'Mock')
    .sort((a, b) => a.name.localeCompare(b.name));

  // One <td> per integration, linking its icon to the documentation page.
  const cells = integrations.map(
    (integration) => `<td align="center">
<a href="${integration.documentationUrl}" target="_blank" rel="noreferrer noopener">
<img src="${integration.iconUrl}" alt="${integration.name}" width="90" height="90" />
<br/>
<p align="center">${integration.name.replaceAll(' ', '<br/>')}</p>
</a>
</td>`,
  );

  // Group the cells into rows of at most MAX_COLUMNS_PER_ROW columns.
  const tableRows: string[] = [];
  for (let offset = 0; offset < cells.length; offset += MAX_COLUMNS_PER_ROW) {
    const rowCells = cells.slice(offset, offset + MAX_COLUMNS_PER_ROW);
    tableRows.push(`<tr>${rowCells.join('\n')}</tr>`);
  }

  const newSection = `${startMarker}
<div align="center">
<table>
<tbody>
${tableRows.join('\n')}
</tbody>
</table>
</div>
${endMarker}`;

  // Splice the regenerated section back between the markers and persist it.
  const newContent = content.slice(0, startIndex) + newSection + content.slice(endIndex + endMarker.length);
  await writeFile(FILE, newContent, 'utf8');
}
updateIntegrationList().catch(console.error);