Mirror of https://github.com/gethomepage/homepage.git (synced 2026-01-07 07:52:08 +08:00)

Compare commits (10 commits)
| SHA1 |
|---|
| 0e1aeaf54c |
| 2e8717247d |
| d17a17bd3c |
| 0afc1b96f1 |
| 5fbc6702bc |
| 75455a23e2 |
| 2aed46671f |
| 88934ec39a |
| 21c0c687cd |
| 6b90d3ef28 |
README.md (14 changed lines)
```diff
@@ -45,15 +45,17 @@
 - Container status (Running / Stopped) & statistics (CPU, Memory, Network)
 - Automatic service discovery (via labels)
 - Service Integration
-  - Sonarr, Radarr, Readarr, Prowlarr, Bazarr, Lidarr, Emby, Jellyfin, Tautulli (Plex)
+  - Sonarr, Radarr, Readarr, Prowlarr, Bazarr, Lidarr, Emby, Jellyfin, Tautulli, Plex and more
-  - Ombi, Overseerr, Jellyseerr, Jackett, NZBGet, SABnzbd, ruTorrent, Transmission, qBittorrent
+  - Ombi, Overseerr, Jellyseerr, Jackett, NZBGet, SABnzbd, ruTorrent, Transmission, qBittorrent and more
-  - Portainer, Traefik, Speedtest Tracker, PiHole, AdGuard Home, Nginx Proxy Manager, Gotify, Syncthing Relay Server, Authentik, Proxmox
+  - Portainer, Traefik, Speedtest Tracker, PiHole, AdGuard Home, Nginx Proxy Manager, Gotify, Syncthing Relay Server, Authentik, Proxmox and more
 - Information Providers
-  - Coin Market Cap, Mastodon
+  - Coin Market Cap, Mastodon and more
 - Information & Utility Widgets
   - System Stats (Disk, CPU, Memory)
   - Weather via [OpenWeatherMap](https://openweathermap.org/) or [Open-Meteo](https://open-meteo.com/)
-  - Search Bar
+  - Web Search Bar
+  - UniFi Console, Glances and more
+  - Instant "Quick-launch" search
 - Customizable
   - 21 theme colors with light and dark mode support
   - Background image support
@@ -63,7 +65,7 @@
 
 If you have any questions, suggestions, or general issues, please start a discussion on the [Discussions](https://github.com/benphelps/homepage/discussions) page.
 
-If you have a more specific issue, please open an issue on the [Issues](https://github.com/benphelps/homepage/issues) page.
+For bug reports, please open an issue on the [Issues](https://github.com/benphelps/homepage/issues) page.
 
 ## Getting Started
 
```
Spanish locale strings:

```diff
@@ -394,14 +394,14 @@
   "numberOfLeases": "Alquileres"
 },
 "xteve": {
-  "streams_all": "All Streams",
+  "streams_all": "Todas las corrientes",
-  "streams_active": "Active Streams",
+  "streams_active": "Corrientes activas",
-  "streams_xepg": "XEPG Channels"
+  "streams_xepg": "Canales XEPG"
 },
 "opnsense": {
-  "cpu": "CPU Load",
+  "cpu": "Carga de la CPU",
-  "memory": "Active Memory",
+  "memory": "Memoria activa",
-  "wanUpload": "WAN Upload",
+  "wanUpload": "Carga WAN",
-  "wanDownload": "WAN Download"
+  "wanDownload": "Descargar WAN"
 }
 }
```
French locale strings:

```diff
@@ -399,9 +399,9 @@
   "streams_xepg": "Canal XEPG"
 },
 "opnsense": {
-  "cpu": "CPU Load",
+  "cpu": "Charge CPU",
-  "memory": "Active Memory",
+  "memory": "Mém. Utilisée",
-  "wanUpload": "WAN Upload",
+  "wanUpload": "WAN Envoi",
-  "wanDownload": "WAN Download"
+  "wanDownload": "WAN Récep."
 }
 }
```
Version widget:

```diff
@@ -3,8 +3,6 @@ import useSWR from "swr";
 import { compareVersions } from "compare-versions";
 import { MdNewReleases } from "react-icons/md";
 
-import cachedFetch from "utils/proxy/cached-fetch";
-
 export default function Version() {
   const { t, i18n } = useTranslation();
 
@@ -12,9 +10,7 @@ export default function Version() {
   const revision = process.env.NEXT_PUBLIC_REVISION?.length ? process.env.NEXT_PUBLIC_REVISION : "dev";
   const version = process.env.NEXT_PUBLIC_VERSION?.length ? process.env.NEXT_PUBLIC_VERSION : "dev";
 
-  const cachedFetcher = (resource) => cachedFetch(resource, 5);
-
-  const { data: releaseData } = useSWR("https://api.github.com/repos/benphelps/homepage/releases", cachedFetcher);
+  const { data: releaseData } = useSWR("/api/releases");
 
   // use Intl.DateTimeFormat to format the date
   const formatDate = (date) => {
@@ -48,7 +44,7 @@ export default function Version() {
       </span>
       {version === "main" || version === "dev" || version === "nightly"
         ? null
-        : releaseData &&
+        : releaseData && latestRelease &&
           compareVersions(latestRelease.tag_name, version) > 0 && (
             <a
               href={latestRelease.html_url}
```
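The Version widget change has two parts: release data is now requested from the internal `/api/releases` route (so the GitHub call happens, and is cached, on the server), and the update banner additionally guards on `latestRelease`, not just `releaseData`. A minimal sketch of why that extra guard matters; `findNewerRelease` and the prerelease filter are illustrative assumptions, not the component's exact code:

```js
import { compareVersions } from "compare-versions";

// Illustrative helper: returns the newer release, or null while data is missing.
export function findNewerRelease(releaseData, version) {
  // Until useSWR("/api/releases") resolves, releaseData is undefined; a GitHub
  // error payload would also not be an array.
  const latestRelease = Array.isArray(releaseData) ? releaseData.find((r) => !r.prerelease) : undefined;

  // Without the `latestRelease &&` guard, compareVersions(undefined.tag_name, version)
  // would throw during that first render.
  if (!latestRelease) return null;
  return compareVersions(latestRelease.tag_name, version) > 0 ? latestRelease : null;
}
```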
CPU resource widget:

```diff
@@ -38,7 +38,7 @@ export default function Cpu({ expanded }) {
           <div className="pr-1">{t("resources.load")}</div>
         </div>
       )}
-      <UsageBar percent={100} />
+      <UsageBar percent={0} />
     </div>
   </div>
 );
```
Disk resource widget:

```diff
@@ -38,7 +38,7 @@ export default function Disk({ options, expanded }) {
           <div className="pr-1">{t("resources.total")}</div>
         </span>
       )}
-      <UsageBar percent={100} />
+      <UsageBar percent={0} />
     </div>
   </div>
 );
```
Memory resource widget:

```diff
@@ -38,7 +38,7 @@ export default function Memory({ expanded }) {
           <div className="pr-1">{t("resources.total")}</div>
         </span>
       )}
-      <UsageBar percent={100} />
+      <UsageBar percent={0} />
     </div>
   </div>
 );
```
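All three resource widgets (CPU, Disk, Memory) make the same change to their loading placeholder: the usage bar now starts empty instead of full, so a dashboard that is still waiting on stats no longer looks like every resource is maxed out. A rough sketch of the idea with a stand-in `UsageBar` (not the project's actual component):

```jsx
// Stand-in UsageBar: a filled track whose width follows the percent prop.
function UsageBar({ percent }) {
  return (
    <div className="h-1 w-full rounded bg-gray-300">
      <div className="h-1 rounded bg-blue-500" style={{ width: `${percent}%` }} />
    </div>
  );
}

// Skeleton state rendered before any stats have arrived: an empty bar reads as
// "no data yet" rather than "100% used".
function ResourceSkeleton() {
  return <UsageBar percent={0} />;
}
```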
src/pages/api/releases.js (new file, 6 lines)

```diff
@@ -0,0 +1,6 @@
+import cachedFetch from "utils/proxy/cached-fetch";
+
+export default async function handler(req, res) {
+  const releasesURL = "https://api.github.com/repos/benphelps/homepage/releases";
+  return res.send(await cachedFetch(releasesURL, 5));
+}
```
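Moving the GitHub request behind this route means `cachedFetch(releasesURL, 5)` can serve a cached response to every visitor instead of each browser calling api.github.com and eating into its rate limit; the second argument presumably sets the cache duration in minutes. As a rough illustration of that behaviour (an assumption about the shape of the helper, not the project's actual `utils/proxy/cached-fetch`), a TTL cache keyed by URL could look like this:

```js
// Illustrative in-memory TTL cache keyed by URL; duration is in minutes.
const cache = new Map();

export default async function cachedFetch(url, durationMinutes = 5) {
  const cached = cache.get(url);
  if (cached && Date.now() < cached.expiresAt) {
    return cached.data;
  }
  const data = await fetch(url).then((res) => res.json());
  cache.set(url, { data, expiresAt: Date.now() + durationMinutes * 60 * 1000 });
  return data;
}
```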
Service discovery response handler:

```diff
@@ -52,7 +52,7 @@ export async function servicesResponse() {
     discoveredServices = cleanServiceGroups(await servicesFromDocker());
   } catch (e) {
     console.error("Failed to discover services, please check docker.yaml for errors or remove example entries.");
-    if (e) console.error(e);
+    if (e) console.error(e.toString());
     discoveredServices = [];
   }
 
@@ -60,7 +60,7 @@ export async function servicesResponse() {
     configuredServices = cleanServiceGroups(await servicesFromConfig());
   } catch (e) {
     console.error("Failed to load services.yaml, please check for errors");
-    if (e) console.error(e);
+    if (e) console.error(e.toString());
     configuredServices = [];
   }
 
@@ -68,7 +68,7 @@ export async function servicesResponse() {
     initialSettings = await getSettings();
   } catch (e) {
     console.error("Failed to load settings.yaml, please check for errors");
-    if (e) console.error(e);
+    if (e) console.error(e.toString());
     initialSettings = {};
   }
 
```
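All three catch blocks in `servicesResponse` now log `e.toString()` instead of the raw error object. For an `Error`, `console.error(e)` prints the message plus the full stack trace, while `toString()` keeps the log to the single readable line, which is usually all a YAML mistake in docker.yaml, services.yaml, or settings.yaml needs. A quick illustration (the error message is invented):

```js
const e = new Error("services.yaml: bad indentation of a mapping entry at line 12");

console.error(e);            // "Error: services.yaml: ..." followed by the full stack trace
console.error(e.toString()); // "Error: services.yaml: bad indentation of a mapping entry at line 12"
```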
Docker service discovery:

```diff
@@ -44,6 +44,7 @@ export async function servicesFromDocker() {
 
   const serviceServers = await Promise.all(
     Object.keys(servers).map(async (serverName) => {
+      try {
       const docker = new Docker(getDockerArguments(serverName).conn);
       const containers = await docker.listContainers({
         all: true,
@@ -74,9 +75,17 @@ export async function servicesFromDocker() {
       });
 
       return { server: serverName, services: discovered.filter((filteredService) => filteredService) };
+      } catch (e) {
+        // a server failed, but others may succeed
+        return { server: serverName, services: [] };
+      }
     })
   );
 
+  if (serviceServers.every(server => server.services.length === 0)) {
+    throw new Error('All docker servers failed to connect or returned no containers');
+  }
+
   const mappedServiceGroups = [];
 
   serviceServers.forEach((server) => {
```
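`servicesFromDocker` now isolates failures per Docker server: each server's container listing runs in its own try/catch, an unreachable host simply contributes an empty service list, and only the case where every server came back empty is raised as an error. A condensed sketch of that pattern, with `discoverServer` standing in for the real dockerode listing code:

```js
// Illustrative only: `servers` maps server names to connection settings and
// `discoverServer` is a placeholder for the Docker listContainers logic.
async function discoverAllServers(servers, discoverServer) {
  const serviceServers = await Promise.all(
    Object.keys(servers).map(async (serverName) => {
      try {
        return { server: serverName, services: await discoverServer(serverName) };
      } catch (e) {
        // a server failed, but others may succeed
        return { server: serverName, services: [] };
      }
    })
  );

  // Only a total failure is treated as an error; partial results still render.
  if (serviceServers.every((server) => server.services.length === 0)) {
    throw new Error("All docker servers failed to connect or returned no containers");
  }

  return serviceServers;
}
```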
pyLoad proxy handler (error messages previously referred to the Plex API):

```diff
@@ -84,9 +84,9 @@ export default async function pyloadProxyHandler(req, res) {
 
   if (data?.error || status !== 200) {
     try {
-      return res.status(status).send({error: {message: "HTTP error communicating with Plex API", data: Buffer.from(data).toString()}});
+      return res.status(status).send({error: {message: "HTTP error communicating with Pyload API", data: Buffer.from(data).toString()}});
     } catch (e) {
-      return res.status(status).send({error: {message: "HTTP error communicating with Plex API", data}});
+      return res.status(status).send({error: {message: "HTTP error communicating with Pyload API", data}});
     }
   }
 
@@ -95,7 +95,7 @@ export default async function pyloadProxyHandler(req, res) {
     }
   } catch (e) {
     logger.error(e);
-    return res.status(500).send({error: {message: `Error communicating with Plex API: ${e.toString()}`}});
+    return res.status(500).send({error: {message: `Error communicating with Pyload API: ${e.toString()}`}});
   }
 
   return res.status(400).json({ error: 'Invalid proxy service type' });
```