RustyCMS: file-based headless CMS — API, Admin UI (content, types, assets), Docker/Caddy, image transform; only demo type and demo content in version control
Made-with: Cursor
This commit is contained in:
713
scripts/contentful-to-rustycms.mjs
Normal file
713
scripts/contentful-to-rustycms.mjs
Normal file
@@ -0,0 +1,713 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Migriert Contentful-Export (contentful-export.json) nach rustycms content/de/.
|
||||
* Nur deutsche Locale (en kann später ergänzt werden).
|
||||
*
|
||||
* Aufruf: node scripts/contentful-to-rustycms.mjs [Pfad-zum-Export] [--html-only] [--only=quote|iframe|image|image_gallery]
|
||||
* Default Export-Pfad: ../www.windwiderstand.de/contentful-export.json
|
||||
* --html-only: nur HTML migrieren
|
||||
* --only=X: nur Typ X migrieren (mehrfach möglich), X = html|quote|iframe|image|image_gallery|youtube_video
|
||||
*/
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const ROOT = path.resolve(__dirname, "..");
|
||||
const CONTENT_DE = path.join(ROOT, "content", "de");
|
||||
const argv = process.argv.slice(2);
|
||||
const ONLY = new Set();
|
||||
if (argv.includes("--html-only")) ONLY.add("html");
|
||||
argv.forEach((a) => {
|
||||
const m = a.match(/^--only=(.+)$/);
|
||||
if (m) ONLY.add(m[1]);
|
||||
});
|
||||
const EXPORT_PATH = argv.filter((a) => a !== "--html-only" && !a.startsWith("--only="))[0] || path.join(ROOT, "..", "www.windwiderstand.de", "contentful-export.json");
|
||||
|
||||
// ─── Slug-Normalisierung ─────────────────────────────────────────────────
|
||||
/**
 * Normalize an arbitrary value into a URL-safe slug.
 * Empty / nullish input yields ""; input that normalizes to nothing
 * (e.g. only slashes or punctuation) yields "untitled".
 */
function slugify(value) {
  if (value == null || value === "") return "";
  // Ordered normalization pipeline: each [pattern, replacement] pair is
  // applied in sequence to the stringified input.
  const steps = [
    [/^\//, ""],                 // drop one leading slash
    [/\//g, "-"],                // remaining slashes become dashes
    [/[^a-zA-Z0-9_-]+/g, "-"],   // any other character run becomes one dash
    [/-+/g, "-"],                // squeeze dash runs
    [/^-|-$/g, ""],              // trim dashes at either edge
  ];
  let slug = String(value);
  for (const [pattern, replacement] of steps) {
    slug = slug.replace(pattern, replacement);
  }
  return slug === "" ? "untitled" : slug;
}
|
||||
|
||||
/**
 * Derive a slug for a Contentful entry, preferring type-specific fields.
 * Falls back through: page slug → id field → link slug → sys id → "untitled".
 */
function safeSlug(entry, type) {
  const fields = entry?.fields || {};
  // Pages map their URL slug directly; the root page "/" becomes "home".
  if (type === "page" && fields.slug != null) {
    if (fields.slug === "/") return "home";
    return slugify(fields.slug);
  }
  // An explicit id field wins for every other type.
  if (fields.id != null) return slugify(fields.id);
  // Links without an id may still carry a slug field.
  if (type === "link" && fields.slug != null) return slugify(fields.slug);
  // Last resort before giving up: the Contentful sys id.
  if (entry?.sys?.id) return slugify(entry.sys.id);
  return "untitled";
}
|
||||
|
||||
// ─── Rekursive Sammlung: alle Entries und Assets ──────────────────────────
|
||||
/**
 * Recursively walk an arbitrary export structure and register every
 * Contentful Asset (with a file) in `assets` and every Entry (with a
 * resolvable content-type id) in `entries`, both keyed by sys.id.
 * Entries are stored with a flattened `contentType: { id }` and their
 * fields are walked for nested references; already-seen ids are skipped.
 */
function collectNodes(obj, entries, assets) {
  if (!obj || typeof obj !== "object") return;

  if (Array.isArray(obj)) {
    for (const item of obj) collectNodes(item, entries, assets);
    return;
  }

  const { sys, fields } = obj;

  // Assets are terminal: record and stop descending.
  if (sys?.type === "Asset" && fields?.file) {
    if (!assets.has(sys.id)) assets.set(sys.id, { sys, fields });
    return;
  }

  // Content type id may be nested (sys.contentType.sys.id) or flat.
  const typeId = sys?.contentType?.sys?.id || sys?.contentType?.id;
  if (sys?.type === "Entry" && typeId) {
    if (!entries.has(sys.id)) {
      entries.set(sys.id, {
        sys: { ...sys, contentType: { id: typeId } },
        fields: fields || {},
      });
      // Only walk the fields of entries seen for the first time.
      if (fields && typeof fields === "object") {
        for (const value of Object.values(fields)) collectNodes(value, entries, assets);
      }
    }
    return;
  }

  // Plain container object: descend into every value.
  for (const value of Object.values(obj)) collectNodes(value, entries, assets);
}
|
||||
|
||||
// ─── JSON5-ähnlich ausgeben (gültiges JSON, 2 Leerzeichen) ────────────────
|
||||
/**
 * Write `obj` to `filePath` as pretty-printed JSON (2-space indent, trailing
 * newline), creating parent directories as needed. Despite the .json5 file
 * extension used by callers, the output is plain strict JSON.
 */
function writeJson5(filePath, obj) {
  // mkdirSync with recursive:true is a no-op when the directory already
  // exists, so the previous existsSync pre-check was redundant and a
  // check-then-act (TOCTOU) race besides.
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  const str = JSON.stringify(obj, null, 2);
  fs.writeFileSync(filePath, str + "\n", "utf8");
}
|
||||
|
||||
// ─── Main ───────────────────────────────────────────────────────────────
|
||||
function main() {
|
||||
if (!fs.existsSync(EXPORT_PATH)) {
|
||||
console.error("Export-Datei nicht gefunden:", EXPORT_PATH);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log("Lese Export:", EXPORT_PATH);
|
||||
const data = JSON.parse(fs.readFileSync(EXPORT_PATH, "utf8"));
|
||||
const byType = data.byType || {};
|
||||
const entries = new Map();
|
||||
const assets = new Map();
|
||||
Object.values(byType).forEach((group) => {
|
||||
if (group?.items) group.items.forEach((item) => collectNodes(item, entries, assets));
|
||||
});
|
||||
console.log("Einträge:", entries.size, "Assets:", assets.size);
|
||||
|
||||
const idToSlug = new Map();
|
||||
const SUPPORTED_ROW_COMPONENTS = new Set([
|
||||
"markdown", "html", "componentLinkList", "fullwidthBanner", "componentPostOverview", "componentSearchableText",
|
||||
"quoteComponent", "iframe", "image", "imageGallery", "youtubeVideo",
|
||||
]);
|
||||
|
||||
// Resolve a stable, unique slug for an entry (or raw entry id), memoized in
// the surrounding `idToSlug` map so repeated lookups return the same value.
// Uniqueness is enforced by suffixing "-1", "-2", … against every slug
// handed out so far.
// NOTE(review): `new Set(idToSlug.values())` is rebuilt on every cache miss,
// i.e. O(n) per call / O(n^2) overall — acceptable for small exports.
function getSlug(entryOrId, contentType) {
  const entry = typeof entryOrId === "string" ? entries.get(entryOrId) : entryOrId;
  if (!entry) return null;
  const id = entry.sys?.id;
  const type = contentType || entry.sys?.contentType?.id || entry.sys?.contentType?.sys?.id;
  if (idToSlug.has(id)) return idToSlug.get(id);
  // Assets: already registered in idToSlug during step 1
  if (entry.sys?.type === "Asset") return idToSlug.get(id) || slugify(entry.fields?.title || id);
  const slug = safeSlug(entry, type);
  let finalSlug = slug;
  let n = 0;
  const used = new Set(idToSlug.values());
  while (used.has(finalSlug)) {
    n++;
    finalSlug = slug + "-" + n;
  }
  idToSlug.set(id, finalSlug);
  return finalSlug;
}
|
||||
|
||||
// Map a Contentful reference (raw id string, Entry, or Asset) to its slug.
// A string id unknown to idToSlug falls through to the id itself; shapes
// that are neither Entry nor Asset yield null.
function refToSlug(ref) {
  if (ref == null) return null;
  if (typeof ref === "string") return idToSlug.get(ref) || ref;
  if (ref.sys?.type === "Entry") return getSlug(ref, ref.sys?.contentType?.id);
  // Assets are resolved with the synthetic content type "img".
  if (ref.sys?.type === "Asset") return getSlug(ref, "img");
  return null;
}
|
||||
|
||||
const run = (key) => !ONLY.size || ONLY.has(key);
|
||||
|
||||
// Bei --only=image/image_gallery: nur Asset-Slugs in idToSlug eintragen (für refToSlug), ohne img-Dateien zu schreiben
|
||||
if (ONLY.size && (ONLY.has("image") || ONLY.has("image_gallery"))) {
|
||||
assets.forEach((asset, id) => {
|
||||
const slug = slugify(asset.fields?.title || id);
|
||||
const uniq = Array.from(idToSlug.values()).includes(slug) ? slug + "-" + id.slice(0, 6) : slug;
|
||||
idToSlug.set(id, uniq);
|
||||
});
|
||||
}
|
||||
|
||||
if (run("img")) {
|
||||
// ─── 1) Assets → img ───────────────────────────────────────────────────
|
||||
assets.forEach((asset, id) => {
|
||||
const slug = slugify(asset.fields?.title || id);
|
||||
const uniq = Array.from(idToSlug.values()).includes(slug) ? slug + "-" + id.slice(0, 6) : slug;
|
||||
idToSlug.set(id, uniq);
|
||||
const file = asset.fields?.file || {};
|
||||
let url = file.url || "";
|
||||
if (url && !url.startsWith("http")) url = "https:" + url;
|
||||
const out = {
|
||||
_slug: uniq,
|
||||
title: asset.fields?.title ?? "",
|
||||
description: asset.fields?.description ?? "",
|
||||
file: {
|
||||
url,
|
||||
fileName: file.fileName,
|
||||
contentType: file.contentType,
|
||||
details: file.details,
|
||||
},
|
||||
};
|
||||
writeJson5(path.join(CONTENT_DE, "img", uniq + ".json5"), out);
|
||||
});
|
||||
console.log("img:", assets.size);
|
||||
}
|
||||
|
||||
if (run("tag")) {
|
||||
// ─── 2) Tag ────────────────────────────────────────────────────────────
|
||||
(byType.tag?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "tag");
|
||||
const f = entry.fields || {};
|
||||
writeJson5(path.join(CONTENT_DE, "tag", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
});
|
||||
});
|
||||
console.log("tag:", (byType.tag?.items || []).length);
|
||||
}
|
||||
|
||||
if (run("link")) {
|
||||
// ─── 3) Link ───────────────────────────────────────────────────────────
|
||||
(byType.link?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "link");
|
||||
const f = entry.fields || {};
|
||||
writeJson5(path.join(CONTENT_DE, "link", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.linkName ?? slug,
|
||||
internal: slug,
|
||||
linkName: f.linkName ?? "",
|
||||
url: f.url ?? "",
|
||||
newTab: f.newTab ?? false,
|
||||
external: f.externalLink ?? false,
|
||||
description: f.description ?? "",
|
||||
alt: f.alt ?? "",
|
||||
showText: f.showText !== false,
|
||||
author: f.author ?? "–",
|
||||
date: f.date ?? "",
|
||||
source: f.source ?? "–",
|
||||
});
|
||||
});
|
||||
console.log("link:", (byType.link?.items || []).length);
|
||||
}
|
||||
|
||||
if (run("markdown")) {
|
||||
// ─── 4) Markdown ────────────────────────────────────────────────────────
|
||||
// Write one markdown entry to content/de/markdown/<slug>.json5 and return
// its slug. Layout values fall back to mobile "12" / spaceBottom 0;
// tablet/desktop are passed through (and dropped by JSON.stringify when
// undefined).
function extractMarkdown(entry) {
  const slug = getSlug(entry, "markdown");
  const f = entry.fields || {};
  // Layout is expected as a nested Contentful entry (f.layout.fields).
  const layout = f.layout?.fields || {};
  writeJson5(path.join(CONTENT_DE, "markdown", slug + ".json5"), {
    _slug: slug,
    name: f.id ?? slug,
    content: f.content ?? "",
    layout: {
      mobile: layout.mobile ?? "12",
      tablet: layout.tablet,
      desktop: layout.desktop,
      spaceBottom: layout.spaceBottom ?? 0,
    },
    alignment: f.alignment ?? "left",
  });
  return slug;
}
|
||||
// Idempotently extract a markdown entry and return its slug; returns null
// for anything that is not a markdown entry.
// NOTE(review): no callers visible in this file — possibly dead code kept
// for future reference resolution; confirm before removing.
function ensureMarkdown(entry) {
  if (!entry || entry.sys?.contentType?.id !== "markdown") return null;
  if (!idToSlug.has(entry.sys.id)) extractMarkdown(entry);
  return idToSlug.get(entry.sys.id);
}
|
||||
(byType.markdown?.items || []).forEach(extractMarkdown);
|
||||
entries.forEach((entry) => {
|
||||
if (entry.sys?.contentType?.id === "markdown" && !idToSlug.has(entry.sys.id)) extractMarkdown(entry);
|
||||
});
|
||||
console.log("markdown:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "markdown").length);
|
||||
}
|
||||
|
||||
// ─── 4b) HTML (html) ────────────────────────────────────────────────────
|
||||
if (run("html")) {
|
||||
// Write one html entry to content/de/html/<slug>.json5 and return its slug.
// Unlike extractMarkdown, the layout here may be either a nested Contentful
// entry (f.layout.fields) or an inline plain object (f.layout).
function extractHtml(entry) {
  const slug = getSlug(entry, "html");
  const f = entry.fields || {};
  const layout = f.layout?.fields || f.layout || {};
  writeJson5(path.join(CONTENT_DE, "html", slug + ".json5"), {
    _slug: slug,
    name: f.id ?? slug,
    html: f.html ?? "",
    layout: {
      mobile: layout.mobile ?? "12",
      tablet: layout.tablet,
      desktop: layout.desktop,
      spaceBottom: layout.spaceBottom ?? 0,
    },
  });
  return slug;
}
|
||||
(byType.html?.items || []).forEach(extractHtml);
|
||||
entries.forEach((entry) => {
|
||||
if (entry.sys?.contentType?.id === "html" && !idToSlug.has(entry.sys.id)) extractHtml(entry);
|
||||
});
|
||||
console.log("html:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "html").length);
|
||||
}
|
||||
|
||||
// ─── 4c) Quote (quoteComponent) ─────────────────────────────────────────
|
||||
if (run("quote")) {
|
||||
(byType.quoteComponent?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "quoteComponent");
|
||||
const f = entry.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "quote", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
quote: f.quote ?? "",
|
||||
author: f.author ?? "",
|
||||
variant: f.variant ?? "left",
|
||||
layout: {
|
||||
mobile: layout.mobile ?? "12",
|
||||
tablet: layout.tablet,
|
||||
desktop: layout.desktop,
|
||||
spaceBottom: layout.spaceBottom ?? 0,
|
||||
},
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "quoteComponent" && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "quoteComponent");
|
||||
const f = e.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "quote", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
quote: f.quote ?? "",
|
||||
author: f.author ?? "",
|
||||
variant: f.variant ?? "left",
|
||||
layout: { mobile: layout.mobile ?? "12", tablet: layout.tablet, desktop: layout.desktop, spaceBottom: layout.spaceBottom ?? 0 },
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("quote:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "quoteComponent").length);
|
||||
}
|
||||
|
||||
// ─── 4d) Iframe ────────────────────────────────────────────────────────
|
||||
if (run("iframe")) {
|
||||
(byType.iframe?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "iframe");
|
||||
const f = entry.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "iframe", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
content: f.content ?? "",
|
||||
iframe: f.iframe ?? "",
|
||||
overlayImage: refToSlug(f.overlayImage) ?? undefined,
|
||||
layout: {
|
||||
mobile: layout.mobile ?? "12",
|
||||
tablet: layout.tablet,
|
||||
desktop: layout.desktop,
|
||||
spaceBottom: layout.spaceBottom ?? 0,
|
||||
},
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "iframe" && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "iframe");
|
||||
const f = e.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "iframe", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
content: f.content ?? "",
|
||||
iframe: f.iframe ?? "",
|
||||
overlayImage: refToSlug(f.overlayImage) ?? undefined,
|
||||
layout: { mobile: layout.mobile ?? "12", tablet: layout.tablet, desktop: layout.desktop, spaceBottom: layout.spaceBottom ?? 0 },
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("iframe:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "iframe").length);
|
||||
}
|
||||
|
||||
// ─── 4e) Image ───────────────────────────────────────────────────────────
|
||||
if (run("image")) {
|
||||
(byType.image?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "image");
|
||||
const f = entry.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "image", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
image: refToSlug(f.image) ?? "",
|
||||
caption: f.caption ?? "",
|
||||
layout: {
|
||||
mobile: layout.mobile ?? "12",
|
||||
tablet: layout.tablet,
|
||||
desktop: layout.desktop,
|
||||
spaceBottom: layout.spaceBottom ?? 0,
|
||||
},
|
||||
...(f.maxWidth != null && { maxWidth: f.maxWidth }),
|
||||
...(f.aspectRatio != null && { aspectRatio: f.aspectRatio }),
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "image" && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "image");
|
||||
const f = e.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "image", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
image: refToSlug(f.image) ?? "",
|
||||
caption: f.caption ?? "",
|
||||
layout: { mobile: layout.mobile ?? "12", tablet: layout.tablet, desktop: layout.desktop, spaceBottom: layout.spaceBottom ?? 0 },
|
||||
...(f.maxWidth != null && { maxWidth: f.maxWidth }),
|
||||
...(f.aspectRatio != null && { aspectRatio: f.aspectRatio }),
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("image:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "image").length);
|
||||
}
|
||||
|
||||
// ─── 4f) ImageGallery ───────────────────────────────────────────────────
|
||||
if (run("image_gallery")) {
|
||||
(byType.imageGallery?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "imageGallery");
|
||||
const f = entry.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
const imageSlugs = (f.images || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "image_gallery", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
images: imageSlugs,
|
||||
layout: {
|
||||
mobile: layout.mobile ?? "12",
|
||||
tablet: layout.tablet,
|
||||
desktop: layout.desktop,
|
||||
spaceBottom: layout.spaceBottom ?? 0,
|
||||
},
|
||||
...(f.description != null && f.description !== "" && { description: f.description }),
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if ((e.sys?.contentType?.id === "imageGallery" || e.sys?.contentType?.id === "imgGallery") && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "imageGallery");
|
||||
const f = e.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
const imageSlugs = (f.images || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "image_gallery", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
name: f.name ?? slug,
|
||||
images: imageSlugs,
|
||||
layout: { mobile: layout.mobile ?? "12", tablet: layout.tablet, desktop: layout.desktop, spaceBottom: layout.spaceBottom ?? 0 },
|
||||
...(f.description != null && f.description !== "" && { description: f.description }),
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("image_gallery:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "imageGallery" || e.sys?.contentType?.id === "imgGallery").length);
|
||||
}
|
||||
|
||||
// ─── 4g) YoutubeVideo ───────────────────────────────────────────────────
|
||||
if (run("youtube_video")) {
|
||||
(byType.youtubeVideo?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "youtubeVideo");
|
||||
const f = entry.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "youtube_video", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
youtubeId: f.youtubeId ?? "",
|
||||
...(f.params != null && f.params !== "" && { params: f.params }),
|
||||
...(f.title != null && f.title !== "" && { title: f.title }),
|
||||
...(f.description != null && f.description !== "" && { description: f.description }),
|
||||
layout: {
|
||||
mobile: layout.mobile ?? "12",
|
||||
tablet: layout.tablet,
|
||||
desktop: layout.desktop,
|
||||
spaceBottom: layout.spaceBottom ?? 0,
|
||||
},
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "youtubeVideo" && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "youtubeVideo");
|
||||
const f = e.fields || {};
|
||||
const layout = f.layout?.fields || f.layout || {};
|
||||
writeJson5(path.join(CONTENT_DE, "youtube_video", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
youtubeId: f.youtubeId ?? "",
|
||||
...(f.params != null && f.params !== "" && { params: f.params }),
|
||||
...(f.title != null && f.title !== "" && { title: f.title }),
|
||||
...(f.description != null && f.description !== "" && { description: f.description }),
|
||||
layout: { mobile: layout.mobile ?? "12", tablet: layout.tablet, desktop: layout.desktop, spaceBottom: layout.spaceBottom ?? 0 },
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("youtube_video:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "youtubeVideo").length);
|
||||
}
|
||||
|
||||
if (ONLY.size) {
|
||||
console.log("Fertig. Nur migriert:", [...ONLY].sort().join(", "));
|
||||
return;
|
||||
}
|
||||
|
||||
// ─── 5) Link-List (componentLinkList) ───────────────────────────────────
|
||||
// Write a componentLinkList entry to content/de/link_list/<slug>.json5,
// resolving each linked entry to its slug (unresolvable refs are dropped
// by the filter(Boolean)).
function writeLinkList(entry) {
  const slug = getSlug(entry, "componentLinkList");
  const f = entry.fields || {};
  const linkSlugs = (f.links || []).map((l) => refToSlug(l)).filter(Boolean);
  writeJson5(path.join(CONTENT_DE, "link_list", slug + ".json5"), {
    _slug: slug,
    headline: f.headline ?? "",
    links: linkSlugs,
  });
}
|
||||
(byType.componentLinkList?.items || []).forEach(writeLinkList);
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "componentLinkList" && !idToSlug.has(e.sys.id)) writeLinkList(e);
|
||||
});
|
||||
console.log("link_list:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "componentLinkList").length);
|
||||
|
||||
// ─── 6) FullwidthBanner ────────────────────────────────────────────────
|
||||
// Write a fullwidthBanner entry to content/de/fullwidth_banner/<slug>.json5
// and return its slug. The banner image is inlined as an absolute URL
// (protocol-relative Contentful URLs get "https:" prefixed) rather than as
// an img-slug reference; no image → empty array.
function extractFullwidthBanner(entry) {
  const slug = getSlug(entry, "fullwidthBanner");
  const f = entry.fields || {};
  const img = f.img;
  let image = [];
  if (img?.fields?.file?.url) {
    let u = img.fields.file.url;
    if (!u.startsWith("http")) u = "https:" + u;
    image = [u];
  }
  writeJson5(path.join(CONTENT_DE, "fullwidth_banner", slug + ".json5"), {
    _slug: slug,
    name: f.id ?? slug,
    variant: f.variant ?? "light",
    headline: f.headline ?? "",
    subheadline: f.subheadline ?? "",
    text: f.text ?? "",
    image,
  });
  return slug;
}
|
||||
(byType.fullwidthBanner?.items || []).forEach(extractFullwidthBanner);
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "fullwidthBanner" && !idToSlug.has(e.sys.id)) extractFullwidthBanner(e);
|
||||
});
|
||||
console.log("fullwidth_banner:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "fullwidthBanner").length);
|
||||
|
||||
// ─── 7) PostOverview (componentPostOverview) ────────────────────────────
|
||||
(byType.componentPostOverview?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "componentPostOverview");
|
||||
const f = entry.fields || {};
|
||||
const tagSlugs = (f.filterByTag || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "post_overview", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
headline: f.headline ?? "",
|
||||
allPosts: f.allPosts ?? true,
|
||||
filterByTag: tagSlugs,
|
||||
});
|
||||
});
|
||||
entries.forEach((e) => {
|
||||
if (e.sys?.contentType?.id === "componentPostOverview" && !idToSlug.has(e.sys.id)) {
|
||||
const slug = getSlug(e, "componentPostOverview");
|
||||
const f = e.fields || {};
|
||||
const tagSlugs = (f.filterByTag || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "post_overview", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
headline: f.headline ?? "",
|
||||
allPosts: f.allPosts ?? true,
|
||||
filterByTag: tagSlugs,
|
||||
});
|
||||
}
|
||||
});
|
||||
console.log("post_overview:", Array.from(entries.values()).filter((e) => e.sys?.contentType?.id === "componentPostOverview").length);
|
||||
|
||||
// ─── 8) TextFragment + SearchableText (optional, falls verwendet) ────────
|
||||
(byType.textFragment?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "textFragment");
|
||||
const f = entry.fields || {};
|
||||
const tagSlugs = (f.tags || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "text_fragment", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
title: f.title ?? "",
|
||||
text: f.text ?? "",
|
||||
tags: tagSlugs,
|
||||
});
|
||||
});
|
||||
(byType.componentSearchableText?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "componentSearchableText");
|
||||
const f = entry.fields || {};
|
||||
const fragSlugs = (f.textFragments || []).map(refToSlug).filter(Boolean);
|
||||
const tagSlugs = (f.tagWhitelist || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "searchable_text", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
textFragments: fragSlugs,
|
||||
tagWhitelist: tagSlugs,
|
||||
title: f.title,
|
||||
description: f.description,
|
||||
});
|
||||
});
|
||||
|
||||
// ─── 9) Post ───────────────────────────────────────────────────────────
|
||||
(byType.post?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "post");
|
||||
const f = entry.fields || {};
|
||||
const row1Content = (f.row1Content || []).map(refToSlug).filter(Boolean);
|
||||
const obj = {
|
||||
_slug: slug,
|
||||
slug: f.slug ?? "/" + slug,
|
||||
linkName: f.linkName ?? "",
|
||||
headline: f.headline ?? "",
|
||||
subheadline: f.subheadline ?? "",
|
||||
excerpt: f.excerpt ?? "",
|
||||
...(entry.sys?.createdAt && { created: entry.sys.createdAt }),
|
||||
postImage: refToSlug(f.postImage) || null,
|
||||
postTag: (f.postTag || []).map(refToSlug).filter(Boolean),
|
||||
important: f.important ?? false,
|
||||
content: f.content ?? "",
|
||||
showCommentSection: f.showCommentSection !== false,
|
||||
row1JustifyContent: f.row1JustifyContent ?? "start",
|
||||
row1AlignItems: f.row1AlignItems ?? "start",
|
||||
seoTitle: f.seoTitle ?? "",
|
||||
seoDescription: f.seoDescription ?? "",
|
||||
seoMetaRobots: f.seoMetaRobots ?? "index, follow",
|
||||
};
|
||||
if (obj.postImage == null) delete obj.postImage;
|
||||
if (row1Content.length) obj.row1Content = row1Content;
|
||||
writeJson5(path.join(CONTENT_DE, "post", slug + ".json5"), obj);
|
||||
});
|
||||
console.log("post:", (byType.post?.items || []).length);
|
||||
|
||||
// ─── 10) Page (alle Page-Einträge aus entries, damit auch nur in Nav referenzierte) ───
|
||||
const pageEntries = Array.from(entries.values()).filter((e) => (e.sys?.contentType?.id || e.sys?.contentType?.sys?.id) === "page");
|
||||
pageEntries.forEach((entry) => {
|
||||
const slug = getSlug(entry, "page");
|
||||
const f = entry.fields || {};
|
||||
const row1Content = (f.row1Content || [])
|
||||
.filter((ref) => ref?.sys?.contentType && SUPPORTED_ROW_COMPONENTS.has(ref.sys.contentType.sys?.id || ref.sys.contentType.id))
|
||||
.map(refToSlug)
|
||||
.filter(Boolean);
|
||||
const name = slug === "home" || f.slug === "/" ? "home" : slug;
|
||||
const pageSlug = f.slug ?? (name === "home" ? "/" : "/" + name);
|
||||
writeJson5(path.join(CONTENT_DE, "page", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
slug: pageSlug,
|
||||
name,
|
||||
linkName: f.linkName ?? "",
|
||||
headline: f.headline ?? "",
|
||||
subheadline: f.subheadline ?? "",
|
||||
seoTitle: f.seoTitle ?? "",
|
||||
seoDescription: f.seoDescription ?? "",
|
||||
seoMetaRobots: f.seoMetaRobots ?? "index, follow",
|
||||
row1JustifyContent: f.row1JustifyContent ?? "start",
|
||||
row1AlignItems: f.row1AlignItems ?? "start",
|
||||
row1Content,
|
||||
...(refToSlug(f.topFullwidthBanner) ? { topFullwidthBanner: refToSlug(f.topFullwidthBanner) } : {}),
|
||||
});
|
||||
});
|
||||
console.log("page:", pageEntries.length);
|
||||
|
||||
// ─── 11) Footer ────────────────────────────────────────────────────────
|
||||
(byType.footer?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "footer");
|
||||
const f = entry.fields || {};
|
||||
const row1Content = (f.row1Content || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "footer", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
id: f.id ?? slug,
|
||||
row1JustifyContent: f.row1JustifyContent ?? "start",
|
||||
row1AlignItems: f.row1AlignItems ?? "start",
|
||||
row1Content,
|
||||
});
|
||||
});
|
||||
|
||||
// ─── 12) Navigation ─────────────────────────────────────────────────────
|
||||
(byType.navigation?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "navigation");
|
||||
const f = entry.fields || {};
|
||||
const links = (f.links || []).map((p) => refToSlug(p)).filter(Boolean);
|
||||
const internal = f.id ?? (slug === "navigation-header" ? "navigation-header" : slug);
|
||||
writeJson5(path.join(CONTENT_DE, "navigation", (f.id || slug) + ".json5"), {
|
||||
_slug: f.id || slug,
|
||||
name: f.id ?? slug,
|
||||
internal,
|
||||
links,
|
||||
});
|
||||
});
|
||||
|
||||
// ─── 13) PageConfig ─────────────────────────────────────────────────────
|
||||
const pageConfigItems = byType.pageConfig?.items || [];
|
||||
const defaultFooterText = "© Bürgerinitiative Vachdorf. Alle Rechte vorbehalten.";
|
||||
pageConfigItems.forEach((entry) => {
|
||||
const slug = "default";
|
||||
const f = entry.fields || {};
|
||||
const logoRef = f.logo;
|
||||
const logoSlug = logoRef ? (logoRef.sys?.type === "Asset" ? idToSlug.get(logoRef.sys?.id) : null) : null;
|
||||
writeJson5(path.join(CONTENT_DE, "page_config", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
logo: logoSlug ?? "logo",
|
||||
footerText1: defaultFooterText,
|
||||
seoTitle: f.seoTitle ?? "Bürgerinitiative Vachdorf / $1",
|
||||
seoDescription: f.seoDescription ?? "$1 - Bürgerinitiative Vachdorf",
|
||||
website: f.website ?? "https://www.windwiderstand.de",
|
||||
});
|
||||
});
|
||||
if (pageConfigItems.length === 0) {
|
||||
writeJson5(path.join(CONTENT_DE, "page_config", "default.json5"), {
|
||||
_slug: "default",
|
||||
logo: "logo",
|
||||
footerText1: defaultFooterText,
|
||||
seoTitle: "Bürgerinitiative Vachdorf / $1",
|
||||
seoDescription: "$1 - Bürgerinitiative Vachdorf",
|
||||
website: "https://www.windwiderstand.de",
|
||||
});
|
||||
}
|
||||
|
||||
// ─── 14) Campaign ───────────────────────────────────────────────────────
|
||||
(byType.campaign?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "campaign");
|
||||
const f = entry.fields || {};
|
||||
writeJson5(path.join(CONTENT_DE, "campaign", slug + ".json5"), {
|
||||
_slug: slug,
|
||||
campaignName: f.campaingName ?? f.campaignName ?? slug,
|
||||
urlPattern: f.urlPatter ?? f.urlPattern ?? "/",
|
||||
selector: f.selector ?? "body",
|
||||
insertHtml: f.insertHtml ?? "beforeend",
|
||||
timeUntil: f.timeUntil ?? "",
|
||||
html: f.html ?? "",
|
||||
javascript: f.javascript ?? "",
|
||||
css: f.css ?? "",
|
||||
});
|
||||
});
|
||||
|
||||
// ─── 15) Campaigns ─────────────────────────────────────────────────────
|
||||
(byType.campaigns?.items || []).forEach((entry) => {
|
||||
const slug = getSlug(entry, "campaigns");
|
||||
const f = entry.fields || {};
|
||||
const campaignSlugs = (f.campaings || f.campaigns || []).map(refToSlug).filter(Boolean);
|
||||
writeJson5(path.join(CONTENT_DE, "campaigns", (f.id || slug) + ".json5"), {
|
||||
_slug: f.id || slug,
|
||||
id: f.id ?? slug,
|
||||
campaigns: campaignSlugs,
|
||||
enable: f.enable !== false,
|
||||
});
|
||||
});
|
||||
|
||||
console.log("Fertig. Content in", CONTENT_DE);
|
||||
}
|
||||
|
||||
main();
|
||||
86
scripts/migrate-image-to-src-description.mjs
Normal file
86
scripts/migrate-image-to-src-description.mjs
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Migriert content/<locale>/image/*.json5 auf Schema { description, src }.
|
||||
* - Wenn image eine URL ist (http/https): src = image, description = caption || ""
|
||||
* - Wenn image ein img-Slug (Referenz): lädt img/<slug>.json5, src = img.src, description = caption || img.description || ""
|
||||
*/
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const CONTENT_ROOT = path.join(__dirname, "..", "content");
|
||||
|
||||
/**
 * Parse a JSON string, returning null instead of throwing on invalid input.
 */
function parseJson(str) {
  let parsed = null;
  try {
    parsed = JSON.parse(str);
  } catch {
    // Malformed JSON: the caller receives null and decides how to proceed.
  }
  return parsed;
}
|
||||
|
||||
/** True when s is a string that begins with an explicit http(s) scheme. */
function isUrl(s) {
  if (typeof s !== "string") return false;
  return ["http://", "https://"].some((scheme) => s.startsWith(scheme));
}
|
||||
|
||||
// Yield { locale, dir } for every content/<locale>/image directory that
// exists under CONTENT_ROOT (one candidate per locale subdirectory;
// non-directories and locales without an image/ folder are skipped).
function* findImageDirs() {
  const locales = fs.readdirSync(CONTENT_ROOT, { withFileTypes: true });
  for (const loc of locales) {
    if (!loc.isDirectory()) continue;
    const imageDir = path.join(CONTENT_ROOT, loc.name, "image");
    if (fs.existsSync(imageDir)) yield { locale: loc.name, dir: imageDir };
  }
}
|
||||
|
||||
let total = 0;   // image files inspected
let updated = 0; // image files rewritten

for (const { locale, dir: imageDir } of findImageDirs()) {
  // Sibling img/ directory holds the referenced asset records for this locale.
  const imgDir = path.join(CONTENT_ROOT, locale, "img");
  const files = fs.readdirSync(imageDir).filter((f) => f.endsWith(".json5"));
  for (const file of files) {
    total++;
    const filePath = path.join(imageDir, file);
    const data = parseJson(fs.readFileSync(filePath, "utf8"));
    if (!data) {
      console.warn("Skip (parse error):", filePath);
      continue;
    }
    // Fall back to the file name when _slug is missing.
    const slug = data._slug ?? file.replace(/\.json5$/, "");
    const caption = data.caption != null ? String(data.caption).trim() : "";
    let description = "";
    let src = "";

    if (isUrl(data.image)) {
      // Direct URL: keep it as src; the caption becomes the description.
      src = data.image;
      description = caption;
    } else {
      // Otherwise `image` is expected to be an img/<slug> reference.
      const imgSlug = data.image;
      if (!imgSlug) {
        console.warn("Kein image (URL oder Referenz):", filePath);
      } else {
        const imgPath = path.join(imgDir, imgSlug + ".json5");
        if (!fs.existsSync(imgPath)) {
          console.warn("Img nicht gefunden:", imgPath);
        } else {
          const img = parseJson(fs.readFileSync(imgPath, "utf8"));
          if (img) {
            // assumes the referenced img file already uses the new
            // { description, src } schema — TODO confirm migration order
            src = img.src ?? "";
            description = caption || img.description || "";
          }
        }
      }
    }

    // Rewrite in the new schema; description is omitted when empty.
    // NOTE(review): the file is rewritten (and counted as updated) even
    // when no src could be resolved, leaving src = "" — confirm intended.
    const out = {
      _slug: slug,
      ...(description !== "" && { description }),
      src,
    };
    fs.writeFileSync(filePath, JSON.stringify(out, null, 2) + "\n", "utf8");
    updated++;
  }
}

console.log("Image-Migration:", updated, "von", total, "Dateien auf { description, src } umgestellt.");
|
||||
70
scripts/migrate-img-to-src-description.mjs
Normal file
70
scripts/migrate-img-to-src-description.mjs
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env node
/**
 * Migrates content/<locale>/img/*.json5 from the old schema
 * (title, file.url, …) to the new schema (description, src).
 * - src = file.url (required by the new schema; if file.url is missing a
 *   warning is printed and src is written as "")
 * - description = description || title || ""
 * - _slug is preserved
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const scriptDir = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_ROOT = path.join(scriptDir, "..", "content");

// Lenient JSON parse: null on malformed input instead of throwing.
const tryParse = (text) => {
  try {
    return JSON.parse(text);
  } catch {
    return null;
  }
};

// Collect every content/<locale>/img directory that actually exists.
function collectImgDirs() {
  return fs
    .readdirSync(CONTENT_ROOT, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => path.join(CONTENT_ROOT, entry.name, "img"))
    .filter((dir) => fs.existsSync(dir));
}

const stats = { total: 0, updated: 0, skipped: 0, noUrl: 0 };

for (const imgDir of collectImgDirs()) {
  const json5Files = fs.readdirSync(imgDir).filter((f) => f.endsWith(".json5"));
  for (const file of json5Files) {
    stats.total++;
    const filePath = path.join(imgDir, file);
    const data = tryParse(fs.readFileSync(filePath, "utf8"));
    if (!data) {
      console.warn("Skip (parse error):", filePath);
      stats.skipped++;
      continue;
    }

    const url = data.file?.url;
    if (url == null || url === "") {
      console.warn("Keine file.url:", filePath);
      stats.noUrl++;
    }

    // description falls back from explicit description to title to "".
    let description;
    if (data.description != null && data.description !== "") {
      description = data.description;
    } else if (data.title != null) {
      description = String(data.title);
    } else {
      description = "";
    }

    const migrated = { _slug: data._slug ?? file.replace(/\.json5$/, "") };
    if (description !== "") migrated.description = description;
    migrated.src = url ?? "";

    fs.writeFileSync(filePath, JSON.stringify(migrated, null, 2) + "\n", "utf8");
    stats.updated++;
  }
}

console.log("Img-Migration:", stats.updated, "von", stats.total, "Dateien aktualisiert.");
if (stats.skipped) console.log("Übersprungen (Parse-Fehler):", stats.skipped);
if (stats.noUrl) console.log("Ohne file.url (src leer):", stats.noUrl);
|
||||
102
scripts/normalize-fullwidth-banner-slugs.mjs
Normal file
102
scripts/normalize-fullwidth-banner-slugs.mjs
Normal file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes all fullwidth-banner files: _slug and file name become
 * "fullwidth-banner-" + descriptive part.
 * - component-fullwidth-banner-X → fullwidth-banner-X
 * - component-fullwidthbanner-X  → fullwidth-banner-X
 * - X-fullwidthbanner / homepage-fullwidthbanner → fullwidth-banner-X
 * - 2512_banner_downloads → fullwidth-banner-2512-banner-downloads
 * Updates all references in content/de.
 *
 * Fixes over the previous version:
 * - removed a redundant else-branch in normalize() (the "_" → "-" /
 *   lowercase conversion was already applied unconditionally up front)
 * - files that fail to parse are skipped in the rename pass instead of
 *   crashing on `data._slug = …` with a null `data`
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const BANNER_DIR = path.join(CONTENT_DE, "fullwidth_banner");

// Map a file base name to its canonical "fullwidth-banner-…" slug.
function normalize(base) {
  // Unify separators first; every branch below works on this form.
  let rest = base.replace(/_/g, "-").toLowerCase();
  if (rest.startsWith("component-fullwidth-banner-")) {
    rest = rest.slice("component-fullwidth-banner-".length);
  } else if (rest.startsWith("component-fullwidthbanner-")) {
    rest = rest.slice("component-fullwidthbanner-".length);
  } else if (rest.endsWith("-fullwidthbanner")) {
    rest = rest.slice(0, -"-fullwidthbanner".length);
  } else if (rest.endsWith("-fullwidth-banner")) {
    rest = rest.slice(0, -"-fullwidth-banner".length);
  }
  return "fullwidth-banner-" + rest;
}

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(BANNER_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const used = new Set();

// Pass 1: compute the old → new slug mapping, suffixing -1, -2, … on collision.
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const data = parseJson5(fs.readFileSync(path.join(BANNER_DIR, file), "utf8"));
  const oldSlug = data?._slug || base;
  let newSlug = normalize(base);
  let n = 0;
  while (used.has(newSlug)) {
    n++;
    newSlug = normalize(base) + "-" + n;
  }
  used.add(newSlug);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Fullwidth-Banner-Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: rewrite each banner file under its new slug.
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const filePath = path.join(BANNER_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Unparseable file: leave it in place rather than crash.
    console.warn("Skip (parse error):", filePath);
    continue;
  }
  const oldSlug = data._slug || base;
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  data._slug = newSlug;
  const newPath = path.join(BANNER_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under dir.
function walkDir(dir, fn) {
  for (const e of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: rewrite quoted references to renamed slugs across all content.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const next = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (next !== content) {
      content = next;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Fullwidth-Banner-Slugs normalisiert und Referenzen aktualisiert.");
|
||||
99
scripts/normalize-html-slugs.mjs
Normal file
99
scripts/normalize-html-slugs.mjs
Normal file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes all HTML files: _slug and file name become "html-" + descriptive part.
 * - component-html-X → html-X
 * - redirectTo-X → html-X
 * - X-html / page_links_embedded-html → html-X (with - instead of _)
 * Updates all references in content/de.
 *
 * Fix over the previous version: files that fail to parse are skipped in the
 * rename pass instead of crashing on `data._slug = …` with a null `data`.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const HTML_DIR = path.join(CONTENT_DE, "html");

// Map a file base name to its canonical "html-…" slug.
function normalize(base) {
  let rest = base.replace(/_/g, "-").toLowerCase();
  if (rest.startsWith("component-html-")) {
    rest = rest.slice("component-html-".length);
  } else if (rest.startsWith("redirectto-")) {
    rest = rest.slice("redirectto-".length);
  } else if (rest.endsWith("-html")) {
    rest = rest.slice(0, -"-html".length);
  } else if (rest.startsWith("html-")) {
    rest = rest.slice("html-".length);
  }
  return "html-" + rest;
}

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(HTML_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const used = new Set();

// Pass 1: compute old → new slug mapping, suffixing -1, -2, … on collision.
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const data = parseJson5(fs.readFileSync(path.join(HTML_DIR, file), "utf8"));
  const oldSlug = data?._slug || base;
  let newSlug = normalize(base);
  let n = 0;
  while (used.has(newSlug)) {
    n++;
    newSlug = normalize(base) + "-" + n;
  }
  used.add(newSlug);
  oldToNew.set(oldSlug, newSlug);
}

console.log("HTML-Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: rewrite each HTML file under its new slug.
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const filePath = path.join(HTML_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Unparseable file: leave it in place rather than crash.
    console.warn("Skip (parse error):", filePath);
    continue;
  }
  const oldSlug = data._slug || base;
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  data._slug = newSlug;
  // Keep the display name in sync when it mirrored the old slug.
  if (data.name && data.name === oldSlug) data.name = newSlug;
  const newPath = path.join(HTML_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under dir.
function walkDir(dir, fn) {
  for (const e of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: rewrite quoted references to renamed slugs across all content.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const next = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (next !== content) {
      content = next;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. HTML-Slugs normalisiert und Referenzen aktualisiert.");
|
||||
125
scripts/normalize-image-iframe-gallery-slugs.mjs
Normal file
125
scripts/normalize-image-iframe-gallery-slugs.mjs
Normal file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes image_gallery, image and iframe entries: _slug and file name
 * become prefix + slugify(name).
 * - image_gallery: image_gallery-{name}
 * - image:         image-{name}
 * - iframe:        iframe-{name}
 * Updates all references in content/de (incl. row1Content in pages/posts).
 * Note: image_gallery.images references img (assets), not the image component.
 *
 * Fix over the previous version: files that fail to parse are skipped in the
 * rename pass instead of crashing on `data._slug = …` with a null `data`.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");

// Lowercase, dash-separated, [a-z0-9-] slug; "untitled" for empty results.
function slugify(value) {
  if (value == null || value === "") return "";
  return String(value)
    .replace(/_/g, "-")
    .replace(/\s+/g, "-")
    .replace(/[^a-zA-Z0-9-]+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "")
    .toLowerCase() || "untitled";
}

// Remove the first matching prefix (case-insensitive) plus leading dashes;
// fall back to the original name when stripping leaves nothing.
function stripPrefix(name, prefixes) {
  let s = name || "";
  for (const p of prefixes) {
    if (s.toLowerCase().startsWith(p.toLowerCase())) {
      s = s.slice(p.length).replace(/^-+/, "");
      break;
    }
  }
  return s || name;
}

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

/**
 * Rename every .json5 file in collectionDir to prefix + slugify(name) and
 * return the Map of oldSlug → newSlug. Missing directories yield an empty map.
 */
function processCollection(collectionDir, prefix, namePrefixes = []) {
  if (!fs.existsSync(collectionDir)) return new Map();
  const files = fs.readdirSync(collectionDir).filter((f) => f.endsWith(".json5")).sort();
  const oldToNew = new Map();
  const used = new Set();

  // Pass 1: compute mapping, suffixing -1, -2, … on collision.
  for (const file of files) {
    const data = parseJson5(fs.readFileSync(path.join(collectionDir, file), "utf8"));
    const oldSlug = data?._slug || file.replace(/\.json5$/, "");
    const name = data?.name ?? oldSlug;
    const rest = slugify(stripPrefix(name, namePrefixes));
    let newSlug = prefix + rest;
    let n = 0;
    while (used.has(newSlug)) {
      n++;
      newSlug = prefix + rest + "-" + n;
    }
    used.add(newSlug);
    oldToNew.set(oldSlug, newSlug);
  }

  // Pass 2: rewrite files under their new slugs.
  for (const file of files) {
    const filePath = path.join(collectionDir, file);
    const data = parseJson5(fs.readFileSync(filePath, "utf8"));
    if (!data) {
      // Unparseable file: leave it in place rather than crash.
      console.warn("Skip (parse error):", filePath);
      continue;
    }
    const oldSlug = data._slug || file.replace(/\.json5$/, "");
    const newSlug = oldToNew.get(oldSlug);
    if (!newSlug) continue;
    data._slug = newSlug;
    const newPath = path.join(collectionDir, newSlug + ".json5");
    fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
    if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
  }

  return oldToNew;
}

// Order: image_gallery, image, iframe (independent of each other;
// image_gallery only references img assets).
const mapGallery = processCollection(path.join(CONTENT_DE, "image_gallery"), "image_gallery-", ["component-image-gallery-", "component-image-gallery"]);
const mapImage = processCollection(path.join(CONTENT_DE, "image"), "image-", ["component-image-"]);
const mapIframe = processCollection(path.join(CONTENT_DE, "iframe"), "iframe-", ["component-iframe-"]);

const allMaps = [mapGallery, mapImage, mapIframe];
console.log("image_gallery:", Object.fromEntries(mapGallery));
console.log("image:", Object.fromEntries(mapImage));
console.log("iframe:", Object.fromEntries(mapIframe));

// Recursively visit every .json5 file under dir.
function walkDir(dir, fn) {
  for (const e of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Rewrite quoted references to renamed slugs across all content.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const oldToNew of allMaps) {
    for (const [oldSlug, newSlug] of oldToNew) {
      if (oldSlug === newSlug) continue;
      const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const next = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
      if (next !== content) {
        content = next;
        changed = true;
      }
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. image_gallery-, image-, iframe-Slugs normalisiert und Referenzen aktualisiert.");
|
||||
103
scripts/normalize-markdown-slugs.mjs
Normal file
103
scripts/normalize-markdown-slugs.mjs
Normal file
@@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes all markdown files: _slug and file name become
 * "markdown-" + descriptive part.
 * - component-markdown-X → markdown-X
 * - X-markdown / X-Markdown → markdown-X
 * - markdown-X stays markdown-X (with _ → -)
 * - everything else → markdown-X
 * Updates all references in content/de.
 *
 * Fixes over the previous version:
 * - removed an unreachable duplicate `-markdown` suffix check in the
 *   else-branch of normalize() (the else-if chain already covers it)
 * - the rename pass reads each file once instead of twice
 * - files that fail to parse are skipped instead of crashing on
 *   `data._slug = …` with a null `data`
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const MARKDOWN_DIR = path.join(CONTENT_DE, "markdown");

// Map a file base name to its canonical "markdown-…" slug.
function normalize(base) {
  let rest = base.replace(/_/g, "-").toLowerCase();
  if (rest.startsWith("component-markdown-")) {
    rest = rest.slice("component-markdown-".length);
  } else if (rest.startsWith("markdown-")) {
    rest = rest.slice("markdown-".length);
  } else if (rest.endsWith("-markdown")) {
    rest = rest.slice(0, -"-markdown".length);
  }
  return "markdown-" + rest;
}

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(MARKDOWN_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const used = new Set();

// Pass 1: compute old → new slug mapping, suffixing -1, -2, … on collision.
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const data = parseJson5(fs.readFileSync(path.join(MARKDOWN_DIR, file), "utf8"));
  const oldSlug = data?._slug || base;
  let newSlug = normalize(base);
  let n = 0;
  while (used.has(newSlug)) {
    n++;
    newSlug = normalize(base) + "-" + n;
  }
  used.add(newSlug);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Markdown-Slug-Map (Anzahl:", oldToNew.size, ")");
for (const [oldSlug, newSlug] of oldToNew) {
  if (oldSlug !== newSlug) console.log(" ", oldSlug, "→", newSlug);
}

// Pass 2: rewrite each markdown file under its new slug (single read per file).
for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const filePath = path.join(MARKDOWN_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Unparseable file: leave it in place rather than crash.
    console.warn("Skip (parse error):", filePath);
    continue;
  }
  const oldSlug = data._slug || base;
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  data._slug = newSlug;
  const newPath = path.join(MARKDOWN_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under dir.
function walkDir(dir, fn) {
  for (const e of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: rewrite quoted references to renamed slugs across all content.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const next = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (next !== content) {
      content = next;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Markdown-Slugs normalisiert und Referenzen aktualisiert.");
|
||||
169
scripts/normalize-quote-youtube-slugs.mjs
Normal file
169
scripts/normalize-quote-youtube-slugs.mjs
Normal file
@@ -0,0 +1,169 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes quote and youtube_video entries: _slug and file name become
 * prefix + readable part.
 * - quote:         quote-{slugify(author)}
 * - youtube_video: youtube-video-{slugify(title)|slugify(id without prefix)|youtubeId}
 * Updates all references in content/de.
 *
 * Fix over the previous version: files that fail to parse are skipped in the
 * rename passes instead of crashing on `data._slug = …` with a null `data`.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");

// Lowercase, dash-separated, [a-z0-9-] slug; "untitled" for empty results.
function slugify(value) {
  if (value == null || value === "") return "";
  return String(value)
    .replace(/_/g, "-")
    .replace(/\s+/g, "-")
    .replace(/[^a-zA-Z0-9-]+/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "")
    .toLowerCase() || "untitled";
}

// Remove the first matching prefix (case-insensitive) plus leading dashes.
function stripPrefix(s, prefixes) {
  let t = s || "";
  for (const p of prefixes) {
    if (t.toLowerCase().startsWith(p.toLowerCase())) {
      t = t.slice(p.length).replace(/^-+/, "");
      break;
    }
  }
  return t;
}

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

// ─── Quote: "quote-" + slugify(author) ───────────────────────────────────
function processQuote(quoteDir) {
  if (!fs.existsSync(quoteDir)) return new Map();
  const files = fs.readdirSync(quoteDir).filter((f) => f.endsWith(".json5")).sort();
  const oldToNew = new Map();
  const used = new Set();

  // Pass 1: mapping with collision suffixes -1, -2, …
  for (const file of files) {
    const data = parseJson5(fs.readFileSync(path.join(quoteDir, file), "utf8"));
    const oldSlug = data?._slug || file.replace(/\.json5$/, "");
    // Fallbacks: author → first 50 chars of the quote text → "zitat".
    const rest =
      slugify(data?.author ?? "") ||
      slugify((data?.quote ?? "").slice(0, 50)) ||
      "zitat";
    let newSlug = "quote-" + rest;
    let n = 0;
    while (used.has(newSlug)) {
      n++;
      newSlug = "quote-" + rest + "-" + n;
    }
    used.add(newSlug);
    oldToNew.set(oldSlug, newSlug);
  }

  // Pass 2: rewrite files under their new slugs.
  for (const file of files) {
    const filePath = path.join(quoteDir, file);
    const data = parseJson5(fs.readFileSync(filePath, "utf8"));
    if (!data) {
      // Unparseable file: leave it in place rather than crash.
      console.warn("Skip (parse error):", filePath);
      continue;
    }
    const oldSlug = data._slug || file.replace(/\.json5$/, "");
    const newSlug = oldToNew.get(oldSlug);
    if (!newSlug) continue;
    data._slug = newSlug;
    const newPath = path.join(quoteDir, newSlug + ".json5");
    fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
    if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
  }
  return oldToNew;
}

// ─── youtube_video: prefix + title, id (without prefix) or youtubeId ─────
const YOUTUBE_ID_PREFIXES = [
  "youtube-video - ",
  "component-youtube-",
  "component-video-",
  "https-www-youtube-com-watch-v-",
  "https-youtu-be-",
];

function processYoutubeVideo(ytDir) {
  if (!fs.existsSync(ytDir)) return new Map();
  const files = fs.readdirSync(ytDir).filter((f) => f.endsWith(".json5")).sort();
  const oldToNew = new Map();
  const used = new Set();

  // Pass 1: mapping with collision suffixes -1, -2, …
  for (const file of files) {
    const data = parseJson5(fs.readFileSync(path.join(ytDir, file), "utf8"));
    const oldSlug = data?._slug || file.replace(/\.json5$/, "");
    const id = data?.id ?? "";
    const title = data?.title ?? "";
    const youtubeId = data?.youtubeId ?? "";
    // Most readable source first: title, then cleaned id/slug, then raw youtubeId.
    const rest =
      slugify(title) ||
      slugify(stripPrefix(id, YOUTUBE_ID_PREFIXES)) ||
      slugify(stripPrefix(oldSlug, YOUTUBE_ID_PREFIXES)) ||
      youtubeId.toLowerCase() ||
      "untitled";
    let newSlug = "youtube-video-" + rest;
    let n = 0;
    while (used.has(newSlug)) {
      n++;
      newSlug = "youtube-video-" + rest + "-" + n;
    }
    used.add(newSlug);
    oldToNew.set(oldSlug, newSlug);
  }

  // Pass 2: rewrite files under their new slugs.
  for (const file of files) {
    const filePath = path.join(ytDir, file);
    const data = parseJson5(fs.readFileSync(filePath, "utf8"));
    if (!data) {
      // Unparseable file: leave it in place rather than crash.
      console.warn("Skip (parse error):", filePath);
      continue;
    }
    const oldSlug = data._slug || file.replace(/\.json5$/, "");
    const newSlug = oldToNew.get(oldSlug);
    if (!newSlug) continue;
    data._slug = newSlug;
    // Keep the internal id in sync when it mirrored the old slug.
    if (data.id && data.id === oldSlug) data.id = newSlug;
    const newPath = path.join(ytDir, newSlug + ".json5");
    fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
    if (path.basename(newPath) !== file) fs.unlinkSync(filePath);
  }
  return oldToNew;
}

// ─── Main ────────────────────────────────────────────────────────────────
const mapQuote = processQuote(path.join(CONTENT_DE, "quote"));
const mapYoutube = processYoutubeVideo(path.join(CONTENT_DE, "youtube_video"));

const allMaps = [mapQuote, mapYoutube];
console.log("quote:", Object.fromEntries(mapQuote));
console.log("youtube_video:", Object.fromEntries(mapYoutube));

// Recursively visit every .json5 file under dir.
function walkDir(dir, fn) {
  for (const e of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Rewrite quoted references to renamed slugs across all content.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const oldToNew of allMaps) {
    for (const [oldSlug, newSlug] of oldToNew) {
      if (oldSlug === newSlug) continue;
      const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const next = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
      if (next !== content) {
        content = next;
        changed = true;
      }
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Quote- und youtube-video-Slugs normalisiert und Referenzen aktualisiert.");
|
||||
55
scripts/normalize-text-fragment-slugs.mjs
Normal file
55
scripts/normalize-text-fragment-slugs.mjs
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env node
/**
 * Normalizes all text_fragment files: "text-fragment-" prefix on _slug and
 * file name. Updates references in content/de (textFragments arrays in
 * searchable_text).
 *
 * Fixes over the previous version:
 * - idempotent: slugs that already carry the "text-fragment-" prefix are
 *   skipped, so re-running the script no longer double-prefixes them
 * - unparseable files are skipped instead of being overwritten with a
 *   nearly empty object ({...null} silently discards all fields)
 * - a missing searchable_text directory no longer crashes the script
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const TEXT_FRAGMENT_DIR = path.join(CONTENT_DE, "text_fragment");
const PREFIX = "text-fragment-";

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(TEXT_FRAGMENT_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();

for (const file of files) {
  const base = file.replace(/\.json5$/, "");
  const filePath = path.join(TEXT_FRAGMENT_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Unparseable file: leave it untouched rather than destroy its contents.
    console.warn("Skip (parse error):", filePath);
    continue;
  }
  const oldSlug = data._slug ?? base;
  if (oldSlug.startsWith(PREFIX)) continue; // already migrated
  const newSlug = PREFIX + oldSlug;
  oldToNew.set(oldSlug, newSlug);

  const newData = { ...data, _slug: newSlug };
  const newFile = newSlug + ".json5";
  fs.writeFileSync(path.join(TEXT_FRAGMENT_DIR, newFile), JSON.stringify(newData, null, 2) + "\n", "utf8");
  if (newFile !== file) {
    fs.unlinkSync(filePath);
  }
}

// Replace references inside searchable_text (textFragments arrays).
const searchableDir = path.join(CONTENT_DE, "searchable_text");
if (fs.existsSync(searchableDir)) {
  for (const file of fs.readdirSync(searchableDir).filter((f) => f.endsWith(".json5"))) {
    const filePath = path.join(searchableDir, file);
    let raw = fs.readFileSync(filePath, "utf8");
    for (const [oldSlug, newSlug] of oldToNew) {
      const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      raw = raw.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    }
    fs.writeFileSync(filePath, raw, "utf8");
  }
}

console.log("Normalized", oldToNew.size, "text_fragment slugs:", [...oldToNew.entries()].map(([a, b]) => a + " -> " + b).join(", "));
|
||||
63
scripts/page-add-prefix.mjs
Normal file
63
scripts/page-add-prefix.mjs
Normal file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env node
/**
 * Adds the "page-" prefix to all pages: file name and _slug become
 * "page-" + previous slug.
 * Updates references in content/de navigation files (only "links" arrays can
 * contain page slugs; each slug is replaced individually to avoid collisions
 * with tag names etc.).
 *
 * Fix over the previous version: unparseable page files are skipped instead
 * of crashing on `data._slug = …` with a null `data` (which would also have
 * left the rename half-done).
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const PAGE_DIR = path.join(CONTENT_DE, "page");

// Lenient JSON parse: returns null instead of throwing.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(PAGE_DIR).filter((f) => f.endsWith(".json5"));
const oldToNew = new Map();

// Collect every page that is not yet prefixed (idempotent on re-run).
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  if (oldSlug.startsWith("page-")) continue;
  oldToNew.set(oldSlug, "page-" + oldSlug);
}

console.log("Präfix page- für", oldToNew.size, "Pages");

// Rewrite each page file under its prefixed slug and remove the old file.
for (const [oldSlug, newSlug] of oldToNew) {
  const filePath = path.join(PAGE_DIR, oldSlug + ".json5");
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Unparseable file: leave it in place rather than crash mid-migration.
    console.warn("Skip (parse error):", filePath);
    continue;
  }
  data._slug = newSlug;
  fs.writeFileSync(path.join(PAGE_DIR, newSlug + ".json5"), JSON.stringify(data, null, 2) + "\n", "utf8");
  fs.unlinkSync(filePath);
}

// Update page references inside navigation files.
const NAV_DIR = path.join(CONTENT_DE, "navigation");
for (const file of fs.readdirSync(NAV_DIR).filter((f) => f.endsWith(".json5"))) {
  const filePath = path.join(NAV_DIR, file);
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    // A page literally named "links" must not clobber JSON "links": keys,
    // hence the negative lookahead excluding key positions.
    const re = oldSlug === "links"
      ? new RegExp('"links"(?!\\s*:)', "g")
      : new RegExp('"' + escaped + '"', "g");
    const next = content.replace(re, '"' + newSlug + '"');
    if (next !== content) {
      content = next;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
}

console.log("Fertig. Page-Präfix gesetzt und Referenzen aktualisiert.");
|
||||
125
scripts/post-add-created-from-export.mjs
Normal file
125
scripts/post-add-created-from-export.mjs
Normal file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Liest contentful-export.json und trägt sys.createdAt der Post-Einträge
|
||||
* als Feld "created" in die bestehenden Dateien unter content/de/post/ ein.
|
||||
*
|
||||
* Zuordnung: Unser _slug = "post-" + slugify(fields.slug ohne führendes /)
|
||||
* Aufruf: node scripts/post-add-created-from-export.mjs [Pfad-zum-Export]
|
||||
* Default: ../www.windwiderstand.de/contentful-export.json
|
||||
*/
|
||||
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const ROOT = path.resolve(__dirname, "..");
|
||||
const POST_DIR = path.join(ROOT, "content", "de", "post");
|
||||
const DEFAULT_EXPORT = path.join(ROOT, "..", "www.windwiderstand.de", "contentful-export.json");
|
||||
|
||||
/**
 * Normalizes a Contentful slug/id into a file-safe slug.
 * Empty/null input yields ""; non-empty input that reduces to nothing after
 * normalization yields "untitled".
 */
function slugify(value) {
  if (value == null || value === "") return "";
  let s = String(value);
  s = s.replace(/^\//, "");                // drop a single leading slash
  s = s.replace(/\//g, "-");               // path separators -> dashes
  s = s.replace(/[^a-zA-Z0-9_-]+/g, "-");  // any unsafe run -> one dash
  s = s.replace(/-+/g, "-");               // collapse dash runs
  s = s.replace(/^-|-$/g, "");             // trim edge dashes
  return s === "" ? "untitled" : s;
}
|
||||
|
||||
/**
 * Derives our post slug ("post-" + normalized slug) from a Contentful entry.
 * Falls back from fields.slug to fields.id; returns null when neither yields
 * a usable slug.
 */
function postSlugFromEntry(entry) {
  const fields = entry?.fields || {};
  const raw = fields.slug ?? fields.id ?? "";
  const normalized = slugify(raw);
  if (!normalized) return null;
  return `post-${normalized}`;
}
|
||||
|
||||
/**
 * Reads the Contentful export, maps each post slug to its sys.createdAt and
 * writes that timestamp as "created" into the matching file under
 * content/de/post/. Files that are missing, unparsable, unmatched or already
 * up to date are counted as skipped.
 */
function main() {
  const exportPath = process.argv[2] || DEFAULT_EXPORT;
  if (!fs.existsSync(exportPath)) {
    console.error("Export nicht gefunden:", exportPath);
    process.exit(1);
  }

  const data = JSON.parse(fs.readFileSync(exportPath, "utf8"));
  const items = data?.byType?.post?.items ?? [];
  // Our post slug -> Contentful creation timestamp.
  const slugToCreated = new Map();
  for (const entry of items) {
    const slug = postSlugFromEntry(entry);
    const createdAt = entry?.sys?.createdAt;
    if (slug && createdAt) slugToCreated.set(slug, createdAt);
  }
  // Fix: the old message claimed all %d posts carried createdAt; only
  // slugToCreated.size of them actually do. Report the totals separately.
  console.log("Export: %d Posts, %d eindeutige Slugs mit createdAt", items.length, slugToCreated.size);

  if (!fs.existsSync(POST_DIR)) {
    console.error("Verzeichnis nicht gefunden:", POST_DIR);
    process.exit(1);
  }

  const files = fs.readdirSync(POST_DIR).filter((f) => f.endsWith(".json5"));
  let updated = 0;
  let skipped = 0;
  for (const file of files) {
    const filePath = path.join(POST_DIR, file);
    const raw = fs.readFileSync(filePath, "utf8");
    let obj;
    try {
      obj = JSON.parse(raw);
    } catch {
      console.warn("Überspringe (kein gültiges JSON):", file);
      skipped++;
      continue;
    }
    const _slug = obj._slug;
    if (!_slug) {
      console.warn("Kein _slug in:", file);
      skipped++;
      continue;
    }
    const created = slugToCreated.get(_slug);
    // Skip when the export has no timestamp, or the file is already current.
    if (!created || obj.created === created) {
      skipped++;
      continue;
    }
    obj.created = created;
    // Rewrite the object so "created" lands in a stable, human-friendly
    // position; keys not in the template keep their relative order at the end.
    const order = [
      "_slug",
      "slug",
      "linkName",
      "headline",
      "subheadline",
      "excerpt",
      "created",
      "postImage",
      "postTag",
      "important",
      "date",
      "content",
      "showCommentSection",
      "row1JustifyContent",
      "row1AlignItems",
      "row1Content",
      "seoTitle",
      "seoDescription",
      "seoMetaRobots",
    ];
    const ordered = {};
    for (const k of order) {
      if (k in obj) ordered[k] = obj[k];
    }
    for (const k of Object.keys(obj)) {
      if (!(k in ordered)) ordered[k] = obj[k];
    }
    fs.writeFileSync(filePath, JSON.stringify(ordered, null, 2) + "\n", "utf8");
    console.log(" + created: %s -> %s", _slug, created);
    updated++;
  }
  console.log("Fertig: %d aktualisiert, %d übersprungen.", updated, skipped);
}

main();
|
||||
66
scripts/post-add-prefix.mjs
Normal file
66
scripts/post-add-prefix.mjs
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env node
/**
 * Adds the "post-" prefix to every post: file name and _slug become
 * "post-" + previous slug.
 * Updates all references in content/de.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const POST_DIR = path.join(CONTENT_DE, "post");

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(POST_DIR).filter((f) => f.endsWith(".json5"));
const oldToNew = new Map();

for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  if (oldSlug.startsWith("post-")) continue; // already migrated
  oldToNew.set(oldSlug, "post-" + oldSlug);
}

console.log("Präfix post- für", oldToNew.size, "Posts");

// Rewrite each post file under its prefixed slug and remove the old file.
for (const [oldSlug, newSlug] of oldToNew) {
  const filePath = path.join(POST_DIR, oldSlug + ".json5");
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Fix: parseJson5 returns null on invalid JSON; previously this crashed
    // with a TypeError on `data._slug`. Skip the file instead.
    console.warn("Überspringe (kein gültiges JSON):", filePath);
    continue;
  }
  data._slug = newSlug;
  const newPath = path.join(POST_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const newContent = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (newContent !== content) {
      content = newContent;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Post-Präfix gesetzt und Referenzen aktualisiert.");
|
||||
93
scripts/rename-campaigns.mjs
Normal file
93
scripts/rename-campaigns.mjs
Normal file
@@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env node
/**
 * Renames content/de/campaign/*.json5: _slug and file name become
 * "campaign-" + slugify(campaignName).
 * Updates references in content/de/campaigns/*.json5 (and any other .json5
 * under content/de).
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const CAMPAIGN_DIR = path.join(CONTENT_DE, "campaign");

// Lower-case, dash-separated slug; anything outside [a-z0-9-] becomes a dash.
function slugify(s) {
  if (!s || typeof s !== "string") return "";
  return s
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
    .replace(/[^a-z0-9-]/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const campaignFiles = fs.readdirSync(CAMPAIGN_DIR).filter((f) => f.endsWith(".json5"));
const oldToNew = new Map();
const newSlugs = new Set();

// Pass 1: compute the new slug for every campaign, de-duplicating with -1, -2, …
for (const file of campaignFiles) {
  const oldSlug = file.replace(/\.json5$/, "");
  const raw = fs.readFileSync(path.join(CAMPAIGN_DIR, file), "utf8");
  const data = parseJson5(raw);
  if (!data) continue;
  // Robustness fix: campaignName might not be a string; coerce before trim().
  const name = String(data.campaignName || oldSlug).trim();
  let base = slugify(name) || slugify(oldSlug);
  if (!base) base = "campaign";
  let newSlug = "campaign-" + base;
  let n = 0;
  while (newSlugs.has(newSlug)) {
    n++;
    newSlug = "campaign-" + base + "-" + n;
  }
  newSlugs.add(newSlug);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: rewrite each campaign file under its new slug and remove the old file.
for (const file of campaignFiles) {
  const oldSlug = file.replace(/\.json5$/, "");
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  const filePath = path.join(CAMPAIGN_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  // Fix: parseJson5 can return null; previously `data._slug` then threw.
  if (!data) continue;
  data._slug = newSlug;
  const newPath = path.join(CAMPAIGN_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (newPath !== filePath) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const regex = new RegExp('"' + oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + '"', "g");
    // replace() resets lastIndex on /g regexes, so test-then-replace is safe.
    if (regex.test(content)) {
      content = content.replace(regex, '"' + newSlug + '"');
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Campaigns umbenannt und Referenzen aktualisiert.");
|
||||
94
scripts/rename-link-lists.mjs
Normal file
94
scripts/rename-link-lists.mjs
Normal file
@@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env node
/**
 * Renames content/de/link_list/*.json5: _slug and file name become
 * "link-list-" + slugify(headline).
 * Collisions (e.g. identical headline "Links") get suffixes -1, -2, …
 * Updates all references in content/de (pages, posts, …).
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const LINK_LIST_DIR = path.join(CONTENT_DE, "link_list");

// Lower-case, dash-separated slug with German transliteration.
function slugify(s) {
  if (!s || typeof s !== "string") return "";
  return s
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
    // NOTE(review): these character classes look like they contain both
    // composed and decomposed umlaut forms; if one is decomposed
    // (base letter + U+0308) the class also matches the plain letter —
    // verify against the file's actual bytes.
    .replace(/[ää]/g, "ae")
    .replace(/[öö]/g, "oe")
    .replace(/[üü]/g, "ue")
    .replace(/ß/g, "ss")
    .replace(/[^a-z0-9-]/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(LINK_LIST_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const baseCount = new Map();

// Pass 1: compute new slugs; duplicate headlines get -1, -2, … suffixes.
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  const raw = fs.readFileSync(path.join(LINK_LIST_DIR, file), "utf8");
  const data = parseJson5(raw);
  if (!data) continue;
  // Robustness fix: headline might not be a string; coerce before trim().
  const headline = String(data.headline || oldSlug).trim();
  let base = slugify(headline) || slugify(oldSlug);
  if (!base) base = "link-list";
  const count = (baseCount.get(base) || 0) + 1;
  baseCount.set(base, count);
  const newSlug = count === 1 ? "link-list-" + base : "link-list-" + base + "-" + (count - 1);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: rewrite each link list under its new slug and remove the old file.
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  const filePath = path.join(LINK_LIST_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  // Fix: parseJson5 can return null; previously `data._slug` then threw.
  if (!data) continue;
  data._slug = newSlug;
  const newPath = path.join(LINK_LIST_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (newPath !== filePath) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const regex = new RegExp('"' + oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + '"', "g");
    // replace() resets lastIndex on /g regexes, so test-then-replace is safe.
    if (regex.test(content)) {
      content = content.replace(regex, '"' + newSlug + '"');
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Link-Listen umbenannt und Referenzen aktualisiert.");
|
||||
100
scripts/rename-links.mjs
Normal file
100
scripts/rename-links.mjs
Normal file
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env node
/**
 * Renames content/de/link/*.json5: _slug and file name become
 * "link-" + slugify(name).
 * Updates all references in content/de (navigation, link_list, etc.).
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const LINK_DIR = path.join(CONTENT_DE, "link");

// Lower-case, dash-separated slug with German transliteration.
function slugify(s) {
  if (!s || typeof s !== "string") return "";
  return s
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
    // NOTE(review): these character classes look like they contain both
    // composed and decomposed umlaut forms; if one is decomposed
    // (base letter + U+0308) the class also matches the plain letter —
    // verify against the file's actual bytes.
    .replace(/[ää]/g, "ae")
    .replace(/[öö]/g, "oe")
    .replace(/[üü]/g, "ue")
    .replace(/ß/g, "ss")
    .replace(/[^a-z0-9-]/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const linkFiles = fs.readdirSync(LINK_DIR).filter((f) => f.endsWith(".json5"));
const oldToNew = new Map();
const newSlugs = new Set();

// Pass 1: compute the new slug for every link, de-duplicating with -1, -2, …
for (const file of linkFiles) {
  const oldSlug = file.replace(/\.json5$/, "");
  const raw = fs.readFileSync(path.join(LINK_DIR, file), "utf8");
  const data = parseJson5(raw);
  if (!data) continue;
  // Robustness fix: name/linkName might not be strings; coerce before trim().
  const name = String(data.name || data.linkName || oldSlug).trim();
  let base = slugify(name) || slugify(oldSlug);
  if (!base) base = "link";
  let newSlug = "link-" + base;
  let n = 0;
  while (newSlugs.has(newSlug)) {
    n++;
    newSlug = "link-" + base + "-" + n;
  }
  newSlugs.add(newSlug);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: write new link files, delete the old ones.
for (const file of linkFiles) {
  const oldSlug = file.replace(/\.json5$/, "");
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  const filePath = path.join(LINK_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  // Fix: parseJson5 can return null; previously `data._slug` then threw.
  if (!data) continue;
  data._slug = newSlug;
  // Keep the internal link target in sync with the new slug.
  // NOTE(review): this unconditionally overwrites any previous `internal`
  // value — confirm that is intended for all link documents.
  data.internal = newSlug;
  const newPath = path.join(LINK_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (newPath !== filePath) fs.unlinkSync(filePath);
}

// Pass 3: replace references in all content/de/**/*.json5 files.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const regex = new RegExp('"' + oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") + '"', "g");
    // replace() resets lastIndex on /g regexes, so test-then-replace is safe.
    if (regex.test(content)) {
      content = content.replace(regex, '"' + newSlug + '"');
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Links umbenannt und Referenzen aktualisiert.");
|
||||
92
scripts/rename-posts.mjs
Normal file
92
scripts/rename-posts.mjs
Normal file
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env node
/**
 * Renames content/de/post/*.json5: _slug and file name = slug (without the
 * leading /). Collisions get suffixes -1, -2, …
 * Updates all references in content/de.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const POST_DIR = path.join(CONTENT_DE, "post");

// Normalizes a post's "slug" field into a file-safe slug: strips the leading
// slash and a trailing .htm/.html, maps underscores/unsafe chars to dashes.
function slugFromPost(s) {
  if (!s || typeof s !== "string") return "";
  return s
    .trim()
    .replace(/^\//, "")
    .replace(/\.html?$/i, "")
    .replace(/_/g, "-")
    .replace(/[^a-z0-9-]/gi, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(POST_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const baseCount = new Map();

// Pass 1: compute new slugs; duplicate bases get -1, -2, … suffixes.
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  const raw = fs.readFileSync(path.join(POST_DIR, file), "utf8");
  const data = parseJson5(raw);
  if (!data) continue;
  const slugVal = data.slug || "";
  let base = slugFromPost(slugVal) || oldSlug;
  if (!base) base = "post";
  const count = (baseCount.get(base) || 0) + 1;
  baseCount.set(base, count);
  const newSlug = count === 1 ? base : base + "-" + (count - 1);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Slug-Map (Anzahl:", oldToNew.size, "):", Object.fromEntries([...oldToNew.entries()].slice(0, 10)), "...");

// Pass 2: rewrite each post under its new slug and remove the old file.
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  const newSlug = oldToNew.get(oldSlug);
  if (!newSlug) continue;
  const filePath = path.join(POST_DIR, file);
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  // Fix: parseJson5 can return null; previously `data._slug` then threw.
  if (!data) continue;
  data._slug = newSlug;
  const newPath = path.join(POST_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  if (newPath !== filePath) fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const newContent = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (newContent !== content) {
      content = newContent;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Posts umbenannt und Referenzen aktualisiert.");
|
||||
95
scripts/rename-tags-by-name.mjs
Normal file
95
scripts/rename-tags-by-name.mjs
Normal file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env node
/**
 * Renames all tags: _slug and file name = "tag-" + slugify(name).
 * Collisions get suffixes -1, -2, …
 * Updates all references in content/de.
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const TAG_DIR = path.join(CONTENT_DE, "tag");

// Lower-case, dash-separated slug with German transliteration ("&" -> "und").
function slugify(s) {
  if (!s || typeof s !== "string") return "";
  return s
    .trim()
    .toLowerCase()
    .replace(/\s+/g, "-")
    .replace(/&/g, "und")
    // NOTE(review): these character classes look like they contain both
    // composed and decomposed umlaut forms; if one is decomposed
    // (base letter + U+0308) the class also matches the plain letter —
    // verify against the file's actual bytes.
    .replace(/[ää]/g, "ae")
    .replace(/[öö]/g, "oe")
    .replace(/[üü]/g, "ue")
    .replace(/ß/g, "ss")
    .replace(/[^a-z0-9-]/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "");
}

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(TAG_DIR).filter((f) => f.endsWith(".json5")).sort();
const oldToNew = new Map();
const baseCount = new Map();

// Pass 1: compute new slugs; duplicate names get -1, -2, … suffixes.
for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  const raw = fs.readFileSync(path.join(TAG_DIR, file), "utf8");
  const data = parseJson5(raw);
  if (!data) continue;
  // Robustness fix: name might not be a string; coerce before trim().
  const name = String(data.name || oldSlug.replace(/^tag-/, "")).trim();
  let base = slugify(name) || oldSlug.replace(/^tag-/, "");
  if (!base) base = "tag";
  const count = (baseCount.get(base) || 0) + 1;
  baseCount.set(base, count);
  const newSlug = count === 1 ? "tag-" + base : "tag-" + base + "-" + (count - 1);
  oldToNew.set(oldSlug, newSlug);
}

console.log("Tag-Slug-Map:", Object.fromEntries(oldToNew));

// Pass 2: rewrite each tag under its new slug and remove the old file.
for (const [oldSlug, newSlug] of oldToNew) {
  if (oldSlug === newSlug) continue;
  const filePath = path.join(TAG_DIR, oldSlug + ".json5");
  if (!fs.existsSync(filePath)) continue;
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  // Fix: unlike pass 1, this parse was previously unguarded; parseJson5
  // returns null on invalid JSON and `data._slug` then threw.
  if (!data) continue;
  data._slug = newSlug;
  const newPath = path.join(TAG_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Pass 3: replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    if (oldSlug === newSlug) continue;
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const newContent = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (newContent !== content) {
      content = newContent;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Tags nach Name umbenannt und Referenzen aktualisiert.");
|
||||
66
scripts/tag-add-prefix.mjs
Normal file
66
scripts/tag-add-prefix.mjs
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env node
/**
 * Adds the "tag-" prefix to every tag: file name and _slug become
 * "tag-" + previous slug.
 * Updates all references in content/de (postTag, filterByTag, tags,
 * tagWhitelist).
 */

import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const CONTENT_DE = path.join(__dirname, "..", "content", "de");
const TAG_DIR = path.join(CONTENT_DE, "tag");

// NOTE: despite the name this parses strict JSON only; returns null on failure.
function parseJson5(str) {
  try {
    return JSON.parse(str);
  } catch {
    return null;
  }
}

const files = fs.readdirSync(TAG_DIR).filter((f) => f.endsWith(".json5"));
const oldToNew = new Map();

for (const file of files) {
  const oldSlug = file.replace(/\.json5$/, "");
  if (oldSlug.startsWith("tag-")) continue; // already migrated
  oldToNew.set(oldSlug, "tag-" + oldSlug);
}

console.log("Präfix tag- für", oldToNew.size, "Tags");

// Rewrite each tag file under its prefixed slug and remove the old file.
for (const [oldSlug, newSlug] of oldToNew) {
  const filePath = path.join(TAG_DIR, oldSlug + ".json5");
  const data = parseJson5(fs.readFileSync(filePath, "utf8"));
  if (!data) {
    // Fix: parseJson5 returns null on invalid JSON; previously this crashed
    // with a TypeError on `data._slug`. Skip the file instead.
    console.warn("Überspringe (kein gültiges JSON):", filePath);
    continue;
  }
  data._slug = newSlug;
  const newPath = path.join(TAG_DIR, newSlug + ".json5");
  fs.writeFileSync(newPath, JSON.stringify(data, null, 2) + "\n", "utf8");
  fs.unlinkSync(filePath);
}

// Recursively visit every .json5 file under `dir`.
function walkDir(dir, fn) {
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const e of entries) {
    const full = path.join(dir, e.name);
    if (e.isDirectory()) walkDir(full, fn);
    else if (e.name.endsWith(".json5")) fn(full);
  }
}

// Replace quoted references to old slugs everywhere under content/de.
walkDir(CONTENT_DE, (filePath) => {
  let content = fs.readFileSync(filePath, "utf8");
  let changed = false;
  for (const [oldSlug, newSlug] of oldToNew) {
    const escaped = oldSlug.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const newContent = content.replace(new RegExp('"' + escaped + '"', "g"), '"' + newSlug + '"');
    if (newContent !== content) {
      content = newContent;
      changed = true;
    }
  }
  if (changed) fs.writeFileSync(filePath, content, "utf8");
});

console.log("Fertig. Tag-Präfix gesetzt und Referenzen aktualisiert.");
|
||||
Reference in New Issue
Block a user