Support embedded compendium folders in pack scripts (foundryvtt#8136)
In3luki committed Jun 5, 2023
1 parent 51366f2 commit f3122ed
Showing 4 changed files with 172 additions and 48 deletions.
46 changes: 39 additions & 7 deletions build/lib/compendium-pack.ts
@@ -8,9 +8,9 @@ import { isObject, setHasElement, sluggify, tupleHasValue } from "@util/misc.ts"
import fs from "fs";
import path from "path";
import coreIconsJSON from "../core-icons.json" assert { type: "json" };
import { PackError } from "./helpers.ts";
import { PackError, getFilesRecursively } from "./helpers.ts";
import { PackEntry } from "./types.ts";
import { LevelDatabase } from "./level-database.ts";
import { DBFolder, LevelDatabase } from "./level-database.ts";

interface PackMetadata {
system: string;
@@ -54,6 +54,7 @@ class CompendiumPack {
documentType: string;
systemId: string;
data: PackEntry[];
folders: DBFolder[];

static outDir = path.resolve(process.cwd(), "static/packs");
private static namesToIds = new Map<string, Map<string, string>>();
@@ -65,7 +66,7 @@
uuid: /@UUID\[Compendium\.pf2e\.(?<packName>[^.]+)\.(?<docName>[^\]]+)\]\{?/g,
};

constructor(packDir: string, parsedData: unknown[]) {
constructor(packDir: string, parsedData: unknown[], parsedFolders: unknown[]) {
const metadata = CompendiumPack.packsMetadata.find(
(pack) => path.basename(pack.path) === path.basename(packDir)
);
@@ -76,6 +77,11 @@
this.packId = metadata.name;
this.documentType = metadata.type;

if (!this.#isFoldersData(parsedFolders)) {
throw PackError(`Folder data supplied for ${this.packId} does not resemble folder source data.`);
}
this.folders = parsedFolders;

if (!this.#isPackData(parsedData)) {
throw PackError(`Data supplied for ${this.packId} does not resemble Foundry document source data.`);
}
@@ -151,8 +157,7 @@ }
}

static loadJSON(dirPath: string): CompendiumPack {
const filenames = fs.readdirSync(dirPath);
const filePaths = filenames.map((f) => path.resolve(dirPath, f));
const filePaths = getFilesRecursively(dirPath);
const parsedData = filePaths.map((filePath) => {
const jsonString = fs.readFileSync(filePath, "utf-8");
const packSource: PackEntry = (() => {
@@ -178,8 +183,27 @@
return packSource;
});

const folders = ((): DBFolder[] => {
const foldersFile = path.resolve(dirPath, "_folders.json");
if (fs.existsSync(foldersFile)) {
const jsonString = fs.readFileSync(foldersFile, "utf-8");
const foldersSource: DBFolder[] = (() => {
try {
return JSON.parse(jsonString);
} catch (error) {
if (error instanceof Error) {
throw PackError(`File ${foldersFile} could not be parsed: ${error.message}`);
}
}
})();

return foldersSource;
}
return [];
})();

const dbFilename = path.basename(dirPath);
return new CompendiumPack(dbFilename, parsedData);
return new CompendiumPack(dbFilename, parsedData, folders);
}

#finalize(docSource: PackEntry): string {
@@ -324,7 +348,7 @@

const db = new LevelDatabase(packDir, { packName: path.basename(packDir) });
const finalized: PackEntry[] = this.data.map((datum) => JSON.parse(this.#finalize(datum)));
await db.createPack(finalized);
await db.createPack(finalized, this.folders);
console.log(`Pack "${this.packId}" with ${this.data.length} entries built successfully.`);

return this.data.length;
@@ -353,6 +377,14 @@
return packData.every((maybeDocSource: unknown) => this.#isDocumentSource(maybeDocSource));
}

#isFolderSource(maybeFolderSource: unknown): maybeFolderSource is DBFolder {
return isObject(maybeFolderSource) && "_id" in maybeFolderSource && "folder" in maybeFolderSource;
}

#isFoldersData(folderData: unknown[]): folderData is DBFolder[] {
return folderData.every((maybeFolderData) => this.#isFolderSource(maybeFolderData));
}

#assertSizeValid(source: ActorSourcePF2e | ItemSourcePF2e): void {
if (source.type === "npc" || source.type === "vehicle") {
if (!tupleHasValue(SIZES, source.system.traits.size.value)) {
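For orientation, here is a minimal sketch of the `_folders.json` file that `loadJSON` now looks for in each pack directory. All ids, names, and values are invented for illustration; the shape follows the `DBFolder` interface added in `build/lib/level-database.ts` below, minus `_stats`, which the extractor strips before writing:

```ts
// Hypothetical contents of packs/equipment/_folders.json (a DBFolder[]).
// Ids and names are invented; real ids are assigned by Foundry.
const exampleFolders = [
    {
        _id: "folder00000000000001",
        name: "Weapons",
        folder: null, // top-level folder
        type: "Item",
        sorting: "a",
        sort: 100000,
        color: null,
        flags: {},
    },
    {
        _id: "folder00000000000002",
        name: "Martial",
        folder: "folder00000000000001", // nested inside "Weapons"
        type: "Item",
        sorting: "a",
        sort: 200000,
        color: null,
        flags: {},
    },
];
```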
106 changes: 73 additions & 33 deletions build/lib/extractor.ts
@@ -11,9 +11,9 @@ import process from "process";
import systemJSON from "../../static/system.json" assert { type: "json" };
import templateJSON from "../../static/template.json" assert { type: "json" };
import { CompendiumPack, isActorSource, isItemSource } from "./compendium-pack.ts";
import { PackError } from "./helpers.ts";
import { PackError, getFilesRecursively } from "./helpers.ts";
import { PackEntry } from "./types.ts";
import { LevelDatabase } from "./level-database.ts";
import { DBFolder, LevelDatabase } from "./level-database.ts";

declare global {
interface Global {
@@ -47,6 +47,7 @@ class PackExtractor {
#lastActor: ActorSourcePF2e | null = null;
readonly #newDocIdMap: Record<string, string> = {};
readonly #idsToNames = new Map<string, Map<string, string>>();
#folderPathMap = new Map<string, string>();

#npcSystemKeys = new Set([
...Object.keys(templateJSON.Actor.templates.common),
@@ -120,44 +121,62 @@
const outPath = path.resolve(this.tempDataPath, packDirectory);

const db = new LevelDatabase(filePath, { packName: packDirectory });
const packSources = await db.getEntries();
const { packSources, folders } = await db.getEntries();

// Prepare subfolder data
if (folders.length) {
const getFolderPath = (folder: DBFolder, parts: string[] = []): string => {
if (parts.length > 3) {
throw PackError(
`Error: Maximum folder depth exceeded for "${folder.name}" in pack: ${packDirectory}`
);
}
parts.unshift(sluggify(folder.name));
if (folder.folder) {
// This folder is inside another folder
const parent = folders.find((f) => f._id === folder.folder);
if (!parent) {
throw PackError(`Error: Unknown parent folder id [${folder.folder}] in pack: ${packDirectory}`);
}
return getFolderPath(parent, parts);
}
parts.unshift(packDirectory);
return path.join(...parts);
};
const sanitizeFolder = (folder: Partial<DBFolder>): void => {
delete folder._stats;
};

for (const folder of folders) {
this.#folderPathMap.set(folder._id, getFolderPath(folder));
sanitizeFolder(folder);
}
const folderFilePath = path.resolve(outPath, "_folders.json");
await fs.promises.writeFile(folderFilePath, this.#prettyPrintJSON(folders), "utf-8");
}

const idPattern = /^[a-z0-9]{20,}$/g;
for (const source of packSources) {
// Remove or replace unwanted values from the document source
const preparedSource = this.#convertLinks(source, packDirectory);
if ("items" in preparedSource && preparedSource.type === "npc" && !this.disablePresort) {
preparedSource.items = this.#sortDataItems(preparedSource);
}

// Pretty print JSON data
const outData = (() => {
const allKeys: Set<string> = new Set();
const idKeys: string[] = [];

JSON.stringify(preparedSource, (key, value) => {
if (idPattern.test(key)) {
idKeys.push(key);
} else {
allKeys.add(key);
}

return value;
});

const sortedKeys = Array.from(allKeys).sort().concat(idKeys);

const newJson = JSON.stringify(preparedSource, sortedKeys, 4);
return `${newJson}\n`;
})();
const outData = this.#prettyPrintJSON(preparedSource);

// Remove all non-alphanumeric characters from the name
const slug = sluggify(source.name);
const slug = sluggify(preparedSource.name);
const outFileName = `${slug}.json`;
const outFilePath = path.resolve(outPath, outFileName);

// Handle subfolders
const subfolder = preparedSource.folder ? this.#folderPathMap.get(preparedSource.folder) : null;
const outFolderPath = subfolder ? path.resolve(this.tempDataPath, subfolder) : outPath;
if (subfolder && !fs.existsSync(outFolderPath)) {
fs.mkdirSync(outFolderPath, { recursive: true });
}
const outFilePath = path.resolve(outFolderPath, outFileName);

if (fs.existsSync(outFilePath)) {
throw PackError(`Error: Duplicate name "${source.name}" in pack: ${packDirectory}`);
throw PackError(`Error: Duplicate name "${preparedSource.name}" in pack: ${packDirectory}`);
}

this.#assertDocIdSame(preparedSource, outFilePath);
@@ -169,6 +188,27 @@
return packSources.length;
}

#prettyPrintJSON(object: object): string {
const idPattern = /^[a-z0-9]{20,}$/g;
const allKeys: Set<string> = new Set();
const idKeys: string[] = [];

JSON.stringify(object, (key, value) => {
if (idPattern.test(key)) {
idKeys.push(key);
} else {
allKeys.add(key);
}

return value;
});

const sortedKeys = Array.from(allKeys).sort().concat(idKeys);
const newJson = JSON.stringify(object, sortedKeys, 4);

return `${newJson}\n`;
}

#assertDocIdSame(newSource: PackEntry, jsonPath: string): void {
if (fs.existsSync(jsonPath)) {
const oldSource = JSON.parse(fs.readFileSync(jsonPath, { encoding: "utf-8" })) as PackEntry;
Expand Down Expand Up @@ -350,7 +390,9 @@ class PackExtractor {
for (const key in docSource) {
if (key === "_id") {
topLevel = docSource;
delete docSource.folder;
if (docSource.folder === null) {
delete docSource.folder;
}
delete (docSource as { _stats?: unknown })._stats;

docSource.img &&= docSource.img.replace(
Expand Down Expand Up @@ -781,16 +823,14 @@ class PackExtractor {

for (const packDir of packDirs) {
const metadata = this.packsMetadata.find((p) => path.basename(p.path) === packDir);
if (metadata === undefined) {
if (!metadata) {
throw PackError(`Compendium at ${packDir} has no metadata in the local system.json file.`);
}

const packMap: Map<string, string> = new Map();
this.#idsToNames.set(metadata.name, packMap);

const filenames = fs.readdirSync(path.resolve(this.dataPath, packDir));
const filePaths = filenames.map((n) => path.resolve(this.dataPath, packDir, n));

const filePaths = getFilesRecursively(path.resolve(this.dataPath, packDir));
for (const filePath of filePaths) {
const jsonString = fs.readFileSync(filePath, "utf-8");
const source = (() => {
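To make the subfolder mapping concrete, here is a self-contained sketch of the `getFolderPath` recursion above, using invented folder data and a simplified stand-in for the system's `sluggify` helper (the real build imports it from `@util/misc.ts`, and also enforces a maximum folder depth, omitted here for brevity):

```ts
import path from "path";

// Simplified stand-in for the system's sluggify() helper.
const sluggify = (name: string): string => name.toLowerCase().replace(/\s+/g, "-");

interface FolderStub {
    _id: string;
    name: string;
    folder: string | null;
}

// Invented folder records: "Cantrips" is nested inside "Spells".
const folders: FolderStub[] = [
    { _id: "aaaaaaaaaaaaaaaa", name: "Spells", folder: null },
    { _id: "bbbbbbbbbbbbbbbb", name: "Cantrips", folder: "aaaaaaaaaaaaaaaa" },
];

const packDirectory = "spells-srd"; // hypothetical pack name

// Walks parent links up to the pack root, building the output path.
const getFolderPath = (folder: FolderStub, parts: string[] = []): string => {
    parts.unshift(sluggify(folder.name));
    if (folder.folder) {
        const parent = folders.find((f) => f._id === folder.folder);
        if (!parent) throw new Error(`Unknown parent folder id [${folder.folder}]`);
        return getFolderPath(parent, parts);
    }
    parts.unshift(packDirectory);
    return path.join(...parts);
};

console.log(getFolderPath(folders[1])); // "spells-srd/spells/cantrips"
```

Each extracted document whose `folder` id appears in `#folderPathMap` is then written beneath that computed path instead of the pack root.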
19 changes: 18 additions & 1 deletion build/lib/helpers.ts
@@ -1,6 +1,23 @@
import fs from "fs";
import path from "path";

const PackError = (message: string): void => {
console.error(`Error: ${message}`);
process.exit(1);
};

export { PackError };
const getFilesRecursively = (directory: string, filePaths: string[] = []): string[] => {
const filesInDirectory = fs.readdirSync(directory);
for (const file of filesInDirectory) {
const absolute = path.join(directory, file);
if (fs.lstatSync(absolute).isDirectory()) {
getFilesRecursively(absolute, filePaths);
} else {
if (file === "_folders.json" || !file.endsWith(".json")) continue;
filePaths.push(absolute);
}
}
return filePaths;
};

export { getFilesRecursively, PackError };
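A usage sketch for the new helper, with a hypothetical directory layout: it descends into subdirectories and collects every document JSON file, skipping the `_folders.json` index and any non-JSON files:

```ts
import { getFilesRecursively } from "./helpers.ts";

// Hypothetical layout:
//   packs/equipment/shield.json
//   packs/equipment/_folders.json          <- skipped
//   packs/equipment/weapons/longsword.json
const filePaths = getFilesRecursively("packs/equipment");
// -> absolute paths to shield.json and weapons/longsword.json
```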
49 changes: 42 additions & 7 deletions build/lib/level-database.ts
@@ -13,8 +13,10 @@ const EMBEDDED_KEYS = ["items", "pages", "results"] as const;

class LevelDatabase extends ClassicLevel<string, DBEntry> {
#dbkey: DBKey;
#documentDb: Sublevel<DBEntry>;
#embeddedKey: EmbeddedKey | null;

#documentDb: Sublevel<DBEntry>;
#foldersDb: Sublevel<DBFolder>;
#embeddedDb: Sublevel<EmbeddedEntry> | null = null;

constructor(location: string, options: LevelDatabaseOptions<DBEntry>) {
@@ -26,16 +28,17 @@
this.#dbkey = dbKey;
this.#embeddedKey = embeddedKey;

this.#documentDb = this.sublevel<string, DBEntry>(dbKey, dbOptions);
this.#documentDb = this.sublevel(dbKey, dbOptions);
this.#foldersDb = this.sublevel("folders", dbOptions) as unknown as Sublevel<DBFolder>;
if (this.#embeddedKey) {
this.#embeddedDb = this.sublevel<string, DBEntry>(
this.#embeddedDb = this.sublevel(
`${this.#dbkey}.${this.#embeddedKey}`,
dbOptions
) as unknown as Sublevel<EmbeddedEntry>;
}
}

async createPack(docSources: DBEntry[]): Promise<void> {
async createPack(docSources: DBEntry[], folders: DBFolder[]): Promise<void> {
const isDoc = (source: unknown): source is EmbeddedEntry => {
return isObject(source) && "_id" in source;
};
@@ -60,10 +63,17 @@
if (embeddedBatch?.length) {
await embeddedBatch.write();
}
if (folders.length) {
const folderBatch = this.#foldersDb.batch();
for (const folder of folders) {
folderBatch.put(folder._id, folder);
}
await folderBatch.write();
}
await this.close();
}

async getEntries(): Promise<PackEntry[]> {
async getEntries(): Promise<{ packSources: PackEntry[]; folders: DBFolder[] }> {
const packSources: PackEntry[] = [];
for await (const [docId, source] of this.#documentDb.iterator()) {
const embeddedKey = this.#embeddedKey;
@@ -75,8 +85,13 @@
}
packSources.push(source as PackEntry);
}
const folders: DBFolder[] = [];
for await (const [_key, folder] of this.#foldersDb.iterator()) {
folders.push(folder);
}
await this.close();
return packSources;

return { packSources, folders };
}

#getDBKeys(packName: string): { dbKey: DBKey; embeddedKey: EmbeddedKey | null } {
@@ -125,14 +140,34 @@ type Sublevel<T> = AbstractSublevel<ClassicLevel<string, T>, string | Buffer | U

type EmbeddedEntry = ItemSourcePF2e | JournalEntryPageSource | TableResultSource;
type DBEntry = Omit<PackEntry, "pages" | "items" | "results"> & {
folder?: string | null;
items?: (EmbeddedEntry | string)[];
pages?: (EmbeddedEntry | string)[];
results?: (EmbeddedEntry | string)[];
};

interface DBFolder {
name: string;
sorting: string;
folder: string | null;
type: CompendiumDocumentType;
_id: string;
sort: number;
color: string | null;
flags: object;
_stats: {
systemId: string | null;
systemVersion: string | null;
coreVersion: string | null;
createdTime: number | null;
modifiedTime: number | null;
lastModifiedBy: string | null;
};
}

interface LevelDatabaseOptions<T> {
packName: string;
dbOptions?: DatabaseOptions<string, T>;
}

export { LevelDatabase };
export { DBFolder, LevelDatabase };
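A round-trip sketch of the extended API, with a hypothetical pack location and empty data: `createPack` now persists folders to their own "folders" sublevel, and `getEntries` returns them alongside the documents. Since both methods close the database when finished, each direction uses a fresh instance:

```ts
import { DBFolder, LevelDatabase } from "./level-database.ts";

const folders: DBFolder[] = []; // e.g. parsed from a pack's _folders.json

// Build direction: documents and folders go to separate sublevels.
const writeDb = new LevelDatabase("static/packs/equipment", { packName: "equipment" });
await writeDb.createPack([], folders); // closes the database when done

// Extract direction: both collections come back in one call.
const readDb = new LevelDatabase("static/packs/equipment", { packName: "equipment" });
const { packSources, folders: extractedFolders } = await readDb.getEntries();
console.log(packSources.length, extractedFolders.length);
```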
