Skip to content

Commit

Permalink
Format tests (#8104)
Browse files Browse the repository at this point in the history
  • Loading branch information
hkamran80 committed Jun 17, 2024
1 parent 28391ef commit bff9a7d
Show file tree
Hide file tree
Showing 5 changed files with 98 additions and 85 deletions.
21 changes: 11 additions & 10 deletions tests/categories.js
Original file line number Diff line number Diff line change
@@ -1,31 +1,32 @@
// Deduplicated diff artifact: the old single-quoted and new double-quoted
// require lines both appeared; keep only the post-commit versions.
const fs = require("fs").promises;
const core = require("@actions/core");

async function main() {
let errors = false;
const files = process.argv.slice(2);
const res = await fetch(
'https://raw.githubusercontent.com/2factorauth/frontend/master/data/categories.json',
"https://raw.githubusercontent.com/2factorauth/frontend/master/data/categories.json",
{
'accept': 'application/json',
'user-agent': '2factorauth/twofactorauth +https://2fa.directory/bots',
});
accept: "application/json",
"user-agent": "2factorauth/twofactorauth +https://2fa.directory/bots",
},
);

if (!res.ok) throw new Error('Unable to fetch categories');
if (!res.ok) throw new Error("Unable to fetch categories");

const data = await res.json();
const allowed_categories = Object.keys(data);

if (files) {
for (const file of files) {
const data = await fs.readFile(file, 'utf8');
const data = await fs.readFile(file, "utf8");
const json = await JSON.parse(data);
const entry = json[Object.keys(json)[0]];
const {categories} = entry;
const { categories } = entry;

for (const category of categories || []) {
if (!allowed_categories.includes(category)) {
core.error(`${category} is not a valid category.`,{file});
core.error(`${category} is not a valid category.`, { file });
errors = true;
}
}
Expand Down
52 changes: 30 additions & 22 deletions tests/images.js
Original file line number Diff line number Diff line change
@@ -1,52 +1,59 @@
// Deduplicated diff artifact: old and new require lines were both present,
// which would redeclare `fs`, `core` and `glob` (a SyntaxError).
const fs = require("fs").promises;
const core = require("@actions/core");
const { glob } = require("glob");

// Allowed image dimensions ([width, height] pairs a PNG may use).
// Deduplicated diff artifact: the old one-line PNG_RES literal and the old
// `seen_images` name were shown alongside their replacements.
const PNG_RES = [
  [16, 16],
  [32, 32],
  [64, 64],
  [128, 128],
];

// Image paths referenced by at least one entry; filled by parseEntries(),
// consumed by parseImages() to flag unused files.
let seenImages = [];
// Set to true by any failed check; coerced to the process exit code.
let errors = false;

/**
 * Entry point: record every image referenced by an entry file, then scan the
 * image tree for problems, and exit non-zero if any check failed.
 * Deduplicated diff artifact: both old and new glob calls appeared, which
 * would have run each pass twice.
 */
async function main() {
  await parseEntries(await glob("entries/**/*.json"));

  await parseImages(await glob("img/*/*.*"));

  // `+errors` coerces the boolean to 0 (success) or 1 (failure).
  process.exit(+errors);
}

/**
 * Check that every entry's image file exists on disk, and remember each
 * referenced path in `seenImages` so parseImages() can flag unused files.
 * Deduplicated diff artifact: duplicate `const data`/`const {img, domain}`
 * lines (old + new) would have been redeclaration SyntaxErrors.
 * @param {string[]} entries - Paths of entry JSON files.
 */
async function parseEntries(entries) {
  for (const file of entries) {
    const data = await fs.readFile(file, "utf8");
    const json = JSON.parse(data); // JSON.parse is synchronous; no await needed
    const entry = json[Object.keys(json)[0]];
    const { img, domain } = entry;
    // An explicit `img` overrides the default `<first letter>/<domain>.svg` layout.
    const path = `img/${img ? `${img[0]}/${img}` : `${domain[0]}/${domain}.svg`}`;

    try {
      await fs.readFile(path);
    } catch (e) {
      core.error(`Image ${path} not found.`, { file });
      errors = true;
    }
    seenImages.push(path);
  }
}

async function parseImages(images) {
for (const image of images) {
if (!seen_images.includes(image)) {
core.error(`Unused image`, {file: image});
if (!seenImages.includes(image)) {
core.error(`Unused image`, { file: image });
errors = true;
}

if (image.endsWith('.png')) {
if (image.endsWith(".png")) {
if (!dimensionsAreValid(await getPNGDimensions(image), PNG_RES)) {
core.error(`PNGs must be one of the following dimensions: ${PNG_RES.map(
a => a.join('x')).join(', ')}`, {file: image});
core.error(
`PNGs must be one of the following dimensions: ${PNG_RES.map((a) =>
a.join("x"),
).join(", ")}`,
{ file: image },
);
errors = true;
}
}
Expand All @@ -55,16 +62,17 @@ async function parseImages(images) {

/**
 * Report whether `dimensions` exactly matches one of the allowed sizes.
 * Deduplicated diff artifact: old and new arrow-function lines both appeared,
 * making the span syntactically invalid.
 * @param {[number, number]} dimensions - [width, height] to check.
 * @param {Array<[number, number]>} validSizes - Allowed [width, height] pairs.
 * @returns {boolean} true when an exact width AND height match exists.
 */
function dimensionsAreValid(dimensions, validSizes) {
  return validSizes.some(
    (size) => size[0] === dimensions[0] && size[1] === dimensions[1],
  );
}

/**
 * Read the pixel dimensions of a PNG file.
 * Deduplicated diff artifact: both the old and new signature-check lines were
 * present in the span.
 * @param {string} file - Path to the PNG file.
 * @returns {Promise<[number, number]>} [width, height] from the IHDR chunk.
 * @throws {Error} If the file does not carry the PNG signature.
 */
async function getPNGDimensions(file) {
  const buffer = await fs.readFile(file);
  // Bytes 1-3 of the 8-byte PNG signature spell "PNG".
  if (buffer.toString("ascii", 1, 4) !== "PNG")
    throw new Error(`${file} is not a valid PNG file`);

  // IHDR is always the first chunk: width at byte offset 16, height at 20.
  return [buffer.readUInt32BE(16), buffer.readUInt32BE(20)];
}

// Surface any unhandled rejection from main() as a failed workflow step.
// Deduplicated diff artifact: old and new bootstrap lines both appeared,
// which would have run main() twice.
main().catch((e) => core.setFailed(e));
70 changes: 36 additions & 34 deletions tests/json.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
// Deduplicated diff artifact: the old single-quoted require/setup lines were
// interleaved with their double-quoted replacements, redeclaring every
// binding. Keep only the post-commit versions.
const fs = require("fs").promises;
const core = require("@actions/core");
const Ajv = require("ajv");
const addFormats = require("ajv-formats");
const schema = require("./schema.json");
const { basename } = require("node:path");

// Non-strict so the schema's custom keywords are tolerated; allErrors so
// every violation in a file is reported, not just the first.
const ajv = new Ajv({ strict: false, allErrors: true });
addFormats(ajv);
require("ajv-errors")(ajv);

const validate = ajv.compile(schema);
let errors = false;
Expand All @@ -26,16 +26,18 @@ function error(message, properties) {
/**
 * Entry point: validate every entry file passed on the command line against
 * the JSON schema and the file-content rules, in parallel, then exit with a
 * non-zero code if anything failed.
 */
async function main() {
  const files = process.argv.slice(2);

  await Promise.all(
    files.map(async (file) => {
      try {
        const json = JSON.parse(await fs.readFile(file, "utf8")); // JSON.parse is sync; no await needed
        const entry = json[Object.keys(json)[0]];
        validateJSONSchema(file, json);
        validateFileContents(file, entry);
      } catch (e) {
        // BUG FIX: the committed code read `err.message` inside `catch (e)`,
        // throwing a ReferenceError that masked the real failure.
        error(`Failed to process ${file}: ${e.message}`, { file });
      }
    }),
  );

  // `+errors` coerces the boolean to 0 (success) or 1 (failure).
  process.exit(+errors);
}
Expand All @@ -51,12 +53,11 @@ function validateJSONSchema(file, json) {
if (!valid) {
errors = true;
validate.errors.forEach((err) => {
const {message, instancePath, keyword: title} = err;
const instance = instancePath?.split('/');
const { message, instancePath, keyword: title } = err;
const instance = instancePath?.split("/");
if (message)
error(`${instance[instance.length - 1]} ${message}`, {file, title});
else
error(err, {file});
error(`${instance[instance.length - 1]} ${message}`, { file, title });
else error(err, { file });
});
}
}
Expand All @@ -71,27 +72,28 @@ function validateFileContents(file, entry) {
const valid_name = `${entry.domain}.json`;

if (basename(file) !== valid_name)
error(`File name should be ${valid_name}`, {file, title: 'File name'});
error(`File name should be ${valid_name}`, { file, title: "File name" });

if (entry.url === `https://${entry.domain}`)
error(`Unnecessary url element defined.`, {file});
error(`Unnecessary url element defined.`, { file });

if (entry.img === `${entry.domain}.svg`)
error(`Unnecessary img element defined.`, {file});
error(`Unnecessary img element defined.`, { file });

if (file !== `entries/${entry.domain[0]}/${valid_name}`)
error(`Entry should be placed in entries/${entry.domain[0]}/`, {file});
error(`Entry should be placed in entries/${entry.domain[0]}/`, { file });

if (entry.tfa?.includes('custom-software') && !entry['custom-software'])
error('Missing custom-software element', {file});
if (entry.tfa?.includes("custom-software") && !entry["custom-software"])
error("Missing custom-software element", { file });

if (entry.tfa?.includes('custom-hardware') && !entry['custom-hardware'])
error('Missing custom-hardware element', {file});
if (entry.tfa?.includes("custom-hardware") && !entry["custom-hardware"])
error("Missing custom-hardware element", { file });

if (entry.tfa && !entry.documentation)
core.warning(
'No documentation set. Please provide screenshots in the pull request',
{file, title: 'Missing documentation'});
"No documentation set. Please provide screenshots in the pull request",
{ file, title: "Missing documentation" },
);
}

module.exports = main();
19 changes: 10 additions & 9 deletions tests/languages.js
Original file line number Diff line number Diff line change
@@ -1,31 +1,32 @@
// Deduplicated diff artifact: old and new require/URL lines both appeared,
// which would redeclare `fs` and `core` (a SyntaxError).
const fs = require("fs").promises;
const core = require("@actions/core");

// ISO 639-1 language-code dataset used to validate entry contact languages.
const url = new URL(
  "https://pkgstore.datahub.io/core/language-codes/language-codes_json/data/97607046542b532c395cf83df5185246/language-codes_json.json",
);

async function main() {
let errors = false;
const files = process.argv.slice(2);
const res = await fetch(url, {
'accept': 'application/json',
'user-agent': '2factorauth/twofactorauth +https://2fa.directory/bots',
accept: "application/json",
"user-agent": "2factorauth/twofactorauth +https://2fa.directory/bots",
});

if (!res.ok) throw new Error('Unable to fetch language codes');
if (!res.ok) throw new Error("Unable to fetch language codes");

const data = await res.json();
const codes = Object.values(data).map(language => language.alpha2);
const codes = Object.values(data).map((language) => language.alpha2);

if (files) {
for (const file of files) {
const data = await fs.readFile(file, 'utf8');
const data = await fs.readFile(file, "utf8");
const json = await JSON.parse(data);
const entry = json[Object.keys(json)[0]];
const language = entry.contact?.language;

if (language && !codes.includes(language)) {
core.error(`${language} is not a valid language`, {file});
core.error(`${language} is not a valid language`, { file });
errors = true;
}
}
Expand Down
21 changes: 11 additions & 10 deletions tests/regions.js
Original file line number Diff line number Diff line change
@@ -1,32 +1,33 @@
// Deduplicated diff artifact: old and new require/URL lines both appeared,
// which would redeclare `fs` and `core` (a SyntaxError).
const fs = require("fs").promises;
const core = require("@actions/core");

// ISO 3166-1 country dataset used to validate entry region codes.
const url = new URL(
  "https://raw.githubusercontent.com/stefangabos/world_countries/master/data/countries/en/world.json",
);

async function main() {
let errors = false;
const files = process.argv.slice(2);
const res = await fetch(url, {
'accept': 'application/json',
'user-agent': '2factorauth/twofactorauth +https://2fa.directory/bots',
accept: "application/json",
"user-agent": "2factorauth/twofactorauth +https://2fa.directory/bots",
});

if (!res.ok) throw new Error('Unable to fetch region codes');
if (!res.ok) throw new Error("Unable to fetch region codes");

const data = await res.json();
const codes = Object.values(data).map(region => region.alpha2);
const codes = Object.values(data).map((region) => region.alpha2);

if (files) {
for (const file of files) {
const data = await fs.readFile(file, 'utf8');
const data = await fs.readFile(file, "utf8");
const json = await JSON.parse(data);
const entry = json[Object.keys(json)[0]];
const {regions} = entry;
const { regions } = entry;

for (const region of regions || []) {
if (!codes.includes(region)) {
core.error(`${region} is not a valid region code`, {file});
core.error(`${region} is not a valid region code`, { file });
errors = true;
}
}
Expand Down

0 comments on commit bff9a7d

Please sign in to comment.