// AUTOGENERATED COPYRIGHT HEADER START
// Copyright (C) 2023 Michael Fabian 'Xaymar' Dirks <info@xaymar.com>
// AUTOGENERATED COPYRIGHT HEADER END
const CHILD_PROCESS = require("node:child_process");
const PROCESS = require("node:process");
const PATH = require("node:path");
const FS = require("node:fs");
const FSPROMISES = require("node:fs/promises");
const OS = require("node:os");

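// Regenerates the AUTOGENERATED COPYRIGHT HEADER block in source files under the given
// path, deriving the per-author year ranges from each file's git history.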
const SECTION_START = "AUTOGENERATED COPYRIGHT HEADER START";
const SECTION_END = "AUTOGENERATED COPYRIGHT HEADER END";
const IGNORED = [
	/^\.git$/gi,
	/^cmake\/clang$/gi,
	/^cmake\/version$/gi,
	/^third-party$/gi,
];

let abortAllWork = false;

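// Promise-based concurrency limiter: run() waits for a free slot before invoking the
// given (sync or async) function, e.g. `await limiter.run(async () => { ... })`.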
class RateLimiter {
	constructor(limit = undefined) {
		this._limit = limit;
		if (!this._limit) {
			this._limit = Math.ceil(Math.max(2, OS.cpus().length / 3 * 2));
		}
		this._cur = this._limit;
		this._pend = 0;
		this._locks = [];
	}

	async run(runner) {
		// Use Promises to spin-lock this execution path until there is a free slot.
		this._pend += 1;
		while (true) {
			if (this._cur > 0) {
				this._cur -= 1;
				break;
			} else {
				await Promise.race(this._locks);
			}
		}
		this._pend -= 1;

		let data = {};
		data.pri = new Promise((resolve, reject) => {
			try {
				if (runner.constructor.name == "AsyncFunction") {
					runner().then((res) => {
						resolve(res);
					}, (err) => {
						reject(err);
					});
				} else {
					resolve(runner());
				}
			} catch (ex) {
				reject(ex);
			}
		});
		data.sec = data.pri.finally(() => {
			// Remove this promise from the locks list.
			let idx = this._locks.indexOf(data.pri);
			if (idx >= 0) {
				this._locks.splice(idx, 1);
			}
			let idx2 = this._locks.indexOf(data.sec);
			if (idx2 >= 0) {
				this._locks.splice(idx2, 1);
			}
			this._cur += 1;
			//console.log(`Avail: ${this._cur} / ${this._limit}; Pending: ${this._pend}`)
		});
		this._locks.push(data.sec);
		return await data.sec;
	}
}

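// git invocations are serialized (limit 1); general file work is limited to roughly
// two thirds of the available CPU cores (see RateLimiter's default limit).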
let gitRL = new RateLimiter(1);
let workRL = new RateLimiter();

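// Returns true if 'path' matches one of the hard-coded IGNORED patterns, or if
// 'git check-ignore' reports it as ignored.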
async function isIgnored(path) {
	let rpath = PATH.relative(PROCESS.cwd(), path).replaceAll(PATH.sep, PATH.posix.sep);
	for (let ignore of IGNORED) {
		if (ignore instanceof RegExp) {
			if (ignore.global) {
				let matches = rpath.matchAll(ignore);
				for (let match of matches) {
					return true;
				}
			} else {
				if (rpath.match(ignore) !== null) {
					return true;
				}
			}
		} else if (rpath.startsWith(ignore)) {
			return true;
		}
	}

	return await gitRL.run(async () => {
		return await new Promise((resolve, reject) => {
			try {
				let proc = CHILD_PROCESS.spawn("git", [
					"check-ignore",
					path
				], {
					"cwd": PROCESS.cwd(),
					"encoding": "utf8",
				});
				// Discard stdout; only the exit code matters here
				// ('git check-ignore' exits with 0 when the path is ignored).
				proc.stdout.on('data', (data) => {
				});
				proc.on('close', (code) => {
					resolve(code == 0);
				});
				proc.on('exit', (code) => {
					resolve(code == 0);
				});
			} catch (ex) {
				reject(ex);
			}
		});
	});
}

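// Collects the authors of 'file' from 'git log' and returns a Map of
// "Name <email>" to { from, to } commit dates.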
async function git_retrieveAuthors(file) {
	// git --no-pager log --date-order --reverse "--format=format:%aI|%aN <%aE>" -- file
	let lines = await gitRL.run(async () => {
		return await new Promise((resolve, reject) => {
			try {
				let chunks = [];
				let proc = CHILD_PROCESS.spawn("git", [
					"--no-pager",
					"log",
					"--date-order",
					"--reverse",
					"--format=format:%aI|%aN <%aE>",
					"--",
					file
				], {
					"cwd": PROCESS.cwd(),
					"encoding": "utf8",
				});
				proc.stdout.on('data', (chunk) => {
					chunks.push(chunk);
				});
				proc.stdout.on('close', () => {
					let chunk = proc.stdout.read();
					if (chunk) {
						chunks.push(chunk);
					}
				});
				proc.on('exit', (code) => {
					// Merge all data into one buffer.
					let length = 0;
					for (let chunk of chunks) {
						length += chunk.byteLength;
					}
					let buf = Buffer.alloc(length);
					length = 0;
					for (let chunk of chunks) {
						if (!(chunk instanceof Buffer)) {
							chunk = Buffer.from(chunk);
						}
						chunk.copy(buf, length, 0);
						length += chunk.byteLength;
					}

					if (code == 0) {
						if (buf) {
							resolve(buf.toString());
						} else {
							reject(code);
						}
					} else {
						reject(code);
					}
				});
			} catch (ex) {
				reject(ex);
			}
		});
	});

	lines = lines.split(lines.indexOf("\r\n") >= 0 ? "\r\n" : "\n");
	let authors = new Map();
	for (let line of lines) {
		let [date, name] = line.split("|");

		let author = authors.get(name);
		if (author) {
			author.to = new Date(date);
		} else {
			authors.set(name, {
				from: new Date(date),
				to: new Date(date),
			});
		}
	}
	return authors;
}

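// Builds "Copyright (C) YYYY[-YYYY] Name <email>" lines from the author map.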
async function generateCopyright(file) {
	let authors = await git_retrieveAuthors(file);
	let lines = [];
	for (let entry of authors) {
		let from = entry[1].from.getUTCFullYear();
		let to = entry[1].to.getUTCFullYear();
		lines.push(`Copyright (C) ${from != to ? `${from}-${to}` : to} ${entry[0]}`);
	}
	return lines;
}

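// Picks a comment style based on the file name or extension and wraps the copyright
// lines between the SECTION_START/SECTION_END markers; throws for unknown formats.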
function makeHeader(file, copyright) {
	let file_name = PATH.basename(file).toLocaleLowerCase();
	let file_exts = file_name.substring(file_name.indexOf("."));

	let styles = {
		"#": {
			files: [
				"cmakelists.txt"
			], exts: [
				".clang-tidy",
				".clang-format",
				".cmake",
				".editorconfig",
				".gitignore",
				".gitmodules",
				".yml",
			],
			prepend: [
				`# ${SECTION_START}`,
			],
			append: [
				`# ${SECTION_END}`,
			],
			prefix: "# ",
			suffix: "",
		},
		";": {
			files: [
				""
			], exts: [
				".iss",
				".iss.in",
			],
			prepend: [
				`; ${SECTION_START}`,
			],
			append: [
				`; ${SECTION_END}`,
			],
			prefix: "; ",
			suffix: "",
		},
		"//": {
			files: [
			], exts: [
				".c",
				".c.in",
				".cpp",
				".cpp.in",
				".h",
				".h.in",
				".hpp",
				".hpp.in",
				".js",
				".rc",
				".rc.in",
				".effect"
			],
			prepend: [
				`// ${SECTION_START}`,
			],
			append: [
				`// ${SECTION_END}`,
			],
			prefix: "// ",
			suffix: "",
		},
		"<!---->": {
			files: [
			], exts: [
				".htm",
				".htm.in",
				".html",
				".html.in",
				".xml",
				".xml.in",
				".plist",
				".plist.in",
				".pkgproj",
				".pkgproj.in",
			],
			prepend: [
				`<!-- ${SECTION_START} -->`,
			],
			append: [
				`<!-- ${SECTION_END} -->`,
			],
			prefix: "<!-- ",
			suffix: " -->",
		}
	};

	for (let key in styles) {
		let style = [key, styles[key]];
		if (style[1].files.includes(file_name)
			|| style[1].files.includes(file)
			|| style[1].exts.includes(file_exts)) {
			let header = [];
			header.push(...style[1].prepend);
			for (let line of copyright) {
				header.push(`${style[1].prefix}${line}${style[1].suffix}`);
			}
			header.push(...style[1].append);
			return header;
		}
	}

	throw new Error("Unrecognized file format.");
}

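// Rewrites 'file', replacing the existing marker-delimited header (or prepending a new
// one) with the freshly generated copyright block.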
async function updateFile(file) {
	await workRL.run(async () => {
		try {
			if (abortAllWork) {
				return;
			}

			// Copyright information.
			let copyright = await generateCopyright(file);
			let header = undefined;
			try {
				header = makeHeader(file, copyright);
			} catch (ex) {
				console.log(`Skipping file '${file}'...`);
				return;
			}
			console.log(`Updating file '${file}'...`);

			// File contents.
			let content = await FSPROMISES.readFile(file);
			// Preserve the file's existing line endings.
			let eol = (content.indexOf("\r\n") != -1 ? "\r\n" : "\n");
			let insert = Buffer.from(header.join(eol) + eol);

			// Find the starting point.
			let startHeader = content.indexOf(SECTION_START);
			startHeader = content.lastIndexOf(eol, startHeader);
			startHeader += Buffer.from(eol).byteLength;

			// Find the ending point.
			let endHeader = content.indexOf(SECTION_END);
			endHeader = content.indexOf(eol, endHeader);
			endHeader += Buffer.from(eol).byteLength;

			if (abortAllWork) {
				return;
			}

			let fd = await FSPROMISES.open(file, "w");
			let fp = [];
			if ((startHeader >= 0) && (endHeader > startHeader)) {
				// Replace the existing header section, keeping everything around it.
				let pos = 0;
				if (startHeader > 0) {
					fp.push(fd.write(content, 0, startHeader, 0));
					pos += startHeader;
				}
				fp.push(fd.write(insert, 0, undefined, pos));
				pos += insert.byteLength;
				fp.push(fd.write(content, endHeader, undefined, pos));
			} else {
				// No existing header, prepend a new one.
				fp.push(fd.write(insert, 0, undefined, 0));
				fp.push(fd.write(content, 0, undefined, insert.byteLength));
			}
			await Promise.all(fp);
			await fd.close();
		} catch (ex) {
			console.error(`Error processing '${file}'!: ${ex}`);
			abortAllWork = true;
			PROCESS.exitCode = 1;
			return;
		}
	});
}

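// Recursively walks 'path', skipping ignored entries, and updates every file found.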
async function scanPath(path) {
	// Abort here if the user aborted the process, or if the path is ignored.
	if (abortAllWork) {
		return;
	}

	let promises = [];

	await workRL.run(async () => {
		let files = await FSPROMISES.readdir(path, { "withFileTypes": true });
		for (let file of files) {
			if (abortAllWork) {
				break;
			}

			let fullname = PATH.join(path, file.name);
			if (await isIgnored(fullname)) {
				console.log(`Ignoring path '${fullname}'...`);
				continue;
			}

			if (file.isDirectory()) {
				console.log(`Scanning path '${fullname}'...`);
				promises.push(scanPath(fullname));
			} else {
				promises.push(updateFile(fullname));
			}
		}
	});

	await Promise.all(promises);
}

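// Entry point: expects a path as the first argument, walks up to the repository root
// (the directory containing '.git'), then scans or updates the given path.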
(async function () {
	PROCESS.on("SIGINT", () => {
		abortAllWork = true;
		PROCESS.exitCode = 1;
		console.log("Sanely aborting all pending work...");
	});

	let path = PATH.resolve(PROCESS.argv[2]);

	{ // Bootstrap to actually be in the directory where '.git' is.
		let is_git_directory = false;
		while (!is_git_directory) {
			if (abortAllWork) {
				return;
			}

			let entries = await FSPROMISES.readdir(PROCESS.cwd());
			if (entries.includes(".git")) {
				console.log(`Found .git at '${PROCESS.cwd()}'.`);
				is_git_directory = true;
			} else {
				PROCESS.chdir(PATH.resolve(PATH.join(PROCESS.cwd(), "..")));
			}
		}
		path = PATH.normalize(PATH.relative(PROCESS.cwd(), path));
	}

	if (!await isIgnored(path)) {
		if ((await FSPROMISES.stat(path)).isDirectory()) {
			console.log(`Scanning path '${path}'...`);
			await scanPath(path);
		} else {
			await updateFile(path);
		}
	} else {
		console.log(`Ignoring path '${path}'...`);
	}
	console.log("Done");
})();