Add import/export database routes #468
const crypto = require("crypto");
const zlib = require("zlib");
const path = require("path");
const fs = require("fs");
const { Writable, pipeline } = require("stream");
// `client` is assumed to be a connected Db handle provided by the project's
// driver setup; the bare "mongodb" package itself exports MongoClient, not `client`.
const { client } = require("mongodb");
const tar = require("tar-stream");

const BASE_PATH = path.join(process.cwd(), "./plugins");
const ALGORITHM = "aes-256-cbc";

module.exports = (router) => {

  router.post("/export", async (req, res) => {
    const pack = tar.pack();

    if (req.query.encrypt === "true") {
      const key = crypto.randomBytes(32);
      const iv = crypto.randomBytes(16);

      // the key/iv are only returned here; the client must keep them to import later
      res.setHeader("X-ENCRYPTION-KEY", key.toString("hex"));
      res.setHeader("X-ENCRYPTION-IV", iv.toString("hex"));

      const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
      pack.pipe(zlib.createGzip()).pipe(cipher).pipe(res);
    } else {
      pack.pipe(zlib.createGzip()).pipe(res);
    }

    for await (let collection of client.listCollections()) {
      // TODO: check/handle binary (serialized buffer objects)
      // > endpoint commands payload
      // > _id's should be MongoDB ObjectIds
      let data = await client.collection(collection.name).find().toArray();
      pack.entry({ name: `database/${collection.name}.json` }, JSON.stringify(data));
    }

    fs.readdirSync(BASE_PATH, {
      recursive: true
    }).filter((entry) => {
      // TODO: ignore .gitkeep file
      return !fs.statSync(path.join(BASE_PATH, entry)).isDirectory();
    }).map((entry) => {
      return [entry, fs.readFileSync(path.join(BASE_PATH, entry), "utf8")];
    }).forEach(([file, content]) => {
      pack.entry({ name: `plugins/${file}` }, content);
    });

    pack.finalize();
  });

  router.post("/import", (req, res) => {
    const extract = tar.extract();

    extract.on("error", (err) => {
      res.status(500).json({
        error: err.message,
        details: err,
        success: false
      });
      console.error("tar-stream extract error", err);
      //process.exit(1);
    });

    extract.on("finish", () => {
      console.log("tar-stream finished");
      res.json({
        success: true,
        message: "Restart to apply changes!"
      });
    });

    if (req.query.encrypt === "true") {
      const key = Buffer.from(req.headers["x-encryption-key"], "hex");
      const iv = Buffer.from(req.headers["x-encryption-iv"], "hex");
      const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);

      pipeline(req, decipher, zlib.createGunzip(), extract, (err) => {
        if (err) {
          console.error("encrypted", err);
        }
      });
    } else {
      pipeline(req, zlib.createGunzip(), extract, (err) => {
        if (err) {
          console.error("unencrypted", err);
        }
      });
    }

    extract.on("entry", (header, stream, next) => {
      // header is the tar header
      // stream is the content body (might be an empty stream)
      // call next when you are done with this entry
      if (header.name.startsWith("database/")) {
        console.log("restore database collection", header);

        let chunks = [];
        let name = header.name.replace("database/", "");

        let writeable = new Writable({
          write(chunk, enc, cb) {
            chunks.push(chunk);
            cb(null);
          }
        });

        stream.pipe(writeable).on("close", async () => {
          // TODO: check/handle binary (serialized buffer objects)
          // > endpoint commands payload
          // > _id's should be MongoDB ObjectIds
          let documents = JSON.parse(Buffer.concat(chunks).toString());

          // prevents bulk write error
          // MongoInvalidArgumentError: Invalid BulkOperation, Batch cannot be empty
          if (documents.length === 0) {
            next();
            return;
          }

          console.log("collection name", path.basename(name, ".json"));

          // .then() before .catch() so next() is only ever called once per entry
          client.collection(path.basename(name, ".json")).insertMany(documents).then(() => {
            next();
          }).catch((err) => {
            if (err?.code === 11000 && req.query?.skipDuplicates === "true") {
              next();
            } else {
              next(err);
            }
          });
        });
      } else if (header.name.startsWith("plugins/")) {
        console.log("restore plugin file", header);

        let name = header.name.replace("plugins/", "");

        fs.mkdirSync(path.dirname(path.join(BASE_PATH, name)), {
          recursive: true
        });

        stream.pipe(fs.createWriteStream(path.join(BASE_PATH, name))).once("error", (err) => {
          next(err);
        }).once("close", () => {
          next();
        });
      } else {
        console.log("unknown file prefix/name", header);
        // drain the unrecognized entry so extraction does not stall
        stream.resume();
        next();
      }
    });
  });
};
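Regarding the TODO comments about serialized buffer objects and _id values: below is a minimal sketch of a revive step that could run before insertMany. It assumes documents were exported with plain JSON.stringify, that 24-character hex _id strings were originally ObjectIds, and that binary fields appear in Buffer's default { type: "Buffer", data: [...] } JSON shape; the helper name is hypothetical and not part of this PR.

const { ObjectId } = require("mongodb");

// Hypothetical helper: restore values flattened by JSON.stringify on export.
function reviveDocument(doc) {
  if (!doc || typeof doc !== "object") {
    return doc;
  }
  // assumption: 24-char hex _id strings were ObjectIds before export
  if (typeof doc._id === "string" && /^[0-9a-f]{24}$/i.test(doc._id)) {
    doc._id = new ObjectId(doc._id);
  }
  // assumption: { type: "Buffer", data: [...] } is Buffer's default serialization
  for (const [key, value] of Object.entries(doc)) {
    if (value && value.type === "Buffer" && Array.isArray(value.data)) {
      doc[key] = Buffer.from(value.data);
    }
  }
  return doc;
}

// e.g. inside the "entry" handler, before insertMany:
// documents = documents.map(reviveDocument);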
Does this break the component items? (see backend/system/component/class.item.js, lines 19 to 52 at dbf499e)
On my local installation (where I tested the import/export routes), not every component item has a labels array (not even an empty one). Some items have a labels array, some do not, and some have an empty one. What happened here? Test with a fresh/clean installation.
Allow backup/import of database (+ plugins)
Analogous to the logfile export
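For illustration, a rough sketch of how the proposed routes could be driven end to end from a Node.js script (Node 18+ global fetch; the host, port, mount path, and file name are assumptions):

const fs = require("fs");

async function exportBackup() {
  const res = await fetch("http://localhost:3000/export?encrypt=true", { method: "POST" });
  // the key and iv are only returned here and must be stored to decrypt the archive later
  const key = res.headers.get("x-encryption-key");
  const iv = res.headers.get("x-encryption-iv");
  fs.writeFileSync("backup.tar.gz.enc", Buffer.from(await res.arrayBuffer()));
  return { key, iv };
}

async function importBackup({ key, iv }) {
  await fetch("http://localhost:3000/import?encrypt=true&skipDuplicates=true", {
    method: "POST",
    headers: { "x-encryption-key": key, "x-encryption-iv": iv },
    body: fs.readFileSync("backup.tar.gz.enc")
  });
}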