Remove Feb 2018 legacy migrator
This frees the /logs folder for use by other features. Going forward, updating from a pre-Feb 2018 version of the bot requires first updating to version v2.30.1 and running it once; after that, updating to newer versions works as usual.
parent 5c6df913bf
commit 0d29859dd8
src/index.js (26 lines changed)
@@ -45,7 +45,6 @@ const config = require("./cfg");
 const utils = require("./utils");
 const main = require("./main");
 const knex = require("./knex");
-const legacyMigrator = require("./legacy/legacyMigrator");
 
 // Force crash on unhandled rejections (use something like forever/pm2 to restart)
 process.on("unhandledRejection", err => {
@@ -68,31 +67,6 @@ process.on("unhandledRejection", err => {
     console.log("Done!");
   }
 
-  // Migrate legacy data if we need to
-  if (await legacyMigrator.shouldMigrate()) {
-    console.log("=== MIGRATING LEGACY DATA ===");
-    console.log("Do not close the bot!");
-    console.log("");
-
-    await legacyMigrator.migrate();
-
-    const relativeDbDir = (path.isAbsolute(config.dbDir) ? config.dbDir : path.resolve(process.cwd(), config.dbDir));
-    const relativeLogDir = (path.isAbsolute(config.logDir) ? config.logDir : path.resolve(process.cwd(), config.logDir));
-
-    console.log("");
-    console.log("=== LEGACY DATA MIGRATION FINISHED ===");
-    console.log("");
-    console.log("IMPORTANT: After the bot starts, please verify that all logs, threads, blocked users, and snippets are still working correctly.");
-    console.log("Once you've done that, the following files/directories are no longer needed. I would recommend keeping a backup of them, however.");
-    console.log("");
-    console.log("FILE: " + path.resolve(relativeDbDir, "threads.json"));
-    console.log("FILE: " + path.resolve(relativeDbDir, "blocked.json"));
-    console.log("FILE: " + path.resolve(relativeDbDir, "snippets.json"));
-    console.log("DIRECTORY: " + relativeLogDir);
-    console.log("");
-    console.log("Starting the bot...");
-  }
-
   // Start the bot
   main.start();
 })();
@@ -1,71 +0,0 @@
-const fs = require("fs");
-const path = require("path");
-const config = require("../cfg");
-
-const dbDir = config.dbDir;
-
-const databases = {};
-
-/**
- * @deprecated Only used for migrating legacy data
- */
-class JSONDB {
-  constructor(path, def = {}, useCloneByDefault = false) {
-    this.path = path;
-    this.useCloneByDefault = useCloneByDefault;
-
-    this.load = new Promise(resolve => {
-      fs.readFile(path, {encoding: "utf8"}, (err, data) => {
-        if (err) return resolve(def);
-
-        let unserialized;
-        try { unserialized = JSON.parse(data); }
-        catch (e) { unserialized = def; }
-
-        resolve(unserialized);
-      });
-    });
-  }
-
-  get(clone) {
-    if (clone == null) clone = this.useCloneByDefault;
-
-    return this.load.then(data => {
-      if (clone) return JSON.parse(JSON.stringify(data));
-      else return data;
-    });
-  }
-
-  save(newData) {
-    const serialized = JSON.stringify(newData);
-    this.load = new Promise((resolve, reject) => {
-      fs.writeFile(this.path, serialized, {encoding: "utf8"}, () => {
-        resolve(newData);
-      });
-    });
-
-    return this.get();
-  }
-}
-
-function getDb(dbName, def) {
-  if (! databases[dbName]) {
-    const dbPath = path.resolve(dbDir, `${dbName}.json`);
-    databases[dbName] = new JSONDB(dbPath, def);
-  }
-
-  return databases[dbName];
-}
-
-function get(dbName, def) {
-  return getDb(dbName, def).get();
-}
-
-function save(dbName, data) {
-  return getDb(dbName, data).save(data);
-}
-
-module.exports = {
-  get,
-  save,
-};
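
For context, a minimal usage sketch of the deleted JSON-store helper above. This sketch is not part of the diff: the record shapes are inferred from the migrator code below, and the require path assumes the caller sits in the same directory as the helper, as the migrator did.

// Hypothetical consumer of the legacy JSON store (illustrative only).
const jsonDb = require("./jsonDb");

(async () => {
  // get(dbName, def) resolves with the parsed contents of <config.dbDir>/<dbName>.json,
  // or with `def` when the file is missing or unparseable.
  const blockedUsers = await jsonDb.get("blocked", []);   // array of user ID strings
  const snippets = await jsonDb.get("snippets", {});      // { trigger: { text, isAnonymous } }

  // save(dbName, data) overwrites the file and resolves with the saved data.
  await jsonDb.save("blocked", blockedUsers.concat("<some user id>"));
})();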
@@ -1,222 +0,0 @@
-const fs = require("fs");
-const path = require("path");
-const promisify = require("util").promisify;
-const moment = require("moment");
-const Eris = require("eris");
-
-const knex = require("../knex");
-const config = require("../cfg");
-const jsonDb = require("./jsonDb");
-const threads = require("../data/threads");
-
-const {THREAD_STATUS, THREAD_MESSAGE_TYPE} = require("../data/constants");
-
-const readDir = promisify(fs.readdir);
-const readFile = promisify(fs.readFile);
-const access = promisify(fs.access);
-const writeFile = promisify(fs.writeFile);
-
-async function migrate() {
-  console.log("Migrating open threads...");
-  await migrateOpenThreads();
-
-  console.log("Migrating logs...");
-  await migrateLogs();
-
-  console.log("Migrating blocked users...");
-  await migrateBlockedUsers();
-
-  console.log("Migrating snippets...");
-  await migrateSnippets();
-
-  await writeFile(path.join(config.dbDir, ".migrated_legacy"), "");
-}
-
-async function shouldMigrate() {
-  // If there is a file marking a finished migration, assume we don't need to migrate
-  const migrationFile = path.join(config.dbDir, ".migrated_legacy");
-  try {
-    await access(migrationFile);
-    return false;
-  } catch (e) {}
-
-  // If there are any old threads, we need to migrate
-  const oldThreads = await jsonDb.get("threads", []);
-  if (oldThreads.length) {
-    return true;
-  }
-
-  // If there are any old blocked users, we need to migrate
-  const blockedUsers = await jsonDb.get("blocked", []);
-  if (blockedUsers.length) {
-    return true;
-  }
-
-  // If there are any old snippets, we need to migrate
-  const snippets = await jsonDb.get("snippets", {});
-  if (Object.keys(snippets).length) {
-    return true;
-  }
-
-  // If the log file dir exists and has logs in it, we need to migrate
-  try {
-    const files = await readDir(config.logDir);
-    if (files.length > 1) return true; // > 1, since .gitignore is one of them
-  } catch (e) {}
-
-  return false;
-}
-
-async function migrateOpenThreads() {
-  const bot = new Eris.Client(config.token);
-
-  const toReturn = new Promise(resolve => {
-    bot.on("ready", async () => {
-      const oldThreads = await jsonDb.get("threads", []);
-
-      const promises = oldThreads.map(async oldThread => {
-        const existingOpenThread = await knex("threads")
-          .where("channel_id", oldThread.channelId)
-          .first();
-
-        if (existingOpenThread) return;
-
-        const oldChannel = bot.getChannel(oldThread.channelId);
-        if (! oldChannel) return;
-
-        const threadMessages = await oldChannel.getMessages(1000);
-        const log = threadMessages.reverse().map(msg => {
-          const date = moment.utc(msg.timestamp, "x").format("YYYY-MM-DD HH:mm:ss");
-          return `[${date}] ${msg.author.username}#${msg.author.discriminator}: ${msg.content}`;
-        }).join("\n") + "\n";
-
-        const newThread = {
-          status: THREAD_STATUS.OPEN,
-          user_id: oldThread.userId,
-          user_name: oldThread.username,
-          channel_id: oldThread.channelId,
-          is_legacy: 1
-        };
-
-        const threadId = await threads.createThreadInDB(newThread);
-
-        await knex("thread_messages").insert({
-          thread_id: threadId,
-          message_type: THREAD_MESSAGE_TYPE.LEGACY,
-          user_id: oldThread.userId,
-          user_name: "",
-          body: log,
-          is_anonymous: 0,
-          created_at: moment.utc().format("YYYY-MM-DD HH:mm:ss")
-        });
-      });
-
-      resolve(Promise.all(promises));
-    });
-
-    bot.connect();
-  });
-
-  await toReturn;
-
-  bot.disconnect();
-}
-
-async function migrateLogs() {
-  const logDir = config.logDir || `${__dirname}/../../logs`;
-  const logFiles = await readDir(logDir);
-
-  for (let i = 0; i < logFiles.length; i++) {
-    const logFile = logFiles[i];
-    if (! logFile.endsWith(".txt")) continue;
-
-    const [rawDate, userId, threadId] = logFile.slice(0, -4).split("__");
-    const date = `${rawDate.slice(0, 10)} ${rawDate.slice(11).replace("-", ":")}`;
-
-    const fullPath = path.join(logDir, logFile);
-    const contents = await readFile(fullPath, {encoding: "utf8"});
-
-    const newThread = {
-      id: threadId,
-      status: THREAD_STATUS.CLOSED,
-      user_id: userId,
-      user_name: "",
-      channel_id: null,
-      is_legacy: 1,
-      created_at: date
-    };
-
-    await knex.transaction(async trx => {
-      const existingThread = await trx("threads")
-        .where("id", newThread.id)
-        .first();
-
-      if (existingThread) return;
-
-      await trx("threads").insert(newThread);
-
-      await trx("thread_messages").insert({
-        thread_id: newThread.id,
-        message_type: THREAD_MESSAGE_TYPE.LEGACY,
-        user_id: userId,
-        user_name: "",
-        body: contents,
-        is_anonymous: 0,
-        created_at: date
-      });
-    });
-
-    // Progress indicator for servers with tons of logs
-    if ((i + 1) % 500 === 0) {
-      console.log(`  ${i + 1}...`);
-    }
-  }
-}
-
-async function migrateBlockedUsers() {
-  const now = moment.utc().format("YYYY-MM-DD HH:mm:ss");
-  const blockedUsers = await jsonDb.get("blocked", []);
-
-  for (const userId of blockedUsers) {
-    const existingBlockedUser = await knex("blocked_users")
-      .where("user_id", userId)
-      .first();
-
-    if (existingBlockedUser) return;
-
-    await knex("blocked_users").insert({
-      user_id: userId,
-      user_name: "",
-      blocked_by: null,
-      blocked_at: now
-    });
-  }
-}
-
-async function migrateSnippets() {
-  const now = moment.utc().format("YYYY-MM-DD HH:mm:ss");
-  const snippets = await jsonDb.get("snippets", {});
-
-  const promises = Object.entries(snippets).map(async ([trigger, data]) => {
-    const existingSnippet = await knex("snippets")
-      .where("trigger", trigger)
-      .first();
-
-    if (existingSnippet) return;
-
-    return knex("snippets").insert({
-      trigger,
-      body: data.text,
-      is_anonymous: data.isAnonymous ? 1 : 0,
-      created_by: null,
-      created_at: now
-    });
-  });
-
-  return Promise.all(promises);
-}
-
-module.exports = {
-  migrate,
-  shouldMigrate,
-};
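
The upgrade note in the commit message follows from the shouldMigrate() checks removed above: newer versions no longer detect the old JSON/flat-file data, so only v2.30.1 (the last version that shipped this migrator) can convert it. Below is a rough standalone sketch of the same detection, for checking an install before skipping v2.30.1. The file paths and the script itself are illustrative assumptions, not part of the bot, and it only checks that the legacy files exist rather than that they contain data.

// check-legacy.js: hypothetical helper mirroring the removed shouldMigrate() checks.
const fs = require("fs");
const path = require("path");

// Adjust these to match your config's dbDir/logDir; the defaults here are assumptions.
const dbDir = process.env.DB_DIR || "./db";
const logDir = process.env.LOG_DIR || "./logs";

function exists(p) {
  try { fs.accessSync(p); return true; } catch (e) { return false; }
}

// A .migrated_legacy marker means an earlier v2.30.x run already migrated everything.
if (exists(path.join(dbDir, ".migrated_legacy"))) {
  console.log("Legacy data already migrated; safe to update straight to newer versions.");
  process.exit(0);
}

// Any of the old JSON stores or flat-file .txt logs means v2.30.1 still needs to run once.
const hasJsonData = ["threads.json", "blocked.json", "snippets.json"]
  .some(file => exists(path.join(dbDir, file)));
const txtLogs = exists(logDir)
  ? fs.readdirSync(logDir).filter(f => f.endsWith(".txt"))
  : [];

if (hasJsonData || txtLogs.length > 0) {
  console.log("Legacy data found; update to v2.30.1, run the bot once, then update further.");
} else {
  console.log("No legacy data found.");
}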