feat: initial commit
This commit is contained in:
commit
36efc33c9a
4 changed files with 135 additions and 0 deletions
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
node_modules/
|
||||
package-lock.json
|
||||
|
||||
migrated.sql
|
||||
dump_file.sql
|
||||
12
README.md
Normal file
12
README.md
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
# ChomeNS Bot database migrator
|
||||
|
||||
A Node.js database migrator
|
||||
|
||||
# How to use
|
||||
|
||||
1. `mariadb-dump -u root -h 127.0.0.1 --default-character-set=utf8mb4 -N --routines --skip-triggers chomens_bot players -p > dump_file.sql`
|
||||
2. You should get a `dump_file.sql` in your current directory
|
||||
3. Install the dependencies
|
||||
4. Run `migrator.js` (e.g. `node migrator.js`) and wait (it will take a while, since this is a Node.js program and it obviously won't be fast)
|
||||
5. You should get a `migrated.sql` in your current directory
|
||||
6. Make sure the bot is stopped and then run `mariadb -u root -h 127.0.0.1 -p'your password' chomens_bot < migrated.sql`
|
||||
105
migrator.js
Normal file
105
migrator.js
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
// fun fact: this migration script is written entirely in the github actions vps using micro editor
// so don't expect good code here
//
// Reads ./dump_file.sql (a mariadb-dump of the old `players` table), converts the
// rows into the new `players` + `playerIPs` schema, and writes ./migrated.sql.

const fs = require('fs/promises');
const { Parser } = require('node-sql-parser/build/mariadb');

// Escapes a value for embedding inside a single-quoted SQL string literal.
// The backslash MUST be escaped first, otherwise the backslashes introduced by
// the later replacements would get doubled. Previously only control characters
// in usernames were escaped, so a username/server/ip containing ' or \
// produced broken SQL.
const escapeSql = (value) => String(value)
  .replaceAll('\\', '\\\\')
  .replaceAll("'", "\\'")
  .replaceAll('\n', '\\n')
  .replaceAll('\b', '\\b')
  .replaceAll('\f', '\\f')
  .replaceAll('\r', '\\r')
  .replaceAll('\t', '\\t');

(async () => {
  // readFile with an encoding already returns a string; no .toString() needed.
  const file = await fs.readFile('./dump_file.sql', 'utf8');

  console.log('parsing dump file');

  const parser = new Parser();
  const { ast } = parser.parse(file); // this takes the longest (for a production database that has ~650,000 rows)

  console.log('parsed dump file');

  // Pull (username, JSON data) pairs out of the dump's INSERT statements.
  // (Shape of the ast chain was worked out interactively in the REPL.)
  const parsed = ast
    .filter((each) => each.type === 'insert')
    .map((each) => each.values)
    .map((each) => each.values)
    .flat()
    .map((each) => each.value)
    .map((line) => ({
      username: line[0].value,
      data: JSON.parse(line[1].value.replaceAll('\\"', '"'))
    }));

  console.log('parsed the parsed dump file (into actually readable things you know)');

  let migrated = 'SET GLOBAL max_allowed_packet=107374182400;\n\n';

  migrated += 'DROP TABLE IF EXISTS `players`;\n\n';

  migrated += 'CREATE TABLE IF NOT EXISTS players (' +
    'uuid CHAR(36) PRIMARY KEY, ' +
    'username VARCHAR(32) NOT NULL, ' +
    'lastSeenTime TIMESTAMP,' +
    'lastSeenServer VARCHAR(100)' +
    ');\n';

  migrated += 'CREATE TABLE IF NOT EXISTS playerIPs (' +
    'uuid CHAR(36) NOT NULL, ' +
    'server VARCHAR(100) NOT NULL, ' +
    'ip VARCHAR(45) NOT NULL, ' +
    'PRIMARY KEY (uuid, server), ' +
    'INDEX idx_ip_server (ip, server), ' +
    'FOREIGN KEY (uuid) REFERENCES players(uuid) ON DELETE CASCADE' +
    ');\n\n';

  const now = Date.now();

  // Collecting rows into an array and join()ing avoids the fragile
  // "chop the trailing comma off with substring" trick, which corrupted the
  // output whenever a VALUES list ended up empty.
  const playerRows = [];
  for (const { username, data } of parsed) {
    // it was very spammy to log players that don't have last seen entries,
    // so they are silently defaulted instead.
    // Number() also guards against non-numeric junk being interpolated
    // unquoted into the SQL; any falsy/NaN result falls back to the time at
    // migration, matching the old `!lastSeenTime` behavior.
    const lastSeenTime = Number(data.lastSeen?.time) || now;
    const lastSeenServer = data.lastSeen?.server || 'play.kaboom.pw:25565';

    playerRows.push(
      `('${escapeSql(data.uuid)}', '${escapeSql(username)}', FROM_UNIXTIME(FLOOR(${lastSeenTime} / 1000)), '${escapeSql(lastSeenServer)}')`
    );
  }

  if (playerRows.length !== 0) {
    migrated += 'INSERT IGNORE INTO `players` (uuid, username, lastSeenTime, lastSeenServer) VALUES \n';
    migrated += playerRows.join(',\n') + ';\n';
  }

  console.log('done processing players table');

  const ipRows = [];
  for (const { username, data } of parsed) {
    if (data.ips === undefined) {
      console.log(`${username} doesn't have data.ips`);
      continue;
    }

    for (const [server, ip] of Object.entries(data.ips)) {
      ipRows.push(`('${escapeSql(data.uuid)}', '${escapeSql(server)}', '${escapeSql(ip)}')`);
    }
  }

  // Only emit the INSERT when there is at least one row — an empty VALUES
  // list would otherwise yield invalid SQL.
  if (ipRows.length !== 0) {
    migrated += '\n';
    migrated += 'INSERT IGNORE INTO `playerIPs` (uuid, server, ip) VALUES \n';
    migrated += ipRows.join(',\n') + ';\n';
  }

  console.log('done processing playerIPs table');

  migrated += `\n-- migrated on ${new Date()} using migrator made by chayapak\n`;

  await fs.writeFile('migrated.sql', migrated, 'utf8');
  console.log('result SAVED ! thank you.');
})();
|
||||
13
package.json
Normal file
13
package.json
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"name": "chomens-database-migrator",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "migrator.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"type": "commonjs"
|
||||
}
|
||||
Loading…
Add table
Reference in a new issue