Final removal of sqlite in favor of jembadb

Book Pauk
2022-07-25 16:12:15 +07:00
parent a2fa312839
commit d7be4d3d94
12 changed files with 95 additions and 1873 deletions
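This change drops the sqlite connection pool, its migrations and the one-off SQLITE => JembaDb converter wiring. As a rough before/after sketch of what that means at a call site, based only on the APIs visible in the files below (SqliteConnectionPool.js and the db.select/db.insert calls in Converter.js); storagePool and db are placeholder names:

// before: borrow a connection from the sqlite pool (SqliteConnectionPool.js, removed below)
const dbh = await storagePool.get();
try {
    const rows = await dbh.all('SELECT * FROM storage');
    // ...use rows...
} finally {
    dbh.ret(); // return the connection to the pool
}

// after: jembadb table API, as used in Converter.js below
const rows = await db.select({table: 'storage'});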

View File

@@ -23,24 +23,6 @@ async function main() {
await fs.ensureDir(tempDownloadDir);
//sqlite3
const sqliteRemoteUrl = 'https://mapbox-node-binary.s3.amazonaws.com/sqlite3/v5.0.2/napi-v3-linux-x64.tar.gz';
const sqliteDecompressedFilename = `${tempDownloadDir}/napi-v3-linux-x64/node_sqlite3.node`;
if (!await fs.pathExists(sqliteDecompressedFilename)) {
// Download node_sqlite3.node for Linux, since pkg does not include it in the bundle
const res = await axios.get(sqliteRemoteUrl, {responseType: 'stream'})
await pipeline(res.data, fs.createWriteStream(`${tempDownloadDir}/sqlite.tar.gz`));
console.log(`done downloading ${sqliteRemoteUrl}`);
//unpack
console.log(await decomp.unpackTarZZ(`${tempDownloadDir}/sqlite.tar.gz`, tempDownloadDir));
console.log('files decompressed');
}
// copy into the distribution
await fs.copy(sqliteDecompressedFilename, `${outDir}/node_sqlite3.node`);
console.log(`copied ${sqliteDecompressedFilename} to ${outDir}/node_sqlite3.node`);
//ipfs
const ipfsDecompressedFilename = `${tempDownloadDir}/go-ipfs/ipfs`;
if (!await fs.pathExists(ipfsDecompressedFilename)) {

View File

@@ -23,24 +23,6 @@ async function main() {
await fs.ensureDir(tempDownloadDir);
//sqlite3
const sqliteRemoteUrl = 'https://mapbox-node-binary.s3.amazonaws.com/sqlite3/v5.0.2/napi-v3-win32-x64.tar.gz';
const sqliteDecompressedFilename = `${tempDownloadDir}/napi-v3-win32-x64/node_sqlite3.node`;
if (!await fs.pathExists(sqliteDecompressedFilename)) {
// Download node_sqlite3.node for Windows, since pkg does not include it in the bundle
const res = await axios.get(sqliteRemoteUrl, {responseType: 'stream'})
await pipeline(res.data, fs.createWriteStream(`${tempDownloadDir}/sqlite.tar.gz`));
console.log(`done downloading ${sqliteRemoteUrl}`);
//unpack
console.log(await decomp.unpackTarZZ(`${tempDownloadDir}/sqlite.tar.gz`, tempDownloadDir));
console.log('files decompressed');
}
// copy into the distribution
await fs.copy(sqliteDecompressedFilename, `${outDir}/node_sqlite3.node`);
console.log(`copied ${sqliteDecompressedFilename} to ${outDir}/node_sqlite3.node`);
//ipfs
const ipfsDecompressedFilename = `${tempDownloadDir}/go-ipfs/ipfs.exe`;
if (!await fs.pathExists(ipfsDecompressedFilename)) {

package-lock.json (generated): 1586 changes. File diff suppressed because it is too large.

View File

@@ -71,9 +71,6 @@
"safe-buffer": "^5.2.1",
"sanitize-html": "^2.5.3",
"sjcl": "^1.0.8",
"sql-template-strings": "^2.2.2",
"sqlite": "^4.0.23",
"sqlite3": "^5.0.2",
"tar-fs": "^2.1.1",
"unbzip2-stream": "^1.4.3",
"vue": "^3.2.37",

View File

@@ -25,19 +25,6 @@ module.exports = {
acceptFileExt: '.fb2, .fb3, .html, .txt, .zip, .bz2, .gz, .rar, .epub, .mobi, .rtf, .doc, .docx, .pdf, .djvu, .jpg, .jpeg, .png',
webConfigParams: ['name', 'version', 'mode', 'maxUploadFileSize', 'useExternalBookConverter', 'acceptFileExt', 'branch'],
db: [
{
poolName: 'app',
connCount: 20,
fileName: 'app.sqlite',
},
{
poolName: 'readerStorage',
connCount: 20,
fileName: 'reader-storage.sqlite',
}
],
jembaDb: [
{
dbName: 'app',
@@ -60,15 +47,6 @@ module.exports = {
},
],
/*
remoteWebDavStorage: false,
remoteWebDavStorage: {
url: '127.0.0.1:1900',
username: '',
password: '',
},
*/
remoteStorage: false,
/*
remoteStorage: {

View File

@@ -1,61 +0,0 @@
//TODO: remove this module in 2023
const fs = require('fs-extra');
const SqliteConnectionPool = require('./SqliteConnectionPool');
const log = new (require('../core/AppLogger'))().log;//singleton
const migrations = {
'app': require('./migrations/app'),
'readerStorage': require('./migrations/readerStorage'),
};
let instance = null;
//singleton
class ConnManager {
constructor() {
if (!instance) {
this.inited = false;
instance = this;
}
return instance;
}
async init(config) {
this.config = config;
this._pool = {};
const force = null;//(config.branch == 'development' ? 'last' : null);
for (const poolConfig of this.config.db) {
const dbFileName = this.config.dataDir + '/' + poolConfig.fileName;
//backup
if (!poolConfig.noBak && await fs.pathExists(dbFileName))
await fs.copy(dbFileName, `${dbFileName}.bak`);
const connPool = new SqliteConnectionPool();
await connPool.open(poolConfig, dbFileName);
log(`Opened database "${poolConfig.poolName}"`);
//migrations
const migs = migrations[poolConfig.poolName];
if (migs && migs.data.length) {
const applied = await connPool.migrate(migs.data, migs.table, force);
if (applied.length)
log(`${applied.length} migrations applied to "${poolConfig.poolName}"`);
}
this._pool[poolConfig.poolName] = connPool;
}
this.inited = true;
}
get pool() {
return this._pool;
}
}
module.exports = ConnManager;

View File

@@ -1,42 +0,0 @@
//TODO: remove this module in 2023
const fs = require('fs-extra');
const log = new (require('../core/AppLogger'))().log;//singleton
class Converter {
async run(config) {
log('Converter start');
try {
const connManager = new (require('./ConnManager'))();//singleton
const storagePool = connManager.pool.readerStorage;
const jembaConnManager = new (require('./JembaConnManager'))();//singleton
const db = jembaConnManager.db['reader-storage'];
const srcDbPath = `${config.dataDir}/reader-storage.sqlite`;
if (!await fs.pathExists(srcDbPath)) {
log(LM_WARN, ' Source DB does not exist, nothing to do');
return;
}
const rows = await db.select({table: 'storage', count: true});
if (rows.length && rows[0].count != 0) {
log(LM_WARN, ` Destination table already exists (found ${rows[0].count} items), nothing to do`);
return;
}
const dbSrc = await storagePool.get();
try {
const rows = await dbSrc.all(`SELECT * FROM storage`);
await db.insert({table: 'storage', rows});
log(` Inserted ${rows.length} items`);
} finally {
dbSrc.ret();
}
} finally {
log('Converter finish');
}
}
}
module.exports = Converter;

View File

@@ -1,193 +0,0 @@
//TODO: remove this module in 2023
const sqlite3 = require('sqlite3');
const sqlite = require('sqlite');
const SQL = require('sql-template-strings');
class SqliteConnectionPool {
constructor() {
this.closed = true;
}
async open(poolConfig, dbFileName) {
const connCount = poolConfig.connCount || 1;
const busyTimeout = poolConfig.busyTimeout || 60*1000;
const cacheSize = poolConfig.cacheSize || 2000;
this.dbFileName = dbFileName;
this.connections = [];
this.freed = new Set();
this.waitingQueue = [];
for (let i = 0; i < connCount; i++) {
let client = await sqlite.open({
filename: dbFileName,
driver: sqlite3.Database
});
client.configure('busyTimeout', busyTimeout); //ms
await client.exec(`PRAGMA cache_size = ${cacheSize}`);
client.ret = () => {
this.freed.add(i);
if (this.waitingQueue.length) {
this.waitingQueue.shift().onFreed(i);
}
};
this.freed.add(i);
this.connections[i] = client;
}
this.closed = false;
}
get() {
return new Promise((resolve) => {
if (this.closed)
throw new Error('Connection pool closed');
const freeConnIndex = this.freed.values().next().value;
if (freeConnIndex !== undefined) {
this.freed.delete(freeConnIndex);
resolve(this.connections[freeConnIndex]);
return;
}
this.waitingQueue.push({
onFreed: (connIndex) => {
this.freed.delete(connIndex);
resolve(this.connections[connIndex]);
},
});
});
}
async run(query) {
const dbh = await this.get();
try {
let result = await dbh.run(query);
dbh.ret();
return result;
} catch (e) {
dbh.ret();
throw e;
}
}
async all(query) {
const dbh = await this.get();
try {
let result = await dbh.all(query);
dbh.ret();
return result;
} catch (e) {
dbh.ret();
throw e;
}
}
async exec(query) {
const dbh = await this.get();
try {
let result = await dbh.exec(query);
dbh.ret();
return result;
} catch (e) {
dbh.ret();
throw e;
}
}
async close() {
for (let i = 0; i < this.connections.length; i++) {
await this.connections[i].close();
}
this.closed = true;
}
// Modified from node-sqlite/.../src/Database.js
async migrate(migs, table, force) {
const migrations = migs.sort((a, b) => Math.sign(a.id - b.id));
if (!migrations.length) {
throw new Error('No migration data');
}
migrations.map(migration => {
const data = migration.data;
const [up, down] = data.split(/^--\s+?down\b/mi);
if (!down) {
const message = `The ${migration.name} migration does not contain a '-- Down' separator.`;
throw new Error(message);
} else {
/* eslint-disable no-param-reassign */
migration.up = up.replace(/^-- .*?$/gm, '').trim();// Remove comments
migration.down = down.trim(); // and trim whitespaces
}
});
// Create a database table for migrations meta data if it doesn't exist
await this.run(`CREATE TABLE IF NOT EXISTS "${table}" (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
up TEXT NOT NULL,
down TEXT NOT NULL
)`);
// Get the list of already applied migrations
let dbMigrations = await this.all(
`SELECT id, name, up, down FROM "${table}" ORDER BY id ASC`,
);
// Undo migrations that exist only in the database but not in migs,
// also undo the last migration if the `force` option was set to `last`.
const lastMigration = migrations[migrations.length - 1];
for (const migration of dbMigrations.slice().sort((a, b) => Math.sign(b.id - a.id))) {
if (!migrations.some(x => x.id === migration.id) ||
(force === 'last' && migration.id === lastMigration.id)) {
const dbh = await this.get();
await dbh.run('BEGIN');
try {
await dbh.exec(migration.down);
await dbh.run(SQL`DELETE FROM "`.append(table).append(SQL`" WHERE id = ${migration.id}`));
await dbh.run('COMMIT');
dbMigrations = dbMigrations.filter(x => x.id !== migration.id);
} catch (err) {
await dbh.run('ROLLBACK');
throw err;
} finally {
dbh.ret();
}
} else {
break;
}
}
// Apply pending migrations
let applied = [];
const lastMigrationId = dbMigrations.length ? dbMigrations[dbMigrations.length - 1].id : 0;
for (const migration of migrations) {
if (migration.id > lastMigrationId) {
const dbh = await this.get();
await dbh.run('BEGIN');
try {
await dbh.exec(migration.up);
await dbh.run(SQL`INSERT INTO "`.append(table).append(
SQL`" (id, name, up, down) VALUES (${migration.id}, ${migration.name}, ${migration.up}, ${migration.down})`)
);
await dbh.run('COMMIT');
applied.push(migration.id);
} catch (err) {
await dbh.run('ROLLBACK');
throw err;
} finally {
dbh.ret();
}
}
}
return applied;
}
}
module.exports = SqliteConnectionPool;
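For reference, a minimal usage sketch of the pool API defined above; the pool options mirror the removed config/ConnManager code, and the file path is illustrative:

const SqliteConnectionPool = require('./SqliteConnectionPool');

const connPool = new SqliteConnectionPool();
await connPool.open({poolName: 'readerStorage', connCount: 20}, 'data/reader-storage.sqlite');

// one-off queries go through the pool helpers
const rows = await connPool.all('SELECT * FROM storage');

// longer sequences borrow a connection explicitly
const dbh = await connPool.get();
try {
    await dbh.run('DELETE FROM storage');
} finally {
    dbh.ret(); // always return the connection, otherwise later get() calls queue up
}

await connPool.close();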

View File

@@ -1,5 +0,0 @@
module.exports = {
table: 'migration1',
data: [
]
}

View File

@@ -1,7 +0,0 @@
module.exports = `
-- Up
CREATE TABLE storage (id TEXT PRIMARY KEY, rev INTEGER, time INTEGER, data TEXT);
-- Down
DROP TABLE storage;
`;

View File

@@ -1,6 +0,0 @@
module.exports = {
table: 'migration1',
data: [
{id: 1, name: 'create', data: require('./001-create')}
]
}
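For context, a minimal sketch of how ConnManager.js above consumed an index like this one; connPool is the pool opened there, and the null force argument mirrors its hard-coded value:

const migs = require('./migrations/readerStorage'); // {table: 'migration1', data: [{id, name, data}, ...]}
if (migs && migs.data.length) {
    // migrate() splits each entry's data on '-- Down' and applies any pending '-- Up' sections
    const applied = await connPool.migrate(migs.data, migs.table, null);
    console.log(`${applied.length} migrations applied`);
}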

View File

@@ -45,15 +45,8 @@ async function init() {
}
//connections
const connManager = new (require('./db/ConnManager'))();//singleton
await connManager.init(config);
const jembaConnManager = new (require('./db/JembaConnManager'))();//singleton
await jembaConnManager.init(config, argv['auto-repair']);
//converter SQLITE => JembaDb
const converter = new (require('./db/Converter'))();
await converter.run(config);
}
async function main() {