Compare commits

...

73 Commits

Author SHA1 Message Date
Book Pauk
7fa891b4fc Merge branch 'release/0.9.11' 2020-12-09 22:31:33 +07:00
Book Pauk
6cb7412cf3 Version 0.9.11 2020-12-09 22:30:58 +07:00
Book Pauk
157322834b Minor fix 2020-12-09 22:30:19 +07:00
Book Pauk
1a13a0fee1 Work on the pdf converter 2020-12-09 22:19:14 +07:00
Book Pauk
37256255bf Added support for the 'sup' and 'sub' tags 2020-12-09 20:35:52 +07:00
Book Pauk
75e01c899e Work on the pdf converter 2020-12-09 20:08:17 +07:00
Book Pauk
ef0d6eab89 Work on the Pdf converter 2020-12-09 19:05:09 +07:00
Book Pauk
5d54b1b0f4 Work on the pdf converter 2020-12-09 03:52:24 +07:00
Book Pauk
522f953b4f Work on the pdf converter 2020-12-09 03:06:15 +07:00
Book Pauk
15f02c7115 Work on the pdf converter 2020-12-09 01:29:58 +07:00
Book Pauk
174c877eee Refactoring, plus minor improvements 2020-12-09 01:29:09 +07:00
Book Pauk
fd9ec736d7 Refactoring 2020-12-08 19:36:53 +07:00
Book Pauk
2c94025ba3 Fixed a bug 2020-12-08 19:31:00 +07:00
Book Pauk
bfadf35c40 Finished work on xmlParser, tested 2020-12-08 18:48:55 +07:00
Book Pauk
f3b69caa12 Work on the xmlParser module 2020-12-08 16:17:36 +07:00
Book Pauk
18a83a5b0b Fixes to compression settings 2020-12-08 14:26:49 +07:00
Book Pauk
bd9669b782 Fix for the dev target 2020-12-08 14:26:25 +07:00
Book Pauk
e05713aa7f Work on the pdf converter 2020-12-08 14:15:17 +07:00
Book Pauk
bc3e1f0a6f Minor refactoring 2020-12-07 22:13:14 +07:00
Book Pauk
063d01b5ca Switched the pdf converter to pdfalto 2020-12-07 22:05:01 +07:00
Book Pauk
81c38d7749 Minor refactoring 2020-12-07 20:13:32 +07:00
Book Pauk
a29842b084 Readme fix 2020-12-07 20:12:37 +07:00
Book Pauk
bb5adcdaf6 Refactoring 2020-12-07 01:30:10 +07:00
Book Pauk
537e17a219 Merge tag '0.9.10-5' into develop
0.9.10-5
2020-12-05 13:42:45 +07:00
Book Pauk
03ce50153e Merge branch 'release/0.9.10-5' 2020-12-05 13:42:39 +07:00
Book Pauk
15d01ad7fc Adjusted wait-queue timeouts 2020-12-05 13:41:42 +07:00
Book Pauk
e2b29e2c2f Merge tag '0.9.10-4' into develop
0.9.10-4
2020-12-05 13:25:10 +07:00
Book Pauk
ce7ae84e0f Merge branch 'release/0.9.10-4' 2020-12-05 13:25:06 +07:00
Book Pauk
01eb545f15 Improved queue handling, bug fixes 2020-12-05 13:24:04 +07:00
Book Pauk
706738c7f1 Merge tag '0.9.10-3' into develop
0.9.10-3
2020-12-05 01:40:37 +07:00
Book Pauk
6afa78cde9 Merge branch 'release/0.9.10-3' 2020-12-05 01:40:30 +07:00
Book Pauk
71f5710bba Increased the limit on the number of files to unpack 2020-12-05 01:12:29 +07:00
Book Pauk
0d87043f91 Fixed an incorrect reject call 2020-12-05 01:11:31 +07:00
Book Pauk
e25375fb7a Bug fixes 2020-12-05 00:31:53 +07:00
Book Pauk
41822999c8 Minor fixes 2020-12-05 00:06:54 +07:00
Book Pauk
07444bc7c2 Added a hint to the error message 2020-12-04 23:25:34 +07:00
Book Pauk
ec48e5b0b7 Minor fix 2020-12-04 20:14:53 +07:00
Book Pauk
e8e2e9297f Merge tag '0.9.10-2' into develop
0.9.10-2
2020-12-04 20:00:40 +07:00
Book Pauk
4f871dd5ca Merge branch 'release/0.9.10-2' 2020-12-04 20:00:35 +07:00
Book Pauk
f5f07a591a Minor conversion improvements 2020-12-04 20:00:05 +07:00
Book Pauk
4c11e6918f Merge tag '0.9.10-1' into develop
0.9.10-1
2020-12-04 18:38:02 +07:00
Book Pauk
403b9c0508 Merge branch 'release/0.9.10-1' 2020-12-04 18:37:58 +07:00
Book Pauk
ee8ba75371 Reworked the Djvu converter; it is now faster and no longer needs an intermediate conversion to pdf 2020-12-04 18:24:08 +07:00
Book Pauk
a2773fb180 Fixed the readme 2020-12-04 18:23:57 +07:00
Book Pauk
ca36d588fc Merge tag '0.9.10' into develop
0.9.10
2020-12-03 22:05:47 +07:00
Book Pauk
1e65707b7f Merge branch 'release/0.9.10' 2020-12-03 22:05:42 +07:00
Book Pauk
eddf34ce55 Minor fix 2020-12-03 22:04:47 +07:00
Book Pauk
0fb43aa33c Version 0.9.10 2020-12-03 21:41:23 +07:00
Book Pauk
b273b02da4 Removed the "k" in the status bar 2020-12-03 21:37:58 +07:00
Book Pauk
0b997f9673 Fixed the welcome message 2020-12-03 21:37:27 +07:00
Book Pauk
bdb2ae57a8 Added partial support for the Djvu format 2020-12-03 21:30:50 +07:00
Book Pauk
b5e563679a Minor fix 2020-12-03 20:00:22 +07:00
Book Pauk
992c104262 Removed an unneeded file 2020-12-03 19:57:27 +07:00
Book Pauk
555154031e Added a persistent storage request, increased the book cache size 2020-12-03 19:53:08 +07:00
Book Pauk
acb083e429 Fixed the readme 2020-12-03 19:30:27 +07:00
Book Pauk
4a527d192d Improved management of external converters 2020-12-03 19:04:34 +07:00
Book Pauk
39c3bf17dd Added the pidusage package 2020-12-03 18:43:57 +07:00
Book Pauk
afc8c84f41 Minor timeout fixes 2020-12-03 12:39:16 +07:00
Book Pauk
a085e04c4d Fixes to the Rar launch parameters 2020-12-03 10:24:47 +07:00
Book Pauk
2f82b0db34 Added support for rar archives via an external archiver 2020-12-02 23:56:17 +07:00
Book Pauk
0124c2b17d Fix for the dev target 2020-12-02 23:55:23 +07:00
Book Pauk
d2cfbbc9f3 Fixed the dialog-hide trigger 2020-12-02 22:24:11 +07:00
Book Pauk
c59f48822c The "file://" prefix has been replaced with "disk://", since it conflicts with the browser's file:// protocol 2020-12-02 22:01:36 +07:00
Book Pauk
b2d6584c4a Merge tag '0.9.9-2' into develop
0.9.9-2
2020-11-24 04:53:10 +07:00
Book Pauk
8f7cafb240 Merge branch 'release/0.9.9-2' 2020-11-24 04:52:55 +07:00
Book Pauk
08fd0f15ff Improved paragraph detection for plain text 2020-11-24 04:49:18 +07:00
Book Pauk
dbb1bfe587 Fixes to fb2 file encoding detection 2020-11-24 02:09:17 +07:00
Book Pauk
fe4b7a5a85 Improved xml format detection 2020-11-23 23:49:20 +07:00
Book Pauk
d8df5d76e5 Readme fix 2020-11-23 21:57:52 +07:00
Book Pauk
b65dcc5ade Added the -nodrm parameter 2020-11-23 21:49:19 +07:00
Book Pauk
a5c387a19e Fixed the server launch command 2020-11-23 21:35:45 +07:00
Book Pauk
07c38d9a9f Fix for converter error logging 2020-11-23 21:19:04 +07:00
Book Pauk
20ac8a444b Merge tag '0.9.9-1' into develop
0.9.9-1
2020-11-21 13:36:15 +07:00
39 changed files with 926 additions and 277 deletions

View File

@@ -66,7 +66,7 @@ class Reader {
await utils.sleep(refreshPause);
i++;
if (i > 120*1000/refreshPause) {//2 мин ждем телодвижений воркера
if (i > 180*1000/refreshPause) {//3 мин ждем телодвижений воркера
throw new Error('Слишком долгое время ожидания');
}
//проверка воркера
@@ -181,9 +181,8 @@ class Reader {
maxUploadFileSize = 10*1024*1024;
if (file.size > maxUploadFileSize)
throw new Error(`Размер файла превышает ${maxUploadFileSize} байт`);
let formData = new FormData();
formData.append('file', file);
formData.append('file', file, file.name);
const options = {
headers: {

View File

@@ -128,6 +128,10 @@ class App extends Vue {
this.setAppTitle();
(async() => {
//запросим persistent storage
if (navigator.storage && navigator.storage.persist) {
navigator.storage.persist();
}
await this.routerReady();
this.redirectIfNeeded();
})();

View File

@@ -7,8 +7,8 @@
<span class="greeting"><b>{{ title }}</b></span>
<div class="q-my-sm"></div>
<span class="greeting">Добро пожаловать!</span>
<span class="greeting">Поддерживаются форматы: <b>fb2, html, txt</b> и сжатие: <b>zip, bz2, gz</b></span>
<span v-if="isExternalConverter" class="greeting">...а также форматы: <b>rtf, doc, docx, pdf, epub, mobi</b></span>
<span class="greeting">Поддерживаются форматы: <b>fb2, html, txt</b> и сжатие: <b>zip, bz2, gz<span v-if="isExternalConverter">, rar</span></b></span>
<span v-if="isExternalConverter" class="greeting">...а также частично форматы: <b>epub, mobi, rtf, doc, docx, pdf, djvu</b></span>
</div>
<div class="col-auto column justify-start items-center no-wrap overflow-hidden">

View File

@@ -593,12 +593,6 @@ class Reader extends Vue {
}
}
refreshBookSplitToPara() {
if (this.mostRecentBook()) {
this.loadBook({url: this.mostRecentBook().url, skipCheck: true, isText: true, force: true});
}
}
recentBooksClose() {
this.recentBooksActive = false;
}
@@ -688,9 +682,14 @@ class Reader extends Vue {
}
}
refreshBook() {
if (this.mostRecentBook()) {
this.loadBook({url: this.mostRecentBook().url, force: true});
refreshBook(mode) {
const mrb = this.mostRecentBook();
if (mrb) {
if (mode && mode == 'split') {
this.loadBook({url: mrb.url, uploadFileName: mrb.uploadFileName, skipCheck: true, isText: true, force: true});
} else {
this.loadBook({url: mrb.url, uploadFileName: mrb.uploadFileName, force: true});
}
}
}
@@ -846,8 +845,12 @@ class Reader extends Vue {
let url = encodeURI(decodeURI(opts.url));
//TODO: убрать конвертирование 'file://' после 06.2021
if (url.length == 71 && url.indexOf('file://') == 0)
url = url.replace(/^file/, 'disk');
if ((url.indexOf('http://') != 0) && (url.indexOf('https://') != 0) &&
(url.indexOf('file://') != 0))
(url.indexOf('disk://') != 0))
url = 'http://' + url;
// уже просматривается сейчас
@@ -878,6 +881,7 @@ class Reader extends Vue {
wasOpened = (wasOpened ? wasOpened : {});
const bookPos = (opts.bookPos !== undefined ? opts.bookPos : wasOpened.bookPos);
const bookPosSeen = (opts.bookPos !== undefined ? opts.bookPos : wasOpened.bookPosSeen);
const uploadFileName = (opts.uploadFileName ? opts.uploadFileName : '');
let book = null;
@@ -924,7 +928,8 @@ class Reader extends Vue {
url,
skipCheck: (opts.skipCheck ? true : false),
isText: (opts.isText ? true : false),
enableSitesFilter: this.enableSitesFilter
enableSitesFilter: this.enableSitesFilter,
uploadFileName
},
(state) => {
progress.setState(state);
@@ -940,7 +945,7 @@ class Reader extends Vue {
});
// добавляем в историю
await bookManager.setRecentBook(Object.assign({bookPos, bookPosSeen}, addedBook));
await bookManager.setRecentBook(Object.assign({bookPos, bookPosSeen, uploadFileName}, addedBook));
this.mostRecentBook();
this.addAction(bookPos);
this.updateRoute(true);
@@ -977,7 +982,7 @@ class Reader extends Vue {
progress.hide(); this.progressActive = false;
await this.loadBook({url});
await this.loadBook({url, uploadFileName: opts.file.name, force: true});
} catch (e) {
progress.hide(); this.progressActive = false;
this.loaderActive = true;
@@ -1049,7 +1054,7 @@ class Reader extends Vue {
this.copyTextToggle();
break;
case 'splitToPara':
this.refreshBookSplitToPara();
this.refreshBook('split');
break;
case 'refresh':
this.refreshBook();

View File

@@ -296,7 +296,7 @@ class RecentBooksPage extends Vue {
isUrl(url) {
if (url)
return (url.indexOf('file://') != 0);
return (url.indexOf('disk://') != 0);
else
return false;
}

View File

@@ -216,8 +216,15 @@ class ServerStorage extends Vue {
}
error(message) {
if (this.showServerStorageMessages && !this.offlineModeActive)
this.$root.notify.error(message);
if (this.showServerStorageMessages && !this.offlineModeActive) {
this.errorMessageCounter = (this.errorMessageCounter ? this.errorMessageCounter + 1 : 1);
const hint = (this.errorMessageCounter < 2 ? '' :
'<div><br>Надоело это сообщение? Добавьте в настройках кнопку "Автономный режим" ' +
'<i class="la la-unlink" style="font-size: 20px; color: white"></i> на панель инструментов и активируйте ее.</div>'
);
this.$root.notify.error(message + hint);
}
}
async loadSettings(force = false, doNotifySuccess = true) {

View File

@@ -77,9 +77,15 @@ export default class DrawHelper {
let j = 0;
//формируем строку
for (const part of line.parts) {
let tOpen = (part.style.bold ? '<b>' : '');
let tOpen = '';
tOpen += (part.style.bold ? '<b>' : '');
tOpen += (part.style.italic ? '<i>' : '');
let tClose = (part.style.italic ? '</i>' : '');
tOpen += (part.style.sup ? '<span style="vertical-align: baseline; position: relative; line-height: 0; top: -0.3em">' : '');
tOpen += (part.style.sub ? '<span style="vertical-align: baseline; position: relative; line-height: 0; top: 0.3em">' : '');
let tClose = '';
tClose += (part.style.sub ? '</span>' : '');
tClose += (part.style.sup ? '</span>' : '');
tClose += (part.style.italic ? '</i>' : '');
tClose += (part.style.bold ? '</b>' : '');
let text = '';
@@ -159,7 +165,7 @@ export default class DrawHelper {
const fh = h - 2*pad;
const fh2 = fh/2;
const t1 = `${Math.floor((bookPos + 1)/1000)}k/${Math.floor(textLength/1000)}k`;
const t1 = `${Math.floor((bookPos + 1)/1000)}/${Math.floor(textLength/1000)}`;
const w1 = this.measureTextFont(t1, font) + fh2;
const read = (bookPos + 1)/textLength;
const t2 = `${(read*100).toFixed(2)}%`;

View File

@@ -1053,7 +1053,7 @@ class TextPage extends Vue {
onStatusBarClick() {
const url = this.meta.url;
if (url && url.indexOf('file://') != 0) {
if (url && url.indexOf('disk://') != 0) {
window.open(url, '_blank');
} else {
this.$root.stdDialog.alert('Оригинал недоступен, т.к. файл книги был загружен с локального диска.', ' ', {color: 'info'});

View File

@@ -285,7 +285,7 @@ export default class BookParser {
sectionLevel++;
}
if (tag == 'emphasis' || tag == 'strong') {
if (tag == 'emphasis' || tag == 'strong' || tag == 'sup' || tag == 'sub') {
growParagraph(`<${tag}>`, 0);
}
@@ -343,7 +343,7 @@ export default class BookParser {
sectionLevel--;
}
if (tag == 'emphasis' || tag == 'strong') {
if (tag == 'emphasis' || tag == 'strong' || tag == 'sup' || tag == 'sub') {
growParagraph(`</${tag}>`, 0);
}
@@ -507,7 +507,7 @@ export default class BookParser {
splitToStyle(s) {
let result = [];/*array of {
style: {bold: Boolean, italic: Boolean, center: Boolean, space: Number},
style: {bold: Boolean, italic: Boolean, sup: Boolean, sub: Boolean, center: Boolean, space: Number},
image: {local: Boolean, inline: Boolean, id: String},
text: String,
}*/
@@ -530,6 +530,12 @@ export default class BookParser {
case 'emphasis':
style.italic = true;
break;
case 'sup':
style.sup = true;
break;
case 'sub':
style.sub = true;
break;
case 'center':
style.center = true;
break;
@@ -580,6 +586,12 @@ export default class BookParser {
case 'emphasis':
style.italic = false;
break;
case 'sup':
style.sup = false;
break;
case 'sub':
style.sub = false;
break;
case 'center':
style.center = false;
break;

View File

@@ -4,7 +4,7 @@ import _ from 'lodash';
import * as utils from '../../../share/utils';
import BookParser from './BookParser';
const maxDataSize = 300*1024*1024;//compressed bytes
const maxDataSize = 500*1024*1024;//compressed bytes
//локальный кэш метаданных книг, ограничение maxDataSize
const bmMetaStore = localForage.createInstance({
@@ -64,8 +64,12 @@ class BookManager {
await this.cleanRecentBooks();
if (this.recentRev > 10)
await bmRecentStoreOld.clear();
//TODO: убрать после 06.2021, когда bmRecentStoreOld устареет
{
await this.convertFileToDiskPrefix();
if (this.recentRev > 10)
await bmRecentStoreOld.clear();
}
} else {//TODO: убрать после 06.2021, когда bmRecentStoreOld устареет
this.recentLast = await bmRecentStoreOld.getItem('recent-last');
if (this.recentLast) {
@@ -165,7 +169,7 @@ class BookManager {
}
async deflateWithProgress(data, callback) {
const chunkSize = 128*1024;
const chunkSize = 512*1024;
const deflator = new utils.pako.Deflate({level: 5});
let chunkTotal = 1 + Math.floor(data.length/chunkSize);
@@ -199,7 +203,7 @@ class BookManager {
}
async inflateWithProgress(data, callback) {
const chunkSize = 64*1024;
const chunkSize = 512*1024;
const inflator = new utils.pako.Inflate({to: 'string'});
let chunkTotal = 1 + Math.floor(data.length/chunkSize);
@@ -455,6 +459,33 @@ class BookManager {
return isDel;
}
async convertFileToDiskPrefix() {
let isConverted = false;
const newRecent = {};
for (let key of Object.keys(this.recent)) {
let newKey = key;
let newUrl = this.recent[key].url;
if (newKey.indexOf('66696c65') == 0) {
newKey = newKey.replace(/^66696c65/, '6469736b');
if (newUrl)
newUrl = newUrl.replace(/^file/, 'disk');
isConverted = true;
}
newRecent[newKey] = this.recent[key];
newRecent[newKey].key = newKey;
if (newUrl)
newRecent[newKey].url = newUrl;
}
if (isConverted) {
this.recent = newRecent;
await this.recentSetItem(null, true);
}
return isConverted;
}
mostRecentBook() {
if (this.recentLastKey) {
return this.recent[this.recentLastKey];

View File

@@ -1,4 +1,28 @@
export const versionHistory = [
{
showUntil: '2020-12-08',
header: '0.9.11 (2020-12-09)',
content:
`
<ul>
<li>оптимизации, улучшения работы конвертеров</li>
</ul>
`
},
{
showUntil: '2020-12-10',
header: '0.9.10 (2020-12-03)',
content:
`
<ul>
<li>добавлена частичная поддержка формата Djvu</li>
<li>добавлена поддержка Rar-архивов</li>
<li>исправления багов</li>
</ul>
`
},
{
showUntil: '2020-11-20',
header: '0.9.9 (2020-11-21)',

View File

@@ -1,5 +1,5 @@
<template>
<q-dialog v-model="active">
<q-dialog v-model="active" no-route-dismiss>
<div class="column bg-white no-wrap">
<div class="header row">
<div class="caption col row items-center q-ml-md">

View File

@@ -1,5 +1,5 @@
<template>
<q-dialog ref="dialog" v-model="active" @show="onShow" @hide="onHide">
<q-dialog ref="dialog" v-model="active" @show="onShow" @hide="onHide" no-route-dismiss>
<slot></slot>
<!--------------------------------------------------->

View File

@@ -1,3 +1,3 @@
#!/bin/bash
sudo -H -u www-data /home/beta.liberama/liberama
sudo -H -u www-data bash -c "cd /var/www; /home/beta.liberama/liberama"

View File

@@ -27,15 +27,31 @@ sudo chown www-data.www-data /home/liberama
### external converter `calibre`, download from https://download.calibre-ebook.com/
```
wget "https://download.calibre-ebook.com/3.39.1/calibre-3.39.1-x86_64.txz"
wget "https://download.calibre-ebook.com/5.5.0/calibre-5.5.0-x86_64.txz"
sudo -u www-data mkdir -p /home/liberama/data/calibre
sudo -u www-data tar xvf calibre-3.39.1-x86_64.txz -C /home/liberama/data/calibre
sudo -u www-data tar xvf calibre-5.5.0-x86_64.txz -C /home/liberama/data/calibre
```
### external converter `pdfalto`, github https://github.com/kermitt2/pdfalto
```
git clone https://github.com/kermitt2/pdfalto
cd pdfalto
git submodule update --init --recursive
cmake ./
добавить в начало CMakeLists.txt строчку: set(CMAKE_EXE_LINKER_FLAGS "-no-pie")
make
sudo -u www-data mkdir -p /home/liberama/data/pdfalto
sudo -u www-data cp pdfalto /home/liberama/data/pdfalto
```
### external converters
```
sudo apt install rar
sudo apt install libreoffice
sudo apt install poppler-utils
sudo apt install djvulibre-bin
sudo apt install libtiff-tools
sudo apt install graphicsmagick-imagemagick-compat
```
### nginx, server config

View File

@@ -1,7 +1,7 @@
#!/bin/bash
if ! pgrep -x "liberama" > /dev/null ; then
sudo -H -u www-data /home/liberama/liberama
sudo -H -u www-data bash -c "cd /var/www; /home/liberama/liberama"
else
echo "Process 'liberama' already running"
fi

View File

@@ -1,4 +1,4 @@
#!/bin/bash
sudo -H -u www-data /home/liberama/liberama &
sudo -H -u www-data bash -c "cd /var/www; /home/liberama/liberama" & disown
sudo service cron start

package-lock.json (generated, 17 changed lines)
View File

@@ -1,6 +1,6 @@
{
"name": "Liberama",
"version": "0.9.9",
"version": "0.9.11",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -10124,6 +10124,21 @@
"integrity": "sha512-ISBaA8xQNmwELC7eOjqFKMESB2VIqt4PPDD0nsS95b/9dZXvVKOlz9keMSnoGGKcOHXfTvDD6WMaRoSc9UuhRA==",
"dev": true
},
"pidusage": {
"version": "2.0.21",
"resolved": "https://registry.npmjs.org/pidusage/-/pidusage-2.0.21.tgz",
"integrity": "sha512-cv3xAQos+pugVX+BfXpHsbyz/dLzX+lr44zNMsYiGxUw+kV5sgQCIcLd1z+0vq+KyC7dJ+/ts2PsfgWfSC3WXA==",
"requires": {
"safe-buffer": "^5.2.1"
},
"dependencies": {
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
}
}
},
"pify": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "Liberama",
"version": "0.9.9",
"version": "0.9.11",
"author": "Book Pauk <bookpauk@gmail.com>",
"license": "CC0-1.0",
"repository": "bookpauk/liberama",
@@ -8,7 +8,7 @@
"node": ">=10.0.0"
},
"scripts": {
"dev": "nodemon --inspect --exec 'node server'",
"dev": "nodemon --inspect --ignore server/public --ignore server/data --ignore client --exec 'node server'",
"build:client": "webpack --config build/webpack.prod.config.js",
"build:linux": "npm run build:client && node build/linux && pkg -t latest-linux-x64 -o dist/linux/liberama .",
"build:win": "npm run build:client && node build/win && pkg -t latest-win-x64 -o dist/win/liberama .",
@@ -72,6 +72,7 @@
"multer": "^1.4.2",
"pako": "^1.0.11",
"path-browserify": "^1.0.0",
"pidusage": "^2.0.21",
"quasar": "^1.14.3",
"safe-buffer": "^5.2.0",
"sjcl": "^1.0.8",

View File

@@ -22,6 +22,7 @@ class ReaderController extends BaseController {
enableSitesFilter: (request.hasOwnProperty('enableSitesFilter') ? request.enableSitesFilter : true),
skipCheck: (request.hasOwnProperty('skipCheck') ? request.skipCheck : false),
isText: (request.hasOwnProperty('isText') ? request.isText : false),
uploadFileName: (request.hasOwnProperty('uploadFileName') ? request.uploadFileName : false),
});
const state = this.workerState.getState(workerId);
return (state ? state : {});

View File

@@ -136,8 +136,9 @@ class WebSocketController {
break;
i++;
if (i > 2*60*1000/refreshPause) {//2 мин ждем телодвижений воркера
if (i > 3*60*1000/refreshPause) {//3 мин ждем телодвижений воркера
this.send({state: 'error', error: 'Время ожидания процесса истекло'}, req, ws);
break;
}
i = (prevProgress != state.progress || prevState != state.state ? 1 : i);
}

View File

@@ -15,6 +15,13 @@ class FileDecompressor {
constructor(limitFileSize = 0) {
this.detector = new FileDetector();
this.limitFileSize = limitFileSize;
this.rarPath = '/usr/bin/rar';
this.rarExists = false;
(async() => {
if (await fs.pathExists(this.rarPath))
this.rarExists = true;
})();
}
async decompressNested(filename, outputDir) {
@@ -30,7 +37,11 @@ class FileDecompressor {
files: []
};
if (!fileType || !(fileType.ext == 'zip' || fileType.ext == 'bz2' || fileType.ext == 'gz' || fileType.ext == 'tar')) {
if (!fileType || !(
fileType.ext == 'zip' || fileType.ext == 'bz2' || fileType.ext == 'gz'
|| fileType.ext == 'tar' || (this.rarExists && fileType.ext == 'rar')
)
) {
return result;
}
@@ -94,6 +105,11 @@ class FileDecompressor {
async decompress(fileExt, filename, outputDir) {
let files = [];
if (fileExt == 'rar' && this.rarExists) {
files = await this.unRar(filename, outputDir);
return files;
}
switch (fileExt) {
case 'zip':
files = await this.unZip(filename, outputDir);
@@ -119,17 +135,16 @@ class FileDecompressor {
try {
return await zip.unpack(filename, outputDir, {
limitFileSize: this.limitFileSize,
limitFileCount: 1000,
limitFileCount: 10000,
decodeEntryNameCallback: (nameRaw) => {
return utils.bufferRemoveZeroes(nameRaw);
}
}
);
});
} catch (e) {
fs.emptyDir(outputDir);
return await zip.unpack(filename, outputDir, {
limitFileSize: this.limitFileSize,
limitFileCount: 1000,
limitFileCount: 10000,
decodeEntryNameCallback: (nameRaw) => {
nameRaw = utils.bufferRemoveZeroes(nameRaw);
const enc = textUtils.getEncodingLite(nameRaw);
@@ -156,7 +171,7 @@ class FileDecompressor {
if (this.limitFileSize) {
if ((await fs.stat(filename)).size > this.limitFileSize) {
reject('Файл слишком большой');
reject(new Error('Файл слишком большой'));
return;
}
}
@@ -222,7 +237,39 @@ class FileDecompressor {
inputStream.pipe(stream).pipe(outputStream);
})().catch(reject); });
}
}
async unRar(filename, outputDir) {
try {
const args = ['x', '-p-', '-y', filename, `${outputDir}`];
const result = await utils.spawnProcess(this.rarPath, {
killAfter: 60,
args
});
if (result.code == 0) {
const files = [];
await utils.findFiles(async(file) => {
const stat = await fs.stat(file);
files.push({path: path.relative(outputDir, file), size: stat.size});
}, outputDir);
return files;
} else {
const error = `${result.code}|FORLOG|, exec: ${this.rarPath}, args: ${args.join(' ')}, stdout: ${result.stdout}, stderr: ${result.stderr}`;
throw new Error(`Архиватор Rar завершился с ошибкой: ${error}`);
}
} catch(e) {
if (e.status == 'killed') {
throw new Error('Слишком долгое ожидание архиватора Rar');
} else if (e.status == 'error') {
throw new Error(e.error);
} else {
throw new Error(e);
}
}
}
async gzipBuffer(buf) {
return new Promise((resolve, reject) => {

View File

@@ -54,7 +54,7 @@ class FileDetector {
fromBuffer(buffer, callback) {
let result = null;
//console.log(buffer);
const invalidSignaturesList = this.validateSigantures();
if (invalidSignaturesList.length) {
return callback(invalidSignaturesList);

View File

@@ -676,7 +676,9 @@
{ "type": "equal", "end": 19, "bytes": "3c3f786d6c2076657273696f6e3d22312e3022" },
{ "type": "equal", "end": 22, "bytes": "efbbbf3c3f786d6c2076657273696f6e3d22312e3022" },
{ "type": "equal", "end": 19, "bytes": "3c3f786d6c2076657273696f6e3d27312e3027" },
{ "type": "equal", "end": 22, "bytes": "efbbbf3c3f786d6c2076657273696f6e3d27312e3027" }
{ "type": "equal", "end": 22, "bytes": "efbbbf3c3f786d6c2076657273696f6e3d27312e3027" },
{ "type": "equal", "end": 40, "bytes": "fffe3c003f0078006d006c002000760065007200730069006f006e003d00220031002e0030002200" },
{ "type": "equal", "end": 40, "bytes": "fffe3c003f0078006d006c002000760065007200730069006f006e003d00270031002e0030002700" }
]
}
]

View File

@@ -28,7 +28,7 @@ class LimitedQueue {
get(onPlaceChange) {
return new Promise((resolve, reject) => {
if (this.destroyed)
reject('destroyed');
reject(new Error('destroyed'));
const take = () => {
if (this.freed <= 0)
@@ -73,7 +73,7 @@ class LimitedQueue {
if (onPlaceChange)
onPlaceChange(this.listeners.length);
} else {
reject('Превышен размер очереди ожидания');
reject(new Error('Превышен размер очереди ожидания'));
}
}
});

View File

@@ -1 +0,0 @@
test

View File

@@ -5,8 +5,9 @@ const he = require('he');
const LimitedQueue = require('../../LimitedQueue');
const textUtils = require('./textUtils');
const utils = require('../../utils');
const xmlParser = require('../../xmlParser');
const queue = new LimitedQueue(2, 20, 3*60*1000);//3 минуты ожидание подвижек
const queue = new LimitedQueue(3, 20, 2*60*1000);//2 минуты ожидание подвижек
class ConvertBase {
constructor(config) {
@@ -14,7 +15,6 @@ class ConvertBase {
this.calibrePath = `${config.dataDir}/calibre/ebook-convert`;
this.sofficePath = '/usr/bin/soffice';
this.pdfToHtmlPath = '/usr/bin/pdftohtml';
}
async run(data, opts) {// eslint-disable-line no-unused-vars
@@ -27,9 +27,6 @@ class ConvertBase {
if (!await fs.pathExists(this.sofficePath))
throw new Error('Внешний конвертер LibreOffice не найден');
if (!await fs.pathExists(this.pdfToHtmlPath))
throw new Error('Внешний конвертер pdftohtml не найден');
}
async execConverter(path, args, onData, abort) {
@@ -42,20 +39,35 @@ class ConvertBase {
throw new Error('Слишком большая очередь конвертирования. Пожалуйста, попробуйте позже.');
}
abort = (abort ? abort : () => false);
const myAbort = () => {
return q.abort() || abort();
}
try {
if (myAbort())
throw new Error('abort');
const result = await utils.spawnProcess(path, {
killAfter: 600,
killAfter: 3600,//1 час
args,
onData: (data) => {
q.resetTimeout();
if (queue.freed > 0)
q.resetTimeout();
onData(data);
},
abort
//будем периодически проверять работу конвертера и если очереди нет, то разрешаем работу пинком onData
onUsage: (stats) => {
if (queue.freed > 0 && stats.cpu >= 10) {
q.resetTimeout();
onData('.');
}
},
onUsageInterval: 10,
abort: myAbort
});
if (result.code != 0) {
let error = result.code;
if (this.config.branch == 'development')
error = `exec: ${path}, stdout: ${result.stdout}, stderr: ${result.stderr}`;
const error = `${result.code}|FORLOG|, exec: ${path}, args: ${args.join(' ')}, stdout: ${result.stdout}, stderr: ${result.stderr}`;
throw new Error(`Внешний конвертер завершился с ошибкой: ${error}`);
}
} catch(e) {
@@ -91,61 +103,14 @@ class ConvertBase {
}
formatFb2(fb2) {
let out = '<?xml version="1.0" encoding="utf-8"?>';
out += '<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0" xmlns:l="http://www.w3.org/1999/xlink">';
out += this.formatFb2Node(fb2);
out += '</FictionBook>';
return out;
}
formatFb2Node(node, name) {
let out = '';
if (Array.isArray(node)) {
for (const n of node) {
out += this.formatFb2Node(n);
const out = xmlParser.formatXml({
FictionBook: {
_attrs: {xmlns: 'http://www.gribuser.ru/xml/fictionbook/2.0', 'xmlns:l': 'http://www.w3.org/1999/xlink'},
_a: [fb2],
}
} else if (typeof node == 'string') {
if (name)
out += `<${name}>${this.repSpaces(node)}</${name}>`;
else
out += this.repSpaces(node);
} else {
if (node._n)
name = node._n;
}, 'utf-8', this.repSpaces);
let attrs = '';
if (node._attrs) {
for (let attrName in node._attrs) {
attrs += ` ${attrName}="${node._attrs[attrName]}"`;
}
}
let tOpen = '';
let tBody = '';
let tClose = '';
if (name)
tOpen += `<${name}${attrs}>`;
if (node.hasOwnProperty('_t'))
tBody += this.repSpaces(node._t);
for (let nodeName in node) {
if (nodeName && nodeName[0] == '_' && nodeName != '_a')
continue;
const n = node[nodeName];
tBody += this.formatFb2Node(n, nodeName);
}
if (name)
tClose += `</${name}>`;
if (attrs == '' && name == 'p' && tBody.trim() == '')
out += '<empty-line/>'
else
out += `${tOpen}${tBody}${tClose}`;
}
return out;
return out.replace(/<p>\s*?<\/p>/g, '<empty-line/>');
}
}

View File

@@ -0,0 +1,119 @@
const fs = require('fs-extra');
const path = require('path');
const utils = require('../../utils');
const ConvertBase = require('./ConvertBase');
class ConvertDjvu extends ConvertBase {
check(data, opts) {
const {inputFiles} = opts;
return this.config.useExternalBookConverter &&
inputFiles.sourceFileType && inputFiles.sourceFileType.ext == 'djvu';
}
async run(data, opts) {
if (!this.check(data, opts))
return false;
const {inputFiles, callback, abort, uploadFileName} = opts;
const ddjvuPath = '/usr/bin/ddjvu';
if (!await fs.pathExists(ddjvuPath))
throw new Error('Внешний конвертер ddjvu не найден');
const tiffsplitPath = '/usr/bin/tiffsplit';
if (!await fs.pathExists(tiffsplitPath))
throw new Error('Внешний конвертер tiffsplitPath не найден');
const mogrifyPath = '/usr/bin/mogrify';
if (!await fs.pathExists(mogrifyPath))
throw new Error('Внешний конвертер mogrifyPath не найден');
const dir = `${inputFiles.filesDir}/`;
const inpFile = `${dir}${path.basename(inputFiles.sourceFile)}`;
const tifFile = `${inpFile}.tif`;
//конвертируем в tiff
let perc = 0;
await this.execConverter(ddjvuPath, ['-format=tiff', '-quality=50', '-verbose', inputFiles.sourceFile, tifFile], () => {
perc = (perc < 100 ? perc + 1 : 40);
callback(perc);
}, abort);
const tifFileSize = (await fs.stat(tifFile)).size;
let limitSize = 3*this.config.maxUploadFileSize;
if (tifFileSize > limitSize) {
throw new Error(`Файл для конвертирования слишком большой|FORLOG| ${tifFileSize} > ${limitSize}`);
}
//разбиваем на файлы
await this.execConverter(tiffsplitPath, [tifFile, dir], null, abort);
await fs.remove(tifFile);
//конвертируем в jpg
await this.execConverter(mogrifyPath, ['-quality', '20', '-scale', '2048', '-verbose', '-format', 'jpg', `${dir}*.tif`], () => {
perc = (perc < 100 ? perc + 1 : 40);
callback(perc);
}, abort);
//читаем изображения
limitSize = 2*this.config.maxUploadFileSize;
let imagesSize = 0;
const loadImage = async(image) => {
image.data = (await fs.readFile(image.file)).toString('base64');
image.name = path.basename(image.file);
imagesSize += image.data.length;
if (imagesSize > limitSize) {
throw new Error(`Файл для конвертирования слишком большой|FORLOG| imagesSize: ${imagesSize} > ${limitSize}`);
}
}
let files = [];
await utils.findFiles(async(file) => {
if (path.extname(file) == '.jpg')
files.push({name: file, base: path.basename(file)});
}, dir);
files.sort((a, b) => a.base.localeCompare(b.base));
let images = [];
let loading = [];
files.forEach(f => {
const image = {file: f.name};
images.push(image);
loading.push(loadImage(image));
});
await Promise.all(loading);
//формируем fb2
let titleInfo = {};
let desc = {_n: 'description', 'title-info': titleInfo};
let pars = [];
let body = {_n: 'body', section: {_a: [pars]}};
let binary = [];
let fb2 = [desc, body, binary];
let title = '';
if (uploadFileName)
title = uploadFileName;
titleInfo['book-title'] = title;
for (const image of images) {
const img = {_n: 'binary', _attrs: {id: image.name, 'content-type': 'image/jpeg'}, _t: image.data};
binary.push(img);
pars.push({_n: 'p', _t: ''});
pars.push({_n: 'image', _attrs: {'l:href': `#${image.name}`}});
}
return this.formatFb2(fb2);
}
}
module.exports = ConvertDjvu;

View File

@@ -1,5 +1,6 @@
const ConvertBase = require('./ConvertBase');
const iconv = require('iconv-lite');
const textUtils = require('./textUtils');
class ConvertFb2 extends ConvertBase {
check(data, opts) {
@@ -9,26 +10,46 @@ class ConvertFb2 extends ConvertBase {
}
async run(data, opts) {
if (!this.check(data, opts))
let newData = data;
//Корректируем кодировку, 16-битные кодировки должны стать utf-8
const encoding = textUtils.getEncoding(newData);
if (encoding.indexOf('UTF-16') == 0) {
newData = Buffer.from(iconv.decode(newData, encoding));
}
if (!this.check(newData, opts))
return false;
return this.checkEncoding(data);
return this.checkEncoding(newData);
}
checkEncoding(data) {
let result = data;
const left = data.indexOf('<?xml version="1.0"');
let q = '"';
let left = data.indexOf('<?xml version="1.0"');
if (left < 0) {
left = data.indexOf('<?xml version=\'1.0\'');
q = '\'';
}
if (left >= 0) {
const right = data.indexOf('?>', left);
if (right >= 0) {
const head = data.slice(left, right + 2).toString();
const m = head.match(/encoding="(.*?)"/);
const m = head.match(/encoding=['"](.*?)['"]/);
if (m) {
let encoding = m[1].toLowerCase();
if (encoding != 'utf-8') {
result = iconv.decode(data, encoding);
result = Buffer.from(result.toString().replace(m[0], 'encoding="utf-8"'));
//encoding может не соответсвовать реальной кодировке файла, поэтому:
let calcEncoding = textUtils.getEncoding(data);
if (calcEncoding.indexOf('ISO-8859') >= 0) {
calcEncoding = encoding;
}
result = iconv.decode(data, calcEncoding);
result = Buffer.from(result.toString().replace(m[0], `encoding=${q}utf-8${q}`));
}
}
}

View File

@@ -2,7 +2,7 @@ const fs = require('fs-extra');
const ConvertHtml = require('./ConvertHtml');
class ConvertDocX extends ConvertHtml {
class ConvertFb3 extends ConvertHtml {
async check(data, opts) {
const {inputFiles} = opts;
if (this.config.useExternalBookConverter &&
@@ -39,13 +39,14 @@ class ConvertDocX extends ConvertHtml {
const title = this.getTitle(text)
.replace(/<\/?p>/g, '')
;
text = `<title>${title}</title>` + text
text = `<fb2-title>${title}</fb2-title>` + text
.replace(/<title>/g, '<br><b>')
.replace(/<\/title>/g, '</b><br>')
.replace(/<subtitle>/g, '<br><br><subtitle>')
.replace(/<subtitle>/g, '<br><br><fb2-subtitle>')
.replace(/<\/subtitle>/g, '</fb2-subtitle>')
;
return await super.run(Buffer.from(text), {skipCheck: true, cutTitle: true});
return await super.run(Buffer.from(text), {skipCheck: true});
}
}
module.exports = ConvertDocX;
module.exports = ConvertFb3;

View File

@@ -34,7 +34,6 @@ class ConvertHtml extends ConvertBase {
} else {
isText = opts.isText;
}
let {cutTitle} = opts;
let titleInfo = {};
let desc = {_n: 'description', 'title-info': titleInfo};
@@ -44,12 +43,17 @@ class ConvertHtml extends ConvertBase {
let fb2 = [desc, body, binary];
let title = '';
let author = '';
let inTitle = false;
let inSectionTitle = false;
let inAuthor = false;
let inSubTitle = false;
let inImage = false;
let image = {};
let bold = false;
let italic = false;
let superscript = false;
let subscript = false;
let begining = true;
let spaceCounter = [];
@@ -62,7 +66,7 @@ class ConvertHtml extends ConvertBase {
};
const growParagraph = (text) => {
if (!pars.length)
if (!pars.length || pars[pars.length - 1]._n != 'p')
newParagraph();
const l = pars.length;
@@ -94,12 +98,16 @@ class ConvertHtml extends ConvertBase {
const onTextNode = (text, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
text = this.escapeEntities(text);
if (!cutCounter && !(cutTitle && inTitle)) {
if (!(cutCounter || inTitle || inSectionTitle || inSubTitle)) {
let tOpen = '';
tOpen += (inSubTitle ? '<subtitle>' : '');
tOpen += (bold ? '<strong>' : '');
tOpen += (italic ? '<emphasis>' : '');
tOpen += (superscript ? '<sup>' : '');
tOpen += (subscript ? '<sub>' : '');
let tClose = ''
tClose += (subscript ? '</sub>' : '');
tClose += (superscript ? '</sup>' : '');
tClose += (italic ? '</emphasis>' : '');
tClose += (bold ? '</strong>' : '');
tClose += (inSubTitle ? '</subtitle>' : '');
@@ -110,12 +118,22 @@ class ConvertHtml extends ConvertBase {
if (inTitle && !title)
title = text;
if (inAuthor && !author)
author = text;
if (inSectionTitle) {
pars.unshift({_n: 'title', _t: text});
}
if (inSubTitle) {
pars.push({_n: 'subtitle', _t: text});
}
if (inImage) {
image._t = text;
binary.push(image);
pars.push({_n: 'image', _attrs: {'l:href': '#' + image._attrs.id}, _t: ''});
newParagraph();
}
};
@@ -140,15 +158,27 @@ class ConvertHtml extends ConvertBase {
bold = true;
break;
}
if (tag == 'sup')
superscript = true;
if (tag == 'sub')
subscript = true;
}
if (tag == 'title' || tag == 'cut-title') {
if (tag == 'title' || tag == 'fb2-title') {
inTitle = true;
if (tag == 'cut-title')
cutTitle = true;
}
if (tag == 'subtitle') {
if (tag == 'fb2-author') {
inAuthor = true;
}
if (tag == 'fb2-section-title') {
inSectionTitle = true;
}
if (tag == 'fb2-subtitle') {
inSubTitle = true;
}
@@ -156,7 +186,7 @@ class ConvertHtml extends ConvertBase {
inImage = true;
const attrs = sax.getAttrsSync(tail);
image = {_n: 'binary', _attrs: {id: attrs.name.value, 'content-type': attrs.type.value}, _t: ''};
}
}
};
const onEndNode = (tag, tail, singleTag, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
@@ -179,12 +209,26 @@ class ConvertHtml extends ConvertBase {
bold = false;
break;
}
if (tag == 'sup')
superscript = false;
if (tag == 'sub')
subscript = false;
}
if (tag == 'title' || tag == 'cut-title')
if (tag == 'title' || tag == 'fb2-title')
inTitle = false;
if (tag == 'subtitle')
if (tag == 'fb2-author') {
inAuthor = false;
}
if (tag == 'fb2-section-title') {
inSectionTitle = false;
}
if (tag == 'fb2-subtitle')
inSubTitle = false;
if (tag == 'fb2-image')
@@ -195,10 +239,17 @@ class ConvertHtml extends ConvertBase {
sax.parseSync(buf, {
onStartNode, onEndNode, onTextNode,
innerCut: new Set(['head', 'script', 'style', 'binary', 'fb2-image'])
innerCut: new Set(['head', 'script', 'style', 'binary', 'fb2-image', 'fb2-title', 'fb2-author'])
});
titleInfo['book-title'] = title;
if (author)
titleInfo.author = {'last-name': author};
body.section._a[0] = pars;
//console.log(JSON.stringify(fb2, null, 2));
//подозрение на чистый текст, надо разбить на параграфы
if (isText || (buf.length > 30*1024 && pars.length < buf.length/2000)) {
let total = 0;
@@ -218,7 +269,7 @@ class ConvertHtml extends ConvertBase {
let i = 0;
//если разброс не слишком большой, выделяем параграфы
if (d < 10 && spaceCounter.length) {
if (d < 20 && spaceCounter.length) {
total /= 20;
i = spaceCounter.length - 1;
while (i > 0 && (!spaceCounter[i] || spaceCounter[i] < total)) i--;
@@ -228,56 +279,49 @@ class ConvertHtml extends ConvertBase {
if (parIndent > 2) parIndent--;
let newPars = [];
let curPar = {};
const newPar = () => {
newPars.push({_n: 'p', _t: ''});
curPar = {_n: 'p', _t: ''};
newPars.push(curPar);
};
const growPar = (text) => {
if (!newPars.length)
newPar();
const l = newPars.length;
newPars[l - 1]._t += text;
}
i = 0;
for (const par of pars) {
if (par._n != 'p') {
newPars.push(par);
continue;
}
if (i > 0)
newPar();
i++;
let j = 0;
newPar();
const lines = par._t.split('\n');
for (let line of lines) {
line = repCrLfTab(line);
for (let j = 0; j < lines.length; j++) {
const line = repCrLfTab(lines[j]);
let l = 0;
while (l < line.length && line[l] == ' ') {
l++;
}
if (l >= parIndent || line == '') {
if (j > 0)
newPar();
j++;
if (j > 0 &&
(l >= parIndent ||
(j < lines.length - 1 && line == '')
)
) {
newPar();
}
growPar(line.trim() + ' ');
curPar._t += line.trim() + ' ';
}
}
body.section._a[0] = newPars;
} else {
body.section._a[0] = pars;
}
//убираем лишнее, делаем валидный fb2, т.к. в рез-те разбиения на параграфы бьются теги
bold = false;
italic = false;
superscript = false;
subscript = false;
inSubTitle = false;
pars = body.section._a[0];
for (let i = 0; i < pars.length; i++) {
@@ -297,7 +341,11 @@ class ConvertHtml extends ConvertBase {
tOpen += (inSubTitle ? '<subtitle>' : '');
tOpen += (bold ? '<strong>' : '');
tOpen += (italic ? '<emphasis>' : '');
tOpen += (superscript ? '<sup>' : '');
tOpen += (subscript ? '<sub>' : '');
let tClose = ''
tClose += (subscript ? '</sub>' : '');
tClose += (superscript ? '</sup>' : '');
tClose += (italic ? '</emphasis>' : '');
tClose += (bold ? '</strong>' : '');
tClose += (inSubTitle ? '</subtitle>' : '');
@@ -313,6 +361,10 @@ class ConvertHtml extends ConvertBase {
bold = true;
if (tag == 'emphasis')
italic = true;
if (tag == 'sup')
superscript = true;
if (tag == 'sub')
subscript = true;
if (tag == 'subtitle')
inSubTitle = true;
}
@@ -322,6 +374,10 @@ class ConvertHtml extends ConvertBase {
bold = false;
if (tag == 'emphasis')
italic = false;
if (tag == 'sup')
superscript = false;
if (tag == 'sub')
subscript = false;
if (tag == 'subtitle')
inSubTitle = false;
}

View File

@@ -1,9 +1,11 @@
//const _ = require('lodash');
const fs = require('fs-extra');
const path = require('path');
const sax = require('../../sax');
const utils = require('../../utils');
const ConvertHtml = require('./ConvertHtml');
const xmlParser = require('../../xmlParser');
class ConvertPdf extends ConvertHtml {
check(data, opts) {
@@ -16,15 +18,24 @@ class ConvertPdf extends ConvertHtml {
async run(notUsed, opts) {
if (!this.check(notUsed, opts))
return false;
await this.checkExternalConverterPresent();
const {inputFiles, callback, abort} = opts;
const {inputFiles, callback, abort, uploadFileName} = opts;
const outFile = `${inputFiles.filesDir}/${utils.randomHexString(10)}.xml`;
const inpFile = inputFiles.sourceFile;
const outBasename = `${inputFiles.filesDir}/${utils.randomHexString(10)}`;
const outFile = `${outBasename}.xml`;
const metaFile = `${outBasename}_metadata.xml`;
const pdfaltoPath = `${this.config.dataDir}/pdfalto/pdfalto`;
if (!await fs.pathExists(pdfaltoPath))
throw new Error('Внешний конвертер pdfalto не найден');
//конвертируем в xml
let perc = 0;
await this.execConverter(this.pdfToHtmlPath, ['-c', '-s', '-xml', inputFiles.sourceFile, outFile], () => {
await this.execConverter(pdfaltoPath, [inpFile, outFile], () => {
perc = (perc < 80 ? perc + 10 : 40);
callback(perc);
}, abort);
@@ -33,17 +44,22 @@ class ConvertPdf extends ConvertHtml {
const data = await fs.readFile(outFile);
callback(90);
await utils.sleep(100);
//парсим xml
let lines = [];
let pagelines = [];
let line = {text: ''};
let page = {};
let fonts = {};
let sectionTitleFound = false;
let images = [];
let loading = [];
let inText = false;
let bold = false;
let italic = false;
let title = '';
let prevTop = 0;
let author = '';
let i = -1;
let titleCount = 0;
const loadImage = async(image) => {
const src = path.parse(image.src);
@@ -57,7 +73,7 @@ class ConvertPdf extends ConvertHtml {
image.type = type;
image.name = src.base;
}
}
};
const putImage = (curTop) => {
if (!isNaN(curTop) && images.length) {
@@ -67,104 +83,180 @@ class ConvertPdf extends ConvertHtml {
images.shift();
}
}
}
};
const onTextNode = (text, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
if (!cutCounter && inText) {
let tOpen = (bold ? '<b>' : '');
tOpen += (italic ? '<i>' : '');
let tClose = (italic ? '</i>' : '');
tClose += (bold ? '</b>' : '');
const putPageLines = () => {
pagelines.sort((a, b) => (a.top - b.top)*10000 + (a.left - b.left))
//объединяем в одну строку равные по высоте
const pl = [];
let pt = 0;
let j = -1;
pagelines.forEach(line => {
//добавим закрывающий тег стиля
line.text += line.tClose;
lines[i].text += `${tOpen}${text}${tClose} `;
if (titleCount < 2 && text.trim() != '') {
title += text + (titleCount ? '' : ' - ');
titleCount++;
//проверим, возможно это заголовок
if (line.fonts.length == 1 && line.pageWidth) {
const f = (line.fonts.length ? fonts[line.fonts[0]] : null);
const centerLeft = (line.pageWidth - line.width)/2;
if (f && f.isBold && Math.abs(centerLeft - line.left) < 3) {
if (!sectionTitleFound) {
line.isSectionTitle = true;
sectionTitleFound = true;
} else {
line.isSubtitle = true;
}
}
}
}
//объединяем
if (pt == 0 || Math.abs(pt - line.top) > 3) {
j++;
pl[j] = line;
} else {
pl[j].text += ` ${line.text}`;
}
pt = line.top;
});
//заполняем lines
const lastIndex = i;
pl.forEach(line => {
putImage(line.top);
//добавим пустую строку, если надо
const prevLine = (i > lastIndex ? lines[i] : {fonts: [], top: 0});
if (prevLine && !prevLine.isImage) {
const f = (prevLine.fonts.length ? fonts[prevLine.fonts[0]] : (line.fonts.length ? fonts[line.fonts[0]] : null));
if (f && f.fontSize && !line.isImage && line.top - prevLine.top > f.fontSize*1.8) {
i++;
lines[i] = {text: '<br>'};
}
}
i++;
lines[i] = line;
});
pagelines = [];
putImage(100000);
};
const onStartNode = (tag, tail, singleTag, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
if (!cutCounter) {
if (inText) {
switch (tag) {
case 'i':
italic = true;
break;
case 'b':
bold = true;
break;
if (tag == 'textstyle') {
const attrs = sax.getAttrsSync(tail);
const fontId = (attrs.id && attrs.id.value ? attrs.id.value : '');
const fontStyle = (attrs.fontstyle && attrs.fontstyle.value ? attrs.fontstyle.value : '');
const fontSize = (attrs.fontsize && attrs.fontsize.value ? attrs.fontsize.value : '');
if (fontId) {
const styleTags = {bold: 'b', italics: 'i', superscript: 'sup', subscript: 'sub'};
const f = fonts[fontId] = {tOpen: '', tClose: '', isBold: false, fontSize};
if (fontStyle) {
const styles = fontStyle.split(' ');
styles.forEach(style => {
const s = styleTags[style];
if (s) {
f.tOpen += `<${s}>`;
f.tClose = `</${s}>${f.tClose}`;
if (s == 'b')
f.isBold = true;
}
});
}
}
}
if (tag == 'text' && !inText) {
let attrs = sax.getAttrsSync(tail);
const line = {
text: '',
top: parseInt((attrs.top && attrs.top.value ? attrs.top.value : null), 10),
left: parseInt((attrs.left && attrs.left.value ? attrs.left.value : null), 10),
width: parseInt((attrs.width && attrs.width.value ? attrs.width.value : null), 10),
height: parseInt((attrs.height && attrs.height.value ? attrs.height.value : null), 10),
};
if (tag == 'page') {
const attrs = sax.getAttrsSync(tail);
page = {
width: parseInt((attrs.width && attrs.width.value ? attrs.width.value : null), 10),
};
if (line.width != 0 || line.height != 0) {
inText = true;
if (isNaN(line.top) || isNaN(prevTop) || (Math.abs(prevTop - line.top) > 3)) {
putImage(line.top);
i++;
lines[i] = line;
}
prevTop = line.top;
}
putPageLines();
}
if (tag == 'textline') {
const attrs = sax.getAttrsSync(tail);
line = {
text: '',
top: parseInt((attrs.vpos && attrs.vpos.value ? attrs.vpos.value : null), 10),
left: parseInt((attrs.hpos && attrs.hpos.value ? attrs.hpos.value : null), 10),
width: parseInt((attrs.width && attrs.width.value ? attrs.width.value : null), 10),
height: parseInt((attrs.height && attrs.height.value ? attrs.height.value : null), 10),
tOpen: '',
tClose: '',
isSectionTitle: false,
isSubtitle: false,
pageWidth: page.width,
fonts: [],
};
if (line.width != 0 || line.height != 0) {
pagelines.push(line);
}
}
if (tag == 'image') {
const attrs = sax.getAttrsSync(tail);
const src = (attrs.src && attrs.src.value ? attrs.src.value : '');
if (tag == 'string') {
const attrs = sax.getAttrsSync(tail);
if (attrs.content && attrs.content.value) {
let tOpen = '';
let tClose = '';
const fontId = (attrs.stylerefs && attrs.stylerefs.value ? attrs.stylerefs.value : '');
if (fontId && fonts[fontId]) {
tOpen = fonts[fontId].tOpen;
tClose = fonts[fontId].tClose;
if (!line.fonts.length || line.fonts[0] != fontId)
line.fonts.push(fontId);
}
if (line.tOpen != tOpen) {
line.text += line.tClose + tOpen;
line.tOpen = tOpen;
line.tClose = tClose;
}
line.text += `${line.text.length ? ' ' : ''}${attrs.content.value}`;
}
}
if (tag == 'illustration') {
const attrs = sax.getAttrsSync(tail);
if (attrs.type && attrs.type.value == 'image') {
let src = (attrs.fileid && attrs.fileid.value ? attrs.fileid.value : '');
if (src) {
const image = {
isImage: true,
src,
data: '',
type: '',
top: parseInt((attrs.top && attrs.top.value ? attrs.top.value : null), 10) || 0,
top: parseInt((attrs.vpos && attrs.vpos.value ? attrs.vpos.value : null), 10) || 0,
left: parseInt((attrs.hpos && attrs.hpos.value ? attrs.hpos.value : null), 10) || 0,
width: parseInt((attrs.width && attrs.width.value ? attrs.width.value : null), 10) || 0,
height: parseInt((attrs.height && attrs.height.value ? attrs.height.value : null), 10) || 0,
};
loading.push(loadImage(image));
images.push(image);
images.sort((a, b) => a.top - b.top)
const exists = images.filter(img => (img.top == image.top && img.left == image.left && img.width == image.width && img.height == image.height));
if (!exists.length) {
loading.push(loadImage(image));
images.push(image);
images.sort((a, b) => (a.top - b.top)*10000 + (a.left - b.left));
}
}
}
if (tag == 'page') {
putImage(100000);
}
}
};
const onEndNode = (tag, tail, singleTag, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
if (inText) {
switch (tag) {
case 'i':
italic = false;
break;
case 'b':
bold = false;
break;
}
}
if (tag == 'text')
inText = false;
};
let buf = this.decode(data).toString();
sax.parseSync(buf, {
onStartNode, onEndNode, onTextNode
onStartNode
});
putImage(100000);
putPageLines();
await Promise.all(loading);
await utils.sleep(100);
//найдем параграфы и отступы
const indents = [];
@@ -185,16 +277,51 @@ class ConvertPdf extends ConvertHtml {
}
indents[0] = 0;
//title
if (fs.pathExists(metaFile)) {
const metaXmlString = (await fs.readFile(metaFile)).toString();
let metaXmlParsed = xmlParser.parseXml(metaXmlString);
metaXmlParsed = xmlParser.simplifyXmlParsed(metaXmlParsed);
if (metaXmlParsed.metadata) {
title = (metaXmlParsed.metadata.title ? metaXmlParsed.metadata.title._t : '');
author = (metaXmlParsed.metadata.author ? metaXmlParsed.metadata.author._t : '');
}
}
if (!title && uploadFileName)
title = uploadFileName;
//console.log(JSON.stringify(lines, null, 2));
//формируем текст
let text = `<title>${title}</title>`;
const limitSize = 2*this.config.maxUploadFileSize;
let text = '';
if (title)
text += `<fb2-title>${title}</fb2-title>`;
if (author)
text += `<fb2-author>${author}</fb2-author>`;
let concat = '';
let sp = '';
for (const line of lines) {
if (text.length > limitSize) {
throw new Error(`Файл для конвертирования слишком большой|FORLOG| text.length: ${text.length} > ${limitSize}`);
}
if (line.isImage) {
text += `<fb2-image type="${line.type}" name="${line.name}">${line.data}</fb2-image>`;
continue;
}
if (line.isSectionTitle) {
text += `<fb2-section-title>${line.text.trim()}</fb2-section-title>`;
continue;
}
if (line.isSubtitle) {
text += `<br><fb2-subtitle>${line.text.trim()}</fb2-subtitle>`;
continue;
}
if (concat == '') {
const left = line.left || 0;
sp = ' '.repeat(indents[left]);
@@ -212,7 +339,9 @@ class ConvertPdf extends ConvertHtml {
if (concat)
text += sp + concat + "\n";
return await super.run(Buffer.from(text), {skipCheck: true, isText: true, cutTitle: true});
//console.log(text);
await utils.sleep(100);
return await super.run(Buffer.from(text), {skipCheck: true, isText: true});
}
}

View File

@@ -48,7 +48,7 @@ class ConvertSites extends ConvertHtml {
if (text === false)
return false;
return await super.run(Buffer.from(text), {skipCheck: true, cutTitle: true});
return await super.run(Buffer.from(text), {skipCheck: true});
}
getTitle(text) {
@@ -79,7 +79,7 @@ class ConvertSites extends ConvertHtml {
let book = this.getTitle(text);
book = book.replace(' (fb2) | Флибуста', '');
const title = `<title>${author}${(author ? ' - ' : '')}${book}</title>`;
const title = `<fb2-title>${author}${(author ? ' - ' : '')}${book}</fb2-title>`;
let begin = '<h3 class="book">';
if (text.indexOf(begin) <= 0)
@@ -95,12 +95,12 @@ class ConvertSites extends ConvertHtml {
return text.substring(l, r)
.replace(/blockquote class="?book"?/g, 'p')
.replace(/<br\/?>\s*<\/h3>/g, '</h3>')
.replace(/<h3 class="?book"?>/g, '<br><br><subtitle>')
.replace(/<h5 class="?book"?>/g, '<br><br><subtitle>')
.replace(/<h3>/g, '<br><br><subtitle>')
.replace(/<h5>/g, '<br><br><subtitle>')
.replace(/<\/h3>/g, '</subtitle><br>')
.replace(/<\/h5>/g, '</subtitle><br>')
.replace(/<h3 class="?book"?>/g, '<br><br><fb2-subtitle>')
.replace(/<h5 class="?book"?>/g, '<br><br><fb2-subtitle>')
.replace(/<h3>/g, '<br><br><fb2-subtitle>')
.replace(/<h5>/g, '<br><br><fb2-subtitle>')
.replace(/<\/h3>/g, '</fb2-subtitle><br>')
.replace(/<\/h5>/g, '</fb2-subtitle><br>')
.replace(/<div class="?stanza"?>/g, '<br>')
.replace(/<div>/g, '<br>')
+ title;

View File

@@ -4,6 +4,7 @@ const FileDetector = require('../../FileDetector');
//порядок важен
const convertClassFactory = [
require('./ConvertEpub'),
require('./ConvertDjvu'),
require('./ConvertPdf'),
require('./ConvertRtf'),
require('./ConvertDocX'),

View File

@@ -12,7 +12,7 @@ const utils = require('../utils');
const log = new (require('../AppLogger'))().log;//singleton
const cleanDirPeriod = 60*60*1000;//1 раз в час
const queue = new LimitedQueue(5, 100, 5*60*1000);//5 минут ожидание подвижек
const queue = new LimitedQueue(5, 100, 2*60*1000 + 15000);//2 минуты ожидание подвижек
let instance = null;
@@ -30,7 +30,7 @@ class ReaderWorker {
this.workerState = new WorkerState();
this.down = new FileDownloader(config.maxUploadFileSize);
this.decomp = new FileDecompressor(2*config.maxUploadFileSize);
this.decomp = new FileDecompressor(3*config.maxUploadFileSize);
this.bookConverter = new BookConverter(this.config);
this.remoteWebDavStorage = false;
@@ -81,7 +81,7 @@ class ReaderWorker {
const decompDirname = utils.randomHexString(30);
//download or use uploaded
if (url.indexOf('file://') != 0) {//download
if (url.indexOf('disk://') != 0) {//download
const downdata = await this.down.load(url, (progress) => {
wState.set({progress});
}, q.abort);
@@ -130,6 +130,8 @@ class ReaderWorker {
convertFilename = `${this.config.tempDownloadDir}/${tempFilename2}`;
await this.bookConverter.convertToFb2(decompFiles, convertFilename, opts, progress => {
wState.set({progress});
if (queue.freed > 0)
q.resetTimeout();
}, q.abort);
//сжимаем файл в tmp, если там уже нет с тем же именем-sha256
@@ -171,10 +173,15 @@ class ReaderWorker {
} catch (e) {
log(LM_ERR, e.stack);
let mes = e.message.split('|FORLOG|');
if (mes[1])
log(LM_ERR, mes[0] + mes[1]);
log(LM_ERR, `downloadedFilename: ${downloadedFilename}`);
if (e.message == 'abort')
e.message = overLoadMes;
wState.set({state: 'error', error: e.message});
mes = mes[0];
if (mes == 'abort')
mes = overLoadMes;
wState.set({state: 'error', error: mes});
} finally {
//clean
if (q)
@@ -209,7 +216,7 @@ class ReaderWorker {
await fs.remove(file.path);
}
return `file://${hash}`;
return `disk://${hash}`;
}
async restoreRemoteFile(filename) {

View File

@@ -76,13 +76,13 @@ class ZipStreamer {
if (limitFileCount || limitFileSize || decodeEntryNameCallback) {
const entries = Object.values(unzip.entries());
if (limitFileCount && entries.length > limitFileCount) {
reject('Слишком много файлов');
reject(new Error('Слишком много файлов'));
return;
}
for (const entry of entries) {
if (limitFileSize && !entry.isDirectory && entry.size > limitFileSize) {
reject('Файл слишком большой');
reject(new Error('Файл слишком большой'));
return;
}

View File

@@ -6,7 +6,8 @@ function parseSync(xstr, options) {
onCdata: _onCdata = dummy,
onComment: _onComment = dummy,
onProgress: _onProgress = dummy,
innerCut = new Set()
innerCut = new Set(),
lowerCase = true,
} = options;
let i = 0;
@@ -91,7 +92,8 @@ function parseSync(xstr, options) {
} else {
tag = tagData;
}
tag = tag.toLowerCase();
if (lowerCase)
tag = tag.toLowerCase();
if (innerCut.has(tag) && (!cutCounter || cutTag === tag)) {
if (!cutCounter)
@@ -146,7 +148,8 @@ async function parse(xstr, options) {
onCdata: _onCdata = dummy,
onComment: _onComment = dummy,
onProgress: _onProgress = dummy,
innerCut = new Set()
innerCut = new Set(),
lowerCase = true,
} = options;
let i = 0;
@@ -231,7 +234,8 @@ async function parse(xstr, options) {
} else {
tag = tagData;
}
tag = tag.toLowerCase();
if (lowerCase)
tag = tag.toLowerCase();
if (innerCut.has(tag) && (!cutCounter || cutTag === tag)) {
if (!cutCounter)
@@ -276,7 +280,7 @@ async function parse(xstr, options) {
await _onProgress(100);
}
function getAttrsSync(tail) {
function getAttrsSync(tail, lowerCase = true) {
let result = {};
let name = '';
let value = '';
@@ -287,13 +291,16 @@ function getAttrsSync(tail) {
let waitEq = false;
const pushResult = () => {
if (lowerCase)
name = name.toLowerCase();
if (name != '') {
const fn = name;
let ns = '';
if (name.indexOf(':') >= 0) {
[ns, name] = name.split(':');
if (fn.indexOf(':') >= 0) {
[ns, name] = fn.split(':');
}
result[name] = {value, ns};
result[name] = {value, ns, fn};
}
name = '';
value = '';

View File

@@ -1,7 +1,9 @@
const { spawn } = require('child_process');
const fs = require('fs-extra');
const path = require('path');
const crypto = require('crypto');
const baseX = require('base-x');
const pidusage = require('pidusage');
const BASE36 = '0123456789abcdefghijklmnopqrstuvwxyz';
const bs36 = baseX(BASE36);
@@ -45,10 +47,11 @@ async function touchFile(filename) {
}
function spawnProcess(cmd, opts) {
let {args, killAfter, onData, abort} = opts;
let {args, killAfter, onData, onUsage, onUsageInterval, abort} = opts;
killAfter = (killAfter ? killAfter : 120);//seconds
onData = (onData ? onData : () => {});
args = (args ? args : []);
onUsageInterval = (onUsageInterval ? onUsageInterval : 30);//seconds
return new Promise((resolve, reject) => { (async() => {
let resolved = false;
@@ -75,9 +78,19 @@ function spawnProcess(cmd, opts) {
reject({status: 'error', error, stdout, stderr});
});
//ждем процесс, контролируем его работу раз в секунду
let onUsageCounter = onUsageInterval;
while (!resolved) {
await sleep(1000);
killAfter -= 1;
onUsageCounter--;
if (onUsage && onUsageCounter <= 0) {
const stats = await pidusage(proc.pid);
onUsage(stats);
onUsageCounter = onUsageInterval;
}
killAfter--;
if (killAfter <= 0 || (abort && abort())) {
process.kill(proc.pid);
if (killAfter <= 0) {
@@ -91,6 +104,22 @@ function spawnProcess(cmd, opts) {
})().catch(reject); });
}
async function findFiles(callback, dir) {
if (!(callback && dir))
return;
let result = true;
const files = await fs.readdir(dir, { withFileTypes: true });
for (const file of files) {
const found = path.resolve(dir, file.name);
if (file.isDirectory())
result = await findFiles(callback, found);
else
await callback(found);
}
return result;
}
module.exports = {
toBase36,
fromBase36,
@@ -99,5 +128,6 @@ module.exports = {
sleep,
randomHexString,
touchFile,
spawnProcess
spawnProcess,
findFiles
};

server/core/xmlParser.js (new file, 143 lines)
View File

@@ -0,0 +1,143 @@
const sax = require('./sax');
function formatXml(xmlParsed, encoding = 'utf-8', textFilterFunc) {
let out = `<?xml version="1.0" encoding="${encoding}"?>`;
out += formatXmlNode(xmlParsed, textFilterFunc);
return out;
}
function formatXmlNode(node, textFilterFunc) {
textFilterFunc = (textFilterFunc ? textFilterFunc : text => text);
const formatNode = (node, name) => {
let out = '';
if (Array.isArray(node)) {
for (const n of node) {
out += formatNode(n);
}
} else if (typeof node == 'string') {
if (name)
out += `<${name}>${textFilterFunc(node)}</${name}>`;
else
out += textFilterFunc(node);
} else {
if (node._n)
name = node._n;
let attrs = '';
if (node._attrs) {
for (let attrName in node._attrs) {
attrs += ` ${attrName}="${node._attrs[attrName]}"`;
}
}
let tOpen = '';
let tBody = '';
let tClose = '';
if (name)
tOpen += `<${name}${attrs}>`;
if (node.hasOwnProperty('_t'))
tBody += textFilterFunc(node._t);
for (let nodeName in node) {
if (nodeName && nodeName[0] == '_' && nodeName != '_a')
continue;
const n = node[nodeName];
tBody += formatNode(n, nodeName);
}
if (name)
tClose += `</${name}>`;
out += `${tOpen}${tBody}${tClose}`;
}
return out;
}
return formatNode(node);
}
function parseXml(xmlString, lowerCase = true) {
let result = {};
let node = result;
const onTextNode = (text, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
node._t = text;
};
const onStartNode = (tag, tail, singleTag, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
if (tag == '?xml')
return;
const newNode = {_n: tag, _p: node};
if (tail) {
const parsedAttrs = sax.getAttrsSync(tail, lowerCase);
const atKeys = Object.keys(parsedAttrs);
if (atKeys.length) {
const attrs = {};
for (let i = 0; i < atKeys.length; i++) {
const attrName = atKeys[i];
attrs[parsedAttrs[attrName].fn] = parsedAttrs[attrName].value;
}
newNode._attrs = attrs;
}
}
if (!node._a)
node._a = [];
node._a.push(newNode);
node = newNode;
};
const onEndNode = (tag, tail, singleTag, cutCounter, cutTag) => {// eslint-disable-line no-unused-vars
if (node._p && node._n == tag)
node = node._p;
};
sax.parseSync(xmlString, {
onStartNode, onEndNode, onTextNode, lowerCase
});
if (result._a)
result = result._a[0];
return result;
}
function simplifyXmlParsed(node) {
const simplifyNodeArray = (a) => {
const result = {};
for (let i = 0; i < a.length; i++) {
const child = a[i];
if (child._n && !result[child._n]) {
result[child._n] = {};
if (child._a) {
result[child._n] = simplifyNodeArray(child._a);
}
if (child._t) {
result[child._n]._t = child._t;
}
if (child._attrs) {
result[child._n]._attrs = child._attrs;
}
}
}
return result;
};
return simplifyNodeArray([node]);
}
module.exports = {
formatXml,
formatXmlNode,
parseXml,
simplifyXmlParsed
}