Merge branch 'develop' into feature/quasar

Book Pauk
2020-02-06 22:04:31 +07:00
18 changed files with 1190 additions and 39 deletions

View File

@@ -12,6 +12,18 @@
 </div>
 </div>
+<div class="address">
+<img class="logo" src="./assets/paypal.png">
+<div class="para">{{ paypalAddress }}
+<el-tooltip :open-delay="500" effect="light">
+<template slot="content">
+Скопировать
+</template>
+<i class="el-icon-copy-document copy-icon" @click="copyAddress(paypalAddress, 'Paypal-адрес')"></i>
+</el-tooltip>
+</div>
+</div>
 <div class="address">
 <img class="logo" src="./assets/bitcoin.png">
 <div class="para">{{ bitcoinAddress }}

@@ -53,6 +65,7 @@ export default @Component({
 })
 class DonateHelpPage extends Vue {
 yandexAddress = '410018702323056';
+paypalAddress = 'bookpauk@gmail.com';
 bitcoinAddress = '3EbgZ7MK1UVaN38Gty5DCBtS4PknM4Ut85';
 litecoinAddress = 'MP39Riec4oSNB3XMjiquKoLWxbufRYNXxZ';
 moneroAddress = '8BQPnvHcPSHM5gMQsmuypDgx9NNsYqwXKfDDuswEyF2Q2ewQSfd2pkK6ydH2wmMyq2JViZvy9DQ35hLMx7g72mFWNJTPtnz';

Binary file not shown (17 KiB)

View File

@@ -68,7 +68,7 @@ class PasteTextPage extends Vue {
 }
 loadBuffer() {
-this.$emit('load-buffer', {buffer: `<cut-title>${this.bookTitle}</cut-title>${this.$refs.textArea.value}`});
+this.$emit('load-buffer', {buffer: `<buffer><cut-title>${utils.escapeXml(this.bookTitle)}</cut-title>${this.$refs.textArea.value}</buffer>`});
 this.close();
 }

View File

@@ -1,4 +1,16 @@
 export const versionHistory = [
+{
+showUntil: '2020-02-05',
+header: '0.8.4 (2020-02-06)',
+content:
+`
+<ul>
+<li>добавлен paypal-адрес для пожертвований</li>
+<li>исправления багов</li>
+</ul>
+`
+},
 {
 showUntil: '2020-01-27',
 header: '0.8.3 (2020-01-28)',

View File

@@ -194,3 +194,12 @@ export function parseQuery(str) {
 }
 return query;
 }
+
+export function escapeXml(str) {
+return str.replace(/&/g, '&amp;')
+.replace(/</g, '&lt;')
+.replace(/>/g, '&gt;')
+.replace(/"/g, '&quot;')
+.replace(/'/g, '&apos;')
+;
+}
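Illustrative sketch, not part of the commit (the title string and import path are hypothetical): what the new escapeXml helper does to user input before PasteTextPage embeds it in the <buffer> wrapper shown earlier.

// sketch: escape a user-supplied book title before templating it into XML
import * as utils from './utils';

const bookTitle = 'War & "Peace" <draft>';   // hypothetical user input
const safe = utils.escapeXml(bookTitle);
// safe === 'War &amp; &quot;Peace&quot; &lt;draft&gt;'
const buffer = `<buffer><cut-title>${safe}</cut-title>pasted text...</buffer>`;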

View File

@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 npm run build:linux
 sudo -u www-data cp -r ../../dist/linux/* /home/liberama

View File

@@ -1,3 +1,11 @@
-#!/bin/sh
-sudo -H -u www-data sh -c "cd /var/www; /home/liberama/liberama"
+#!/bin/bash
+sudo -H -u www-data bash -c "\
+while true; do\
+trap '' 2;\
+cd /var/www;\
+/home/liberama/liberama;\
+trap 2;\
+echo \"Restart after 5 sec. Press Ctrl+C to exit.\";\
+sleep 5;\
+done;"

package-lock.json generated
View File

@@ -1,6 +1,6 @@
 {
 "name": "Liberama",
-"version": "0.8.3",
+"version": "0.8.4",
 "lockfileVersion": 1,
 "requires": true,
 "dependencies": {

@@ -7428,11 +7428,6 @@
 "semver": "^5.3.0"
 }
 },
-"node-stream-zip": {
-"version": "1.8.2",
-"resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.8.2.tgz",
-"integrity": "sha512-zwP2F/R28Oqtl0gOLItk5QjJ6jEU8XO4kaUMgeqvCyXPgdCZlm8T/5qLMiNy+moJCBCiMQAaX7aVMRhT0t2vkQ=="
-},
 "nopt": {
 "version": "4.0.1",
 "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",

View File

@@ -1,6 +1,6 @@
 {
 "name": "Liberama",
-"version": "0.8.3",
+"version": "0.8.4",
 "author": "Book Pauk <bookpauk@gmail.com>",
 "license": "CC0-1.0",
 "repository": "bookpauk/liberama",

@@ -72,7 +72,6 @@
 "lodash": "^4.17.15",
 "minimist": "^1.2.0",
 "multer": "^1.4.2",
-"node-stream-zip": "^1.8.2",
 "pako": "^1.0.10",
 "path-browserify": "^1.0.0",
 "quasar": "^1.8.5",

View File

@@ -25,7 +25,8 @@ class AppLogger {
 loggerParams = [
 {log: 'ConsoleLog'},
 {log: 'FileLog', fileName: `${config.logDir}/${config.name}.log`},
-{log: 'FileLog', fileName: `${config.logDir}/${config.name}.err.log`, exclude: [LM_OK, LM_INFO]},
+{log: 'FileLog', fileName: `${config.logDir}/${config.name}.err.log`, exclude: [LM_OK, LM_INFO, LM_TOTAL]},
+{log: 'FileLog', fileName: `${config.logDir}/${config.name}.fatal.log`, exclude: [LM_OK, LM_INFO, LM_WARN, LM_ERR, LM_TOTAL]}, //LM_FATAL only
 ];
 }
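Illustrative note, my sketch rather than part of the commit (it assumes the LM_* level constants are in scope, as they are inside the logger modules): where messages land after this change.

// with the loggerParams above:
//   <name>.log       - no excludes, receives every level
//   <name>.err.log   - now also skips LM_TOTAL in addition to LM_OK and LM_INFO
//   <name>.fatal.log - new file, only LM_FATAL gets through
const log = new (require('./AppLogger'))().log; // singleton, as used in the other server modules
log(LM_INFO, 'goes to <name>.log only');
log(LM_ERR, 'goes to <name>.log and <name>.err.log');
log(LM_FATAL, 'goes to all three files, including <name>.fatal.log');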

View File

@@ -3,11 +3,13 @@ const zlib = require('zlib');
 const path = require('path');
 const unbzip2Stream = require('unbzip2-stream');
 const tar = require('tar-fs');
-const ZipStreamer = require('./ZipStreamer');
+const iconv = require('iconv-lite');
+const ZipStreamer = require('./Zip/ZipStreamer');
 const appLogger = new (require('./AppLogger'))();//singleton
-const utils = require('./utils');
 const FileDetector = require('./FileDetector');
+const textUtils = require('./Reader/BookConverter/textUtils');
+const utils = require('./utils');
 class FileDecompressor {
 constructor(limitFileSize = 0) {
@@ -114,7 +116,25 @@ class FileDecompressor {
 async unZip(filename, outputDir) {
 const zip = new ZipStreamer();
-return await zip.unpack(filename, outputDir, null, this.limitFileSize);
+try {
+return await zip.unpack(filename, outputDir, {
+limitFileSize: this.limitFileSize,
+limitFileCount: 1000
+});
+} catch (e) {
+fs.emptyDir(outputDir);
+return await zip.unpack(filename, outputDir, {
+limitFileSize: this.limitFileSize,
+limitFileCount: 1000,
+decodeEntryNameCallback: (nameRaw) => {
+const enc = textUtils.getEncodingLite(nameRaw);
+if (enc.indexOf('ISO-8859') < 0) {
+return iconv.decode(nameRaw, enc);
+}
+return nameRaw;
+}
+});
+}
 }
 unBz2(filename, outputDir) {
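A minimal sketch of my own, not from the commit (paths and limits are hypothetical), of calling ZipStreamer.unpack with the options object introduced here:

const ZipStreamer = require('./Zip/ZipStreamer');

async function unpackWithLimits(zipFile, outputDir) {
    const zip = new ZipStreamer();
    // the options object replaces the old (entryCallback, limitFileSize) positional arguments
    return await zip.unpack(zipFile, outputDir, {
        limitFileSize: 50 * 1024 * 1024,                          // hypothetical 50 MB per-entry cap
        limitFileCount: 1000,                                      // reject archives with too many entries
        decodeEntryNameCallback: (nameRaw) => nameRaw.toString(),  // decode entry names as UTF-8
    });
}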

View File

@@ -3,7 +3,7 @@ const fs = require('fs-extra');
 const path = require('path');
 const log = new (require('../AppLogger'))().log;//singleton
-const ZipStreamer = require('../ZipStreamer');
+const ZipStreamer = require('../Zip/ZipStreamer');
 const utils = require('../utils');

View File

@@ -226,12 +226,12 @@ class Logger {
 // catch ctrl+c event and exit normally
 process.on('SIGINT', () => {
-this.log(LM_WARN, 'Ctrl-C pressed, exiting...');
+this.log(LM_FATAL, 'Ctrl-C pressed, exiting...');
 process.exit(2);
 });
 process.on('SIGTERM', () => {
-this.log(LM_WARN, 'Kill signal, exiting...');
+this.log(LM_FATAL, 'Kill signal, exiting...');
 process.exit(2);
 });

View File

@@ -1,6 +1,5 @@
 const fs = require('fs-extra');
 const iconv = require('iconv-lite');
-const chardet = require('chardet');
 const he = require('he');
 const LimitedQueue = require('../../LimitedQueue');
@@ -77,16 +76,6 @@ class ConvertBase {
 decode(data) {
 let selected = textUtils.getEncoding(data);
-if (selected == 'ISO-8859-5') {
-const charsetAll = chardet.detectAll(data.slice(0, 20000));
-for (const charset of charsetAll) {
-if (charset.name.indexOf('ISO-8859') < 0) {
-selected = charset.name;
-break;
-}
-}
-}
 if (selected.toLowerCase() != 'utf-8')
 return iconv.decode(data, selected);
 else

View File

@@ -6,6 +6,7 @@ class ConvertHtml extends ConvertBase {
 check(data, opts) {
 const {dataType} = opts;
+//html?
 if (dataType && (dataType.ext == 'html' || dataType.ext == 'xml'))
 return {isText: false};
@@ -14,6 +15,11 @@ class ConvertHtml extends ConvertBase {
 return {isText: true};
 }
+//из буфера обмена?
+if (data.toString().indexOf('<buffer>') == 0) {
+return {isText: false};
+}
 return false;
 }

View File

@@ -1,4 +1,23 @@
-function getEncoding(buf, returnAll) {
+const chardet = require('chardet');
+
+function getEncoding(buf) {
+let selected = getEncodingLite(buf);
+if (selected == 'ISO-8859-5') {
+const charsetAll = chardet.detectAll(buf.slice(0, 20000));
+for (const charset of charsetAll) {
+if (charset.name.indexOf('ISO-8859') < 0) {
+selected = charset.name;
+break;
+}
+}
+}
+return selected;
+}
+
+function getEncodingLite(buf, returnAll) {
 const lowerCase = 3;
 const upperCase = 1;
@@ -106,5 +125,6 @@ function checkIfText(buf) {
 module.exports = {
 getEncoding,
+getEncodingLite,
 checkIfText,
 }

View File

@@ -2,7 +2,7 @@ const fs = require('fs-extra');
 const path = require('path');
 const zipStream = require('zip-stream');
-const unzipStream = require('node-stream-zip');
+const unzipStream = require('./node_stream_zip');
 class ZipStreamer {
 constructor() {
@@ -52,9 +52,15 @@ class ZipStreamer {
 })().catch(reject); });
 }
-unpack(zipFile, outputDir, entryCallback, limitFileSize = 0) {
+unpack(zipFile, outputDir, options, entryCallback) {
 return new Promise((resolve, reject) => {
 entryCallback = (entryCallback ? entryCallback : () => {});
+const {
+limitFileSize = 0,
+limitFileCount = 0,
+decodeEntryNameCallback = false,
+} = options;
 const unzip = new unzipStream({file: zipFile});
 unzip.on('error', reject);
@@ -67,23 +73,41 @@ class ZipStreamer {
 });
 unzip.on('ready', () => {
-if (limitFileSize) {
-for (const entry of Object.values(unzip.entries())) {
-if (!entry.isDirectory && entry.size > limitFileSize) {
+if (limitFileCount || limitFileSize || decodeEntryNameCallback) {
+const entries = Object.values(unzip.entries());
+if (limitFileCount && entries.length > limitFileCount) {
+reject('Слишком много файлов');
+return;
+}
+for (const entry of entries) {
+if (limitFileSize && !entry.isDirectory && entry.size > limitFileSize) {
 reject('Файл слишком большой');
 return;
 }
+if (decodeEntryNameCallback) {
+entry.name = (decodeEntryNameCallback(entry.nameRaw)).toString();
+}
 }
 }
 unzip.extract(null, outputDir, (err) => {
-if (err) reject(err);
-unzip.close();
-resolve(files);
+if (err) {
+reject(err);
+return;
+}
+try {
+unzip.close();
+resolve(files);
+} catch (e) {
+reject(e);
+}
 });
 });
 });
 }
 }
 module.exports = ZipStreamer;

File diff suppressed because it is too large