diff --git a/client/api/reader.js b/client/api/reader.js
index b3e2da87..62d1ab23 100644
--- a/client/api/reader.js
+++ b/client/api/reader.js
@@ -1,7 +1,5 @@
import axios from 'axios';
-import * as utils from '../share/utils';
-
const api = axios.create({
baseURL: '/api/reader'
});
@@ -11,8 +9,50 @@ const workerApi = axios.create({
});
class Reader {
+
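+    //streams intermediate worker states to the callback and resolves with the last received state (server route: /api/worker/get-state-finish)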
+ async getStateFinish(workerId, callback) {
+ if (!callback) callback = () => {};
+
+        //the server sends a stream of JSON state objects every 300ms, separated by the splitter string
+ const splitter = '-- aod2t5hDXU32bUFyqlFE next status --';
+ let lastIndex = 0;
+ let response = await workerApi.post('/get-state-finish', {workerId}, {
+ onDownloadProgress: progress => {
+                //a small optimization: parse only the newly received chunk instead of splitting the whole responseText
+ const xhr = progress.target;
+ let currIndex = xhr.responseText.length;
+ if (lastIndex == currIndex)
+ return;
+ const last = xhr.responseText.substring(lastIndex, currIndex);
+ lastIndex = currIndex;
+
+                //splitting only the last chunk is faster
+ const res = last.split(splitter).pop();
+ if (res) {
+ try {
+ callback(JSON.parse(res));
+ } catch (e) {
+ //
+ }
+ }
+ }
+ });
+
+        //take the last state from the streamed response
+ response = response.data.split(splitter).pop();
+
+ if (response) {
+ try {
+ response = JSON.parse(response);
+ } catch (e) {
+ response = false;
+ }
+ }
+
+ return response;
+ }
+
async loadBook(opts, callback) {
- const refreshPause = 300;
if (!callback) callback = () => {};
let response = await api.post('/load-book', opts);
@@ -22,53 +62,98 @@ class Reader {
throw new Error('Неверный ответ api');
callback({totalSteps: 4});
+ callback(response.data);
- let i = 0;
- while (1) {// eslint-disable-line no-constant-condition
- callback(response.data);
+ response = await this.getStateFinish(workerId, callback);
- if (response.data.state == 'finish') {//воркер закончил работу, можно скачивать кешированный на сервере файл
+ if (response) {
+            if (response.state == 'finish') {//the worker has finished; the cached file can now be downloaded from the server
callback({step: 4});
- const book = await this.loadCachedBook(response.data.path, callback);
- return Object.assign({}, response.data, {data: book.data});
+ const book = await this.loadCachedBook(response.path, callback, false, (response.size ? response.size : -1));
+ return Object.assign({}, response, {data: book.data});
}
- if (response.data.state == 'error') {
- let errMes = response.data.error;
+
+ if (response.state == 'error') {
+ let errMes = response.error;
if (errMes.indexOf('getaddrinfo') >= 0 ||
errMes.indexOf('ECONNRESET') >= 0 ||
errMes.indexOf('EINVAL') >= 0 ||
errMes.indexOf('404') >= 0)
- errMes = `Ресурс не найден по адресу: ${response.data.url}`;
+ errMes = `Ресурс не найден по адресу: ${response.url}`;
throw new Error(errMes);
}
- if (i > 0)
- await utils.sleep(refreshPause);
-
- i++;
- if (i > 120*1000/refreshPause) {//2 мин ждем телодвижений воркера
- throw new Error('Слишком долгое время ожидания');
- }
- //проверка воркера
- const prevProgress = response.data.progress;
- const prevState = response.data.state;
- response = await workerApi.post('/get-state', {workerId});
- i = (prevProgress != response.data.progress || prevState != response.data.state ? 1 : i);
+ } else {
+ throw new Error('Пустой ответ сервера');
}
}
async checkUrl(url) {
- return await axios.head(url, {headers: {'Cache-Control': 'no-cache'}});
- }
-
- async loadCachedBook(url, callback) {
- const response = await axios.head(url);
-
- let estSize = 1000000;
- if (response.headers['content-length']) {
- estSize = response.headers['content-length'];
+ let fileExists = false;
+ try {
+ await axios.head(url, {headers: {'Cache-Control': 'no-cache'}});
+ fileExists = true;
+ } catch (e) {
+ //
}
+        //if needed, restore the file on the server from the remote cloud storage
+ if (!fileExists) {
+ let response = await api.post('/restore-cached-file', {path: url});
+
+ const workerId = response.data.workerId;
+ if (!workerId)
+ throw new Error('Неверный ответ api');
+
+ response = await this.getStateFinish(workerId);
+ if (response.state == 'error') {
+ throw new Error(response.error);
+ }
+ }
+
+ return true;
+ }
+
+ async loadCachedBook(url, callback, restore = true, estSize = -1) {
+ if (!callback) callback = () => {};
+ let response = null;
+
callback({state: 'loading', progress: 0});
+
+        //determine the file size
+ let fileExists = false;
+ if (estSize < 0) {
+ try {
+ response = await axios.head(url, {headers: {'Cache-Control': 'no-cache'}});
+
+ if (response.headers['content-length']) {
+ estSize = response.headers['content-length'];
+ }
+ fileExists = true;
+ } catch (e) {
+ //
+ }
+ }
+
+        //if needed, restore the file on the server from the remote cloud storage
+ if (restore && !fileExists) {
+ response = await api.post('/restore-cached-file', {path: url});
+
+ const workerId = response.data.workerId;
+ if (!workerId)
+ throw new Error('Неверный ответ api');
+
+ response = await this.getStateFinish(workerId);
+ if (response.state == 'error') {
+ throw new Error(response.error);
+ }
+
+ if (response.size && estSize < 0) {
+ estSize = response.size;
+ }
+ }
+
+        //fetch the file
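+        //fall back to a rough 1MB estimate when the real size is unknown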
+ estSize = (estSize > 0 ? estSize : 1000000);
const options = {
onDownloadProgress: progress => {
while (progress.loaded > estSize) estSize *= 1.5;
@@ -77,7 +162,7 @@ class Reader {
callback({progress: Math.round((progress.loaded*100)/estSize)});
}
}
- //загрузка
+
return await axios.get(url, options);
}
diff --git a/client/components/Reader/LoaderPage/LoaderPage.vue b/client/components/Reader/LoaderPage/LoaderPage.vue
index 530a9695..9eee7120 100644
--- a/client/components/Reader/LoaderPage/LoaderPage.vue
+++ b/client/components/Reader/LoaderPage/LoaderPage.vue
@@ -112,7 +112,7 @@ class LoaderPage extends Vue {
submitUrl() {
if (this.bookUrl) {
- this.$emit('load-book', {url: this.bookUrl});
+ this.$emit('load-book', {url: this.bookUrl, force: true});
this.bookUrl = '';
}
}
diff --git a/client/components/Reader/versionHistory.js b/client/components/Reader/versionHistory.js
index 7d7f0afb..b05fa59d 100644
--- a/client/components/Reader/versionHistory.js
+++ b/client/components/Reader/versionHistory.js
@@ -1,4 +1,15 @@
export const versionHistory = [
+{
+ showUntil: '2020-01-19',
+ header: '0.8.2 (2020-01-20)',
+ content:
+`
+
+ - внутренние оптимизации
+
+`
+},
+
{
showUntil: '2020-01-06',
header: '0.8.1 (2020-01-07)',
diff --git a/package-lock.json b/package-lock.json
index 58e7c28b..b6dcfe3e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1725,6 +1725,11 @@
}
}
},
+ "base-64": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/base-64/-/base-64-0.1.0.tgz",
+ "integrity": "sha1-eAqZyE59YAJgNhURxId2E78k9rs="
+ },
"base-x": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/base-x/-/base-x-3.0.7.tgz",
@@ -5719,6 +5724,11 @@
"parse-passwd": "^1.0.0"
}
},
+ "hot-patcher": {
+ "version": "0.5.0",
+ "resolved": "https://registry.npmjs.org/hot-patcher/-/hot-patcher-0.5.0.tgz",
+ "integrity": "sha512-2Uu2W0s8+dnqXzdlg0MRsRzPoDCs1wVjOGSyMRRaMzLDX4bgHw6xDYKccsWafXPPxQpkQfEjgW6+17pwcg60bw=="
+ },
"hsl-regex": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/hsl-regex/-/hsl-regex-1.0.0.tgz",
@@ -6902,6 +6912,11 @@
}
}
},
+ "merge": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/merge/-/merge-1.2.1.tgz",
+ "integrity": "sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ=="
+ },
"merge-descriptors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
@@ -7882,6 +7897,11 @@
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
"dev": true
},
+ "path-posix": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/path-posix/-/path-posix-1.0.0.tgz",
+ "integrity": "sha1-BrJhE/Vr6rBCVFojv6iAA8ysJg8="
+ },
"path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
@@ -10423,6 +10443,11 @@
"integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=",
"dev": true
},
+ "querystringify": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz",
+ "integrity": "sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA=="
+ },
"randombytes": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
@@ -10709,6 +10734,11 @@
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
"dev": true
},
+ "requires-port": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+ "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
+ },
"resize-observer-polyfill": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz",
@@ -12489,6 +12519,11 @@
}
}
},
+ "url-join": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz",
+ "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA=="
+ },
"url-loader": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/url-loader/-/url-loader-1.1.2.tgz",
@@ -12508,6 +12543,15 @@
}
}
},
+ "url-parse": {
+ "version": "1.4.7",
+ "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.7.tgz",
+ "integrity": "sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==",
+ "requires": {
+ "querystringify": "^2.1.1",
+ "requires-port": "^1.0.0"
+ }
+ },
"url-parse-lax": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz",
@@ -12737,6 +12781,40 @@
"neo-async": "^2.5.0"
}
},
+ "webdav": {
+ "version": "2.10.1",
+ "resolved": "https://registry.npmjs.org/webdav/-/webdav-2.10.1.tgz",
+ "integrity": "sha512-3UfnjGTAqSM9MW3Rpt1KrY1KneYK0wPCFryHTncqw1OP1pyiniT3uYhVpgmH6za/TkWOfnTnKCDKhwrLJFdzow==",
+ "requires": {
+ "axios": "^0.19.0",
+ "base-64": "^0.1.0",
+ "hot-patcher": "^0.5.0",
+ "merge": "^1.2.1",
+ "minimatch": "^3.0.4",
+ "path-posix": "^1.0.0",
+ "url-join": "^4.0.1",
+ "url-parse": "^1.4.7",
+ "xml2js": "^0.4.19"
+ },
+ "dependencies": {
+ "axios": {
+ "version": "0.19.1",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-0.19.1.tgz",
+ "integrity": "sha512-Yl+7nfreYKaLRvAvjNPkvfjnQHJM1yLBY3zhqAwcJSwR/6ETkanUgylgtIvkvz0xJ+p/vZuNw8X7Hnb7Whsbpw==",
+ "requires": {
+ "follow-redirects": "1.5.10"
+ }
+ }
+ }
+ },
+ "webdav-fs": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/webdav-fs/-/webdav-fs-2.0.0.tgz",
+ "integrity": "sha512-TjqQKNnf1NuPiMEFJVrmWUYpIEgUxWRktddu5JhQsyxFIOyAJT0cpLyaoOdAp/yrG1yXgDsa6ZL9z+h4Z71zWA==",
+ "requires": {
+ "webdav": "^2.0.0"
+ }
+ },
"webpack": {
"version": "4.40.2",
"resolved": "https://registry.npmjs.org/webpack/-/webpack-4.40.2.tgz",
@@ -13005,6 +13083,20 @@
"mkdirp": "^0.5.1"
}
},
+ "xml2js": {
+ "version": "0.4.23",
+ "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
+ "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
+ "requires": {
+ "sax": ">=0.6.0",
+ "xmlbuilder": "~11.0.0"
+ }
+ },
+ "xmlbuilder": {
+ "version": "11.0.1",
+ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
+ "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
+ },
"xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
diff --git a/package.json b/package.json
index 7a8de918..99e0e533 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "Liberama",
- "version": "0.8.1",
+ "version": "0.8.2",
"author": "Book Pauk ",
"license": "CC0-1.0",
"repository": "bookpauk/liberama",
@@ -84,6 +84,7 @@
"vue-router": "^3.1.3",
"vuex": "^3.1.1",
"vuex-persistedstate": "^2.5.4",
+ "webdav-fs": "^2.0.0",
"zip-stream": "^2.1.2"
}
}
diff --git a/server/config/base.js b/server/config/base.js
index 33007a57..7370cec7 100644
--- a/server/config/base.js
+++ b/server/config/base.js
@@ -21,7 +21,7 @@ module.exports = {
     maxTempPublicDirSize: 512*1024*1024,//512MB
     maxUploadPublicDirSize: 200*1024*1024,//200MB
- useExternalBookConverter: false,
+ useExternalBookConverter: false,
db: [
{
@@ -45,5 +45,14 @@ module.exports = {
},
],
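+    //optional remote WebDAV storage: files evicted from the public cache directories are pushed there and can be restored on demand; false disables the feature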
+ remoteWebDavStorage: false,
+ /*
+ remoteWebDavStorage: {
+ url: '127.0.0.1:1900',
+ username: '',
+ password: '',
+ },
+ */
+
};
diff --git a/server/config/index.js b/server/config/index.js
index bd17f7b3..7ffa99f1 100644
--- a/server/config/index.js
+++ b/server/config/index.js
@@ -10,6 +10,7 @@ const propsToSave = [
'useExternalBookConverter',
'servers',
+ 'remoteWebDavStorage',
];
let instance = null;
diff --git a/server/controllers/ReaderController.js b/server/controllers/ReaderController.js
index c2288ec7..09596225 100644
--- a/server/controllers/ReaderController.js
+++ b/server/controllers/ReaderController.js
@@ -62,6 +62,24 @@ class ReaderController extends BaseController {
res.status(400).send({error});
return false;
}
+
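+    //starts a worker that restores a cached file (from the remote WebDAV storage, if configured) and returns its initial state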
+ async restoreCachedFile(req, res) {
+ const request = req.body;
+ let error = '';
+ try {
+ if (!request.path)
+ throw new Error(`key 'path' is empty`);
+
+ const workerId = this.readerWorker.restoreCachedFile(request.path);
+ const state = this.workerState.getState(workerId);
+ return (state ? state : {});
+ } catch (e) {
+ error = e.message;
+ }
+ //bad request
+ res.status(400).send({error});
+ return false;
+ }
}
module.exports = ReaderController;
diff --git a/server/controllers/WorkerController.js b/server/controllers/WorkerController.js
index 69bd4982..54d229dc 100644
--- a/server/controllers/WorkerController.js
+++ b/server/controllers/WorkerController.js
@@ -1,5 +1,6 @@
const BaseController = require('./BaseController');
const WorkerState = require('../core/WorkerState');//singleton
+const utils = require('../core/utils');
class WorkerController extends BaseController {
constructor(config) {
@@ -15,6 +16,7 @@ class WorkerController extends BaseController {
throw new Error(`key 'workerId' is wrong`);
const state = this.workerState.getState(request.workerId);
+
return (state ? state : {});
} catch (e) {
error = e.message;
@@ -23,6 +25,59 @@ class WorkerController extends BaseController {
res.status(400).send({error});
return false;
}
+
+ async getStateFinish(req, res) {
+ const request = req.body;
+ let error = '';
+ try {
+ if (!request.workerId)
+ throw new Error(`key 'workerId' is wrong`);
+
+ res.writeHead(200, {
+ 'Content-Type': 'text/json; charset=utf-8',
+ });
+
+ const splitter = '-- aod2t5hDXU32bUFyqlFE next status --';
+ const refreshPause = 300;
+ let i = 0;
+ let prevProgress = -1;
+ let prevState = '';
+ let state;
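+            //poll the worker state every refreshPause ms and stream each snapshot to the client until finish, error or timeout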
+ while (1) {// eslint-disable-line no-constant-condition
+ state = this.workerState.getState(request.workerId);
+ if (!state) break;
+
+ res.write(splitter + JSON.stringify(state));
+ res.flush();
+
+ if (state.state != 'finish' && state.state != 'error')
+ await utils.sleep(refreshPause);
+ else
+ break;
+
+ i++;
+                if (i > 2*60*1000/refreshPause) {//wait up to 2 minutes for the worker to show activity
+ res.write(splitter + JSON.stringify({state: 'error', error: 'Слишком долгое время ожидания'}));
+ break;
+ }
+ i = (prevProgress != state.progress || prevState != state.state ? 1 : i);
+ prevProgress = state.progress;
+ prevState = state.state;
+ }
+
+ if (!state) {
+ res.write(splitter + JSON.stringify({}));
+ }
+
+ res.end();
+ return false;
+ } catch (e) {
+ error = e.message;
+ }
+ //bad request
+ res.status(400).send({error});
+ return false;
+ }
}
module.exports = WorkerController;
diff --git a/server/core/FileDecompressor.js b/server/core/FileDecompressor.js
index 7b7c68fc..6460cf46 100644
--- a/server/core/FileDecompressor.js
+++ b/server/core/FileDecompressor.js
@@ -5,6 +5,7 @@ const unbzip2Stream = require('unbzip2-stream');
const tar = require('tar-fs');
const ZipStreamer = require('./ZipStreamer');
+const appLogger = new (require('./AppLogger'))();//singleton
const utils = require('./utils');
const FileDetector = require('./FileDetector');
@@ -189,9 +190,9 @@ class FileDecompressor {
});
}
- async gzipFile(inputFile, outputFile) {
+ async gzipFile(inputFile, outputFile, level = 1) {
return new Promise((resolve, reject) => {
- const gzip = zlib.createGzip({level: 1});
+ const gzip = zlib.createGzip({level});
const input = fs.createReadStream(inputFile);
const output = fs.createWriteStream(outputFile);
@@ -208,7 +209,21 @@ class FileDecompressor {
const outFilename = `${outDir}/${hash}`;
if (!await fs.pathExists(outFilename)) {
- await this.gzipFile(filename, outFilename);
+ await this.gzipFile(filename, outFilename, 1);
+
+            //re-pack the file at maximum compression level after a short delay
+ const filenameCopy = `${filename}.copy`;
+ await fs.copy(filename, filenameCopy);
+
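+            //the copy is re-compressed in the background and then moved over the level-1 archive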
+ (async() => {
+ await utils.sleep(5000);
+ const filenameGZ = `${filename}.gz`;
+ await this.gzipFile(filenameCopy, filenameGZ, 9);
+
+ await fs.move(filenameGZ, outFilename, {overwrite: true});
+
+ await fs.remove(filenameCopy);
+ })().catch((e) => { if (appLogger.inited) appLogger.log(LM_ERR, `FileDecompressor.gzipFileIfNotExists: ${e.message}`) });
} else {
await utils.touchFile(outFilename);
}
diff --git a/server/core/Reader/ReaderWorker.js b/server/core/Reader/ReaderWorker.js
index 2b8d1180..d2b6a92f 100644
--- a/server/core/Reader/ReaderWorker.js
+++ b/server/core/Reader/ReaderWorker.js
@@ -5,6 +5,7 @@ const WorkerState = require('../WorkerState');//singleton
const FileDownloader = require('../FileDownloader');
const FileDecompressor = require('../FileDecompressor');
const BookConverter = require('./BookConverter');
+const RemoteWebDavStorage = require('../RemoteWebDavStorage');
const utils = require('../utils');
const log = new (require('../AppLogger'))().log;//singleton
@@ -28,6 +29,11 @@ class ReaderWorker {
this.decomp = new FileDecompressor();
this.bookConverter = new BookConverter(this.config);
+ this.remoteWebDavStorage = false;
+ if (config.remoteWebDavStorage) {
+ this.remoteWebDavStorage = new RemoteWebDavStorage(config.remoteWebDavStorage);
+ }
+
         this.periodicCleanDir(this.config.tempPublicDir, this.config.maxTempPublicDirSize, 60*60*1000);//once an hour
         this.periodicCleanDir(this.config.uploadDir, this.config.maxUploadPublicDirSize, 60*60*1000);//once an hour
@@ -39,7 +45,6 @@ class ReaderWorker {
async loadBook(opts, wState) {
const url = opts.url;
- let errMes = '';
let decompDir = '';
let downloadedFilename = '';
let isUploaded = false;
@@ -87,17 +92,18 @@ class ReaderWorker {
});
         //gzip the file into tmp, unless one with the same sha256-based name already exists there
- const compFilename = await this.decomp.gzipFileIfNotExists(convertFilename, `${this.config.tempPublicDir}`);
+ const compFilename = await this.decomp.gzipFileIfNotExists(convertFilename, this.config.tempPublicDir);
+ const stat = await fs.stat(compFilename);
wState.set({progress: 100});
//finish
const finishFilename = path.basename(compFilename);
- wState.finish({path: `/tmp/${finishFilename}`});
+ wState.finish({path: `/tmp/${finishFilename}`, size: stat.size});
} catch (e) {
log(LM_ERR, e.stack);
- wState.set({state: 'error', error: (errMes ? errMes : e.message)});
+ wState.set({state: 'error', error: e.message});
} finally {
//clean
if (decompDir)
@@ -133,6 +139,41 @@ class ReaderWorker {
return `file://${hash}`;
}
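+    //restores a previously cached file into tempPublicDir, pulling it from the remote WebDAV storage when it is missing locally; returns the id of a worker that reports progress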
+ restoreCachedFile(filename) {
+ const workerId = this.workerState.generateWorkerId();
+ const wState = this.workerState.getControl(workerId);
+ wState.set({state: 'start'});
+
+ (async() => {
+ try {
+ wState.set({state: 'download', step: 1, totalSteps: 1, path: filename, progress: 0});
+
+ const basename = path.basename(filename);
+ const targetName = `${this.config.tempPublicDir}/${basename}`;
+
+ if (!await fs.pathExists(targetName)) {
+ let found = false;
+ if (this.remoteWebDavStorage) {
+ found = await this.remoteWebDavStorage.getFileSuccess(targetName);
+ }
+
+ if (!found) {
+ throw new Error('404 Файл не найден');
+ }
+ }
+
+ const stat = await fs.stat(targetName);
+ wState.finish({path: `/tmp/${basename}`, size: stat.size, progress: 100});
+ } catch (e) {
+ if (e.message.indexOf('404') < 0)
+ log(LM_ERR, e.stack);
+ wState.set({state: 'error', error: e.message});
+ }
+ })();
+
+ return workerId;
+ }
+
async periodicCleanDir(dir, maxSize, timeout) {
try {
const list = await fs.readdir(dir);
@@ -153,7 +194,16 @@ class ReaderWorker {
let i = 0;
while (i < files.length && size > maxSize) {
const file = files[i];
- await fs.remove(`${dir}/${file.name}`);
+ const oldFile = `${dir}/${file.name}`;
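+                //before deleting, push the file to the remote WebDAV storage so it can be restored later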
+ if (this.remoteWebDavStorage) {
+ try {
+ //log(`remoteWebDavStorage.putFile ${path.basename(oldFile)}`);
+ await this.remoteWebDavStorage.putFile(oldFile);
+ } catch (e) {
+ log(LM_ERR, e.stack);
+ }
+ }
+ await fs.remove(oldFile);
size -= file.stat.size;
i++;
}
diff --git a/server/core/RemoteWebDavStorage.js b/server/core/RemoteWebDavStorage.js
new file mode 100644
index 00000000..e7d0ad83
--- /dev/null
+++ b/server/core/RemoteWebDavStorage.js
@@ -0,0 +1,121 @@
+const fs = require('fs-extra');
+const path = require('path');
+
+const WebDavFS = require('webdav-fs');
+
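+//thin promise-based wrapper around webdav-fs: pushes local cache files to a remote WebDAV share and pulls them back on demand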
+class RemoteWebDavStorage {
+ constructor(config) {
+ const opts = Object.assign({}, config);
+ this.wfs = WebDavFS(config.url, opts);
+ }
+
+ stat(filename) {
+ return new Promise((resolve, reject) => {
+ this.wfs.stat(filename, function(err, fileStat) {
+ if (err)
+                    return reject(err);
+ resolve(fileStat);
+ });
+ });
+ }
+
+ writeFile(filename, data) {
+ return new Promise((resolve, reject) => {
+ this.wfs.writeFile(filename, data, 'binary', function(err) {
+ if (err)
+                    return reject(err);
+ resolve();
+ });
+ });
+ }
+
+ unlink(filename) {
+ return new Promise((resolve, reject) => {
+ this.wfs.unlink(filename, function(err) {
+ if (err)
+                    return reject(err);
+ resolve();
+ });
+ });
+ }
+
+ readFile(filename) {
+ return new Promise((resolve, reject) => {
+ this.wfs.readFile(filename, 'binary', function(err, data) {
+ if (err)
+                    return reject(err);
+ resolve(data);
+ });
+ });
+ }
+
+ mkdir(dirname) {
+ return new Promise((resolve, reject) => {
+ this.wfs.mkdir(dirname, function(err) {
+ if (err)
+                    return reject(err);
+ resolve();
+ });
+ });
+ }
+
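+    //uploads a local file to the remote storage, sharding files into directories named after the first 3 characters of the file name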
+ async putFile(filename) {
+ if (!await fs.pathExists(filename)) {
+ throw new Error(`File not found: ${filename}`);
+ }
+
+ const base = path.basename(filename);
+ let remoteFilename = `/${base}`;
+
+ if (base.length > 3) {
+ const remoteDir = `/${base.substr(0, 3)}`;
+ try {
+ await this.mkdir(remoteDir);
+ } catch (e) {
+ //
+ }
+ remoteFilename = `${remoteDir}/${base}`;
+ }
+
+ try {
+ const localStat = await fs.stat(filename);
+ const remoteStat = await this.stat(remoteFilename);
+ if (remoteStat.isFile && localStat.size == remoteStat.size) {
+ return;
+ }
+ await this.unlink(remoteFilename);
+ } catch (e) {
+ //
+ }
+
+ const data = await fs.readFile(filename);
+ await this.writeFile(remoteFilename, data);
+ }
+
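+    //downloads the file from the remote storage to the local path, unless it already exists locally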
+ async getFile(filename) {
+ if (await fs.pathExists(filename)) {
+ return;
+ }
+
+ const base = path.basename(filename);
+ let remoteFilename = `/${base}`;
+ if (base.length > 3) {
+ remoteFilename = `/${base.substr(0, 3)}/${base}`;
+ }
+
+ const data = await this.readFile(remoteFilename);
+ await fs.writeFile(filename, data);
+ }
+
+ async getFileSuccess(filename) {
+ try {
+ await this.getFile(filename);
+ return true;
+ } catch (e) {
+ //
+ }
+ return false;
+ }
+}
+
+module.exports = RemoteWebDavStorage;
\ No newline at end of file
diff --git a/server/routes.js b/server/routes.js
index ffdd92c8..09b96239 100644
--- a/server/routes.js
+++ b/server/routes.js
@@ -28,7 +28,9 @@ function initRoutes(app, config) {
['POST', '/api/reader/load-book', reader.loadBook.bind(reader), [aAll], {}],
['POST', '/api/reader/storage', reader.storage.bind(reader), [aAll], {}],
['POST', '/api/reader/upload-file', [upload.single('file'), reader.uploadFile.bind(reader)], [aAll], {}],
+ ['POST', '/api/reader/restore-cached-file', reader.restoreCachedFile.bind(reader), [aAll], {}],
['POST', '/api/worker/get-state', worker.getState.bind(worker), [aAll], {}],
+ ['POST', '/api/worker/get-state-finish', worker.getStateFinish.bind(worker), [aAll], {}],
];
//to app