diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..11704c3 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,17 @@ +{ + "env": { + "browser": true, + "es2021": true, + "jest/globals": true + }, + "extends": "eslint:recommended", + "parserOptions": { + "ecmaVersion": 12, + "sourceType": "module" + }, + "plugins": [ + "jest" + ], + "rules": { + } +} diff --git a/.nvimrc b/.nvimrc new file mode 120000 index 0000000..1490f7a --- /dev/null +++ b/.nvimrc @@ -0,0 +1 @@ +.vimrc \ No newline at end of file diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 0000000..5d823c2 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,7 @@ +{ + "trailingComma": "es5", + "tabWidth": 2, + "semi": true, + "singleQuote": true, + "jsxBracketSameLine": true +} diff --git a/.vimrc b/.vimrc new file mode 100644 index 0000000..72b62bc --- /dev/null +++ b/.vimrc @@ -0,0 +1,2 @@ +set autoread +set path+=.,src/** diff --git a/README.md b/README.md index ae0a56c..42dd255 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ ## Installing ```shell -npm i @scottg1/repertory-js +npm i @blockstorage/repertory-js ``` ## Repertory Configuration @@ -55,7 +55,7 @@ also be set to a strong, random password. ## Example API Usage ```javascript -const rep = require('@scottg1/repertory-js'); +const rep = require('@blockstorage/repertory-js'); // Repertory host settings diff --git a/package.json b/package.json index 3597ddf..00c2e3a 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "@scottg1/repertory-js", + "name": "@blockstorage/repertory-js", "version": "1.3.1-r1", "description": "A Node.js module for interfacing with Repertory's remote mount API", "scripts": { @@ -35,6 +35,8 @@ "@babel/plugin-transform-runtime": "^7.13.9", "@babel/preset-env": "^7.13.9", "babel-plugin-transform-class-properties": "^6.24.1", + "eslint": "^7.22.0", + "eslint-plugin-jest": "^24.3.2", "jest": "^26.6.3" }, "type": "module", diff --git a/src/__tests__/connection.test.js b/src/__tests__/connection.test.js index 9d5a747..9bcc75e 100644 --- a/src/__tests__/connection.test.js +++ b/src/__tests__/connection.test.js @@ -19,21 +19,23 @@ test(`socket receive data fails when decryption fails`, async () => { on: (name, cb) => { cbl[name] = cb; }, - } + }; const conn = new connection('', 0, 'b', socket); let reject; - const mock_reject = jest.fn().mockImplementation(e => reject(e)); + const mock_reject = jest.fn().mockImplementation((e) => reject(e)); conn.reject = mock_reject; conn.resolve = jest.fn(); const p = new packet('a'); await p.encrypt(); p.encode_top_ui32(p.buffer.length); - await expect(new Promise((_, r) => { - reject = r; - cbl['data'](Buffer.from(p.buffer)); - })).rejects.toThrow(Error); + await expect( + new Promise((_, r) => { + reject = r; + cbl['data'](Buffer.from(p.buffer)); + }) + ).rejects.toThrow(Error); expect(mock_reject.mock.calls.length).toBe(1); }); @@ -42,8 +44,7 @@ test(`disconnect succeeds if an error is thrown`, async () => { destroy: () => { throw new Error('mock destroy error'); }, - on: () => { - }, + on: () => {}, }; const conn = new connection('', 0, 'b', socket); @@ -56,7 +57,7 @@ test(`send fails on socket error`, async () => { on: (name, cb) => { cbl[name] = cb; }, - } + }; const conn = new connection('', 0, 'b', socket); const mock_reject = jest.fn(); @@ -73,7 +74,7 @@ test(`error is thrown when socket is closed`, async () => { on: (name, cb) => { cbl[name] = cb; }, - } + }; const conn = new connection('', 0, 'b', socket); const mock_reject = jest.fn();
diff --git a/src/__tests__/connection_pool.test.js b/src/__tests__/connection_pool.test.js index 2298e28..2325e2d 100644 --- a/src/__tests__/connection_pool.test.js +++ b/src/__tests__/connection_pool.test.js @@ -19,7 +19,7 @@ test(`error on socket release is ignored`, async () => { invoked = true; throw new Error('mock release error'); }, - } + }; }); const mock_send = jest.fn(); @@ -45,9 +45,8 @@ test(`connection pool send fails when connection send fails`, async () => { const conn = new connection_pool(2, '', 20000); jest.spyOn(conn.pool, 'acquire').mockImplementation(() => { return { - release: () => { - }, - } + release: () => {}, + }; }); const mock_send = jest.fn(); diff --git a/src/__tests__/constants.test.js b/src/__tests__/constants.test.js index 2809045..fcf0651 100644 --- a/src/__tests__/constants.test.js +++ b/src/__tests__/constants.test.js @@ -1,4 +1,4 @@ -import {get_version, instance_id, package_json} from '../utils/constants' +import { get_version, instance_id, package_json } from '../utils/constants'; const uuid = require('uuid'); diff --git a/src/__tests__/file.test.js b/src/__tests__/file.test.js index 6e1ab4f..f469937 100644 --- a/src/__tests__/file.test.js +++ b/src/__tests__/file.test.js @@ -1,13 +1,11 @@ import file from '../io/file'; -jest.mock('../ops/index.js', () => ( - { - ...(jest.requireActual('../ops/index.js')), - close_file: jest.fn(), - } -)); +jest.mock('../ops/index.js', () => ({ + ...jest.requireActual('../ops/index.js'), + close_file: jest.fn(), +})); -import {close_file} from '../ops/index'; +import { close_file } from '../ops/index'; test(`can close a closed file`, async () => { const f = new file(); diff --git a/src/__tests__/repertory.test.js b/src/__tests__/repertory.test.js index 728ca23..2bd5602 100644 --- a/src/__tests__/repertory.test.js +++ b/src/__tests__/repertory.test.js @@ -1,6 +1,6 @@ import crypto from 'crypto'; import fs from 'fs'; -import {Uint64BE} from 'int64-buffer'; +import { Uint64BE } from 'int64-buffer'; import * as repertory from '../index.js'; import connection from '../networking/connection'; @@ -10,18 +10,18 @@ const TEST_HOST = process.env.TEST_HOST || 'localhost'; const TEST_PASSWORD = process.env.TEST_PASSWORD || ''; const TEST_PORT = process.env.TEST_PORT || 20000; -const calculate_sha256 = path => { +const calculate_sha256 = (path) => { return new Promise((resolve, reject) => { const hash = crypto.createHash('sha256'); fs.createReadStream(path) - .on('data', data => hash.update(data)) - .on('error', err => reject(err)) - .on('end', () => { - const h = hash.digest('hex'); - console.log(path, h); - resolve(h); - }); + .on('data', (data) => hash.update(data)) + .on('error', (err) => reject(err)) + .on('end', () => { + const h = hash.digest('hex'); + console.log(path, h); + resolve(h); + }); }); }; @@ -43,8 +43,12 @@ test('can create a connection to repertory api', async () => { test('create_pool returns a connection if pool size is <=1', async () => { for (let i = 0; i < 2; i++) { - const conn = - await repertory.create_pool(i, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + i, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); expect(conn).toBeInstanceOf(connection); test_connection(conn, true); @@ -53,8 +57,12 @@ test('create_pool returns a connection if pool size is <=1', async () => { }); test('can create a connection pool', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, 
+ TEST_PORT, + TEST_PASSWORD + ); console.log(conn); expect(conn).toBeInstanceOf(connection_pool); expect(conn.host_or_ip).toEqual(TEST_HOST); @@ -68,8 +76,12 @@ test('can create a connection pool', async () => { }); test('can get drive information using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const di = await api.get_drive_information(); console.log(di); @@ -82,8 +94,12 @@ test('can get drive information using api', async () => { }); test('can create and remove a directory using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); expect(await api.directory.create('/repertory_js')).toEqual(0); expect(await api.directory.remove('/repertory_js')).toEqual(0); @@ -92,8 +108,12 @@ test('can create and remove a directory using api', async () => { }); test('can get directory list and snapshot using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const test_results = async (remote_path, page_count, get_page) => { @@ -131,8 +151,12 @@ test('can get directory list and snapshot using api', async () => { }); test('can create, close and delete a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const f = await api.file.create_or_open('/repertory_file.dat'); console.log(f); @@ -149,8 +173,12 @@ test('can create, close and delete a file using api', async () => { }); test('can open, close and delete a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); let f = await api.file.create_or_open('/repertory_file.dat'); expect(await f.close()).toEqual(0); @@ -170,8 +198,12 @@ test('can open, close and delete a file using api', async () => { }); test('can write to and read from a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const f = await api.file.create_or_open('/repertory_file.dat'); @@ -193,8 +225,12 @@ test('can write to and read from a file using api', async () => { }); test('can truncate a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const f = await api.file.create_or_open('/repertory_file.dat'); @@ -213,22 +249,34 @@ test('can truncate a file using api', async () => { test('can upload and download a file using api', async () => { try { fs.unlinkSync('repertory_test.dat'); - } catch { - } + } catch {} - const conn = - await 
repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + } + ) + ).toBeTruthy(); - expect(await calculate_sha256('test.dat')) - .toEqual(await calculate_sha256('repertory_test.dat')); + expect(await calculate_sha256('test.dat')).toEqual( + await calculate_sha256('repertory_test.dat') + ); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); fs.unlinkSync('repertory_test.dat'); @@ -237,21 +285,39 @@ test('can upload and download a file using api', async () => { }, 60000); test('can download and overwrite a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + } + ) + ).toBeTruthy(); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - true)) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + true + ) + ).toBeTruthy(); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); fs.unlinkSync('repertory_test.dat'); @@ -260,21 +326,39 @@ test('can download and overwrite a file using api', async () => { }, 60000); test('download fails if overwrite is false using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + } + ) + ).toBeTruthy(); - await expect(api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - 
false)) - .rejects.toThrow(Error); + await expect( + api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + false + ) + ).rejects.toThrow(Error); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); fs.unlinkSync('repertory_test.dat'); @@ -283,17 +367,29 @@ test('download fails if overwrite is false using api', async () => { }, 60000); test('can upload and overwrite a file using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - true)) - .toBeTruthy(); + expect( + await api.file.upload( + 'test.dat', + '/repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + true + ) + ).toBeTruthy(); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); @@ -301,17 +397,29 @@ test('can upload and overwrite a file using api', async () => { }, 60000); test('upload fails if overwrite is false using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); - await expect(api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - false)) - .rejects.toThrow(Error); + await expect( + api.file.upload( + 'test.dat', + '/repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + false + ) + ).rejects.toThrow(Error); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); @@ -319,12 +427,18 @@ test('upload fails if overwrite is false using api', async () => { }, 60000); test('can resume download using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => { + console.log(l, r, p, c); + }) + ).toBeTruthy(); const fd = fs.openSync('test.dat', 'r'); const buffer = Buffer.alloc(1024); @@ -333,13 +447,21 @@ test('can resume download using api', async () => { fs.writeFileSync('repertory_test.dat', buffer); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - false, true)) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + false, + true + ) + ).toBeTruthy(); - 
expect(await calculate_sha256('test.dat')) - .toEqual(await calculate_sha256('repertory_test.dat')); + expect(await calculate_sha256('test.dat')).toEqual( + await calculate_sha256('repertory_test.dat') + ); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); fs.unlinkSync('repertory_test.dat'); @@ -348,8 +470,12 @@ test('can resume download using api', async () => { }, 60000); test('can resume upload using api', async () => { - const conn = - await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD); + const conn = await repertory.create_pool( + 2, + TEST_HOST, + TEST_PORT, + TEST_PASSWORD + ); const api = repertory.create_api(conn); const fd = fs.openSync('test.dat', 'r'); @@ -361,17 +487,31 @@ test('can resume upload using api', async () => { await f.write(0, buffer); await f.close(); - expect(await api.file.upload('test.dat', '/repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); }, - false, true)) - .toBeTruthy(); + expect( + await api.file.upload( + 'test.dat', + '/repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + }, + false, + true + ) + ).toBeTruthy(); - expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat', - (l, r, p, c) => { console.log(l, r, p, c); })) - .toBeTruthy(); + expect( + await api.file.download( + '/repertory_test.dat', + 'repertory_test.dat', + (l, r, p, c) => { + console.log(l, r, p, c); + } + ) + ).toBeTruthy(); - expect(await calculate_sha256('test.dat')) - .toEqual(await calculate_sha256('repertory_test.dat')); + expect(await calculate_sha256('test.dat')).toEqual( + await calculate_sha256('repertory_test.dat') + ); expect(await api.file.delete('/repertory_test.dat')).toEqual(0); fs.unlinkSync('repertory_test.dat'); diff --git a/src/index.js b/src/index.js index 7160981..3a7f537 100644 --- a/src/index.js +++ b/src/index.js @@ -1,7 +1,7 @@ -import file from './io/file' +import file from './io/file'; import connection from './networking/connection'; import connection_pool from './networking/connection_pool'; -import * as ops from './ops' +import * as ops from './ops'; export const connect = async (host_or_ip, port, password) => { const conn = new connection(host_or_ip, port, password); @@ -9,33 +9,53 @@ export const connect = async (host_or_ip, port, password) => { return conn; }; -export const create_api = conn => { +export const create_api = (conn) => { return { - directory : { - create: async remote_path => ops.create_directory(conn, remote_path), + directory: { + create: async (remote_path) => ops.create_directory(conn, remote_path), list: async (remote_path, page_reader_cb) => ops.list_directory(conn, remote_path, page_reader_cb), - remove: async remote_path => ops.remove_directory(conn, remote_path), - snapshot: async remote_path => { + remove: async (remote_path) => ops.remove_directory(conn, remote_path), + snapshot: async (remote_path) => { return ops.snapshot_directory(conn, remote_path); }, }, - file : { - create_or_open : async remote_path => new file( - conn, await ops.create_or_open_file(conn, remote_path), remote_path), - delete : async (remote_path) => ops.delete_file(conn, remote_path), - download : - async (remote_path, local_path, progress_cb, overwrite, resume) => - ops.download_file(conn, remote_path, local_path, progress_cb, - overwrite, resume), - open : async remote_path => - new file(conn, await ops.open_file(conn, remote_path), remote_path), - upload : - async (local_path, remote_path, progress_cb, overwrite, resume) => - ops.upload_file(conn, local_path, 
remote_path, progress_cb, - overwrite, resume), + file: { + create_or_open: async (remote_path) => + new file( + conn, + await ops.create_or_open_file(conn, remote_path), + remote_path + ), + delete: async (remote_path) => ops.delete_file(conn, remote_path), + download: async ( + remote_path, + local_path, + progress_cb, + overwrite, + resume + ) => + ops.download_file( + conn, + remote_path, + local_path, + progress_cb, + overwrite, + resume + ), + open: async (remote_path) => + new file(conn, await ops.open_file(conn, remote_path), remote_path), + upload: async (local_path, remote_path, progress_cb, overwrite, resume) => + ops.upload_file( + conn, + local_path, + remote_path, + progress_cb, + overwrite, + resume + ), }, - get_drive_information : async () => ops.get_drive_information(conn), + get_drive_information: async () => ops.get_drive_information(conn), }; }; diff --git a/src/io/file.js b/src/io/file.js index 091cbbc..4bcb762 100644 --- a/src/io/file.js +++ b/src/io/file.js @@ -17,8 +17,12 @@ export default class file { async close() { if (this.handle !== null) { - const result = await ops.close_file(this.conn, this.remote_path, - this.handle, this.thread_id); + const result = await ops.close_file( + this.conn, + this.remote_path, + this.handle, + this.thread_id + ); if (result === 0) { this.handle = null; } @@ -30,38 +34,59 @@ export default class file { async get_size() { if (this.handle === null) { - return Promise.reject(new Error('\'get_size()\' failed: invalid handle')); + return Promise.reject(new Error("'get_size()' failed: invalid handle")); } const attrs = await ops.get_file_attributes( - this.conn, this.handle, this.remote_path, this.thread_id); + this.conn, + this.handle, + this.remote_path, + this.thread_id + ); return attrs.size; } async read(offset, length) { if (this.handle === null) { - return Promise.reject(new Error('\'read()\' failed: invalid handle')); + return Promise.reject(new Error("'read()' failed: invalid handle")); } - return ops.read_file(this.conn, this.handle, this.remote_path, offset, - length, this.thread_id); + return ops.read_file( + this.conn, + this.handle, + this.remote_path, + offset, + length, + this.thread_id + ); } async truncate(length) { if (this.handle === null) { - return Promise.reject(new Error('\'truncate()\' failed: invalid handle')); + return Promise.reject(new Error("'truncate()' failed: invalid handle")); } - return ops.truncate_file(this.conn, this.handle, this.remote_path, length, - this.thread_id); + return ops.truncate_file( + this.conn, + this.handle, + this.remote_path, + length, + this.thread_id + ); } async write(offset, buffer) { if (this.handle === null) { - return Promise.reject(new Error('\'write()\' failed: invalid handle')); + return Promise.reject(new Error("'write()' failed: invalid handle")); } - return ops.write_file(this.conn, this.handle, this.remote_path, offset, - buffer, this.thread_id); + return ops.write_file( + this.conn, + this.handle, + this.remote_path, + offset, + buffer, + this.thread_id + ); } } diff --git a/src/networking/connection.js b/src/networking/connection.js index d8e3fb5..9af889a 100644 --- a/src/networking/connection.js +++ b/src/networking/connection.js @@ -1,6 +1,6 @@ import Socket from 'net'; -import * as constants from '../utils/constants' +import * as constants from '../utils/constants'; import packet from './packet'; @@ -33,13 +33,16 @@ export default class connection { if (!this.socket) { try { await new Promise((resolve, reject) => { - this.socket = - 
Socket.createConnection(this.port, this.host_or_ip, err => { + this.socket = Socket.createConnection( + this.port, + this.host_or_ip, + (err) => { if (err) { - return reject(err) + return reject(err); } - return resolve() - }); + return resolve(); + } + ); }); } catch (err) { return Promise.reject(new Error(`'connect()' failed: ${err}`)); @@ -57,7 +60,7 @@ export default class connection { buffer = null; }; - this.socket.on('data', chunk => { + this.socket.on('data', (chunk) => { buffer = buffer ? Buffer.concat([buffer, chunk]) : chunk; if (buffer.length > 4) { const size = buffer.readUInt32BE(0); @@ -71,19 +74,20 @@ export default class connection { const response = new packet(this.password); response.buffer = new Uint8Array(packet_data); - response.decrypt() - .then(() => { - resolve(response) - }) - .catch(e => { - reject(e) - }) + response + .decrypt() + .then(() => { + resolve(response); + }) + .catch((e) => { + reject(e); + }); } } } }); - this.socket.on('error', e => { + this.socket.on('error', (e) => { if (this.reject) { const reject = this.reject; @@ -115,7 +119,7 @@ export default class connection { this.connected = false; } } catch (e) { - console.log(e) + console.log(e); } } @@ -131,7 +135,7 @@ export default class connection { return new Promise((resolve, reject) => { this.reject = reject; this.resolve = resolve; - this.socket.write(Buffer.from(packet.buffer), null, err => { + this.socket.write(Buffer.from(packet.buffer), null, (err) => { if (err) { this.cleanup_handlers(); reject(err); diff --git a/src/networking/connection_pool.js b/src/networking/connection_pool.js index 9089d3a..5cb0e62 100644 --- a/src/networking/connection_pool.js +++ b/src/networking/connection_pool.js @@ -9,18 +9,18 @@ export default class connection_pool { this.password = password; if (pool_size > 1) { this.pool = new Pool({ - connect : {host : host_or_ip, port : port}, - connectTimeout : 5000, - pool : {max : pool_size, min : 2} + connect: { host: host_or_ip, port: port }, + connectTimeout: 5000, + pool: { max: pool_size, min: 2 }, }); } else { throw new Error("'pool_size' must be > 1"); } } - host_or_ip = ""; + host_or_ip = ''; next_thread_id = 1; - password = ""; + password = ''; port = 20000; pool; shutdown = false; @@ -48,16 +48,19 @@ export default class connection_pool { }; try { - const result = await new connection(this.host_or_ip, this.port, - this.password, socket) - .send(method_name, packet, - optional_thread_id || socket.thread_id); + const result = await new connection( + this.host_or_ip, + this.port, + this.password, + socket + ).send(method_name, packet, optional_thread_id || socket.thread_id); cleanup(); return result; } catch (err) { cleanup(); return Promise.reject( - new Error(`'send(${method_name})' failed: ${err}`)); + new Error(`'send(${method_name})' failed: ${err}`) + ); } } catch (err) { return Promise.reject(new Error(`'acquire()' socket failed: ${err}`)); diff --git a/src/networking/packet.js b/src/networking/packet.js index 5864059..7cc527b 100644 --- a/src/networking/packet.js +++ b/src/networking/packet.js @@ -1,7 +1,7 @@ -import {randomBytes} from 'crypto'; -import {Int64BE, Uint64BE} from 'int64-buffer'; -import {sha256} from 'js-sha256'; -import {TextEncoder} from 'text-encoding'; +import { randomBytes } from 'crypto'; +import { Int64BE, Uint64BE } from 'int64-buffer'; +import { sha256 } from 'js-sha256'; +import { TextEncoder } from 'text-encoding'; import { be_ui8_array_to_i16, @@ -20,7 +20,9 @@ import { import JSChaCha20 from '../utils/jschacha20'; export default 
class packet { - constructor(token) { this.token = token; } + constructor(token) { + this.token = token; + } static HEADER = new TextEncoder().encode('repertory'); @@ -28,13 +30,14 @@ export default class packet { decode_offset = 0; token; - append_buffer = buffer => { + append_buffer = (buffer) => { if (!(buffer instanceof Uint8Array)) { throw new Error('Buffer must be of type Uint8Array'); } - this.buffer = - this.buffer ? new Uint8Array([...this.buffer, ...buffer ]) : buffer; + this.buffer = this.buffer + ? new Uint8Array([...this.buffer, ...buffer]) + : buffer; }; clear = () => { @@ -42,13 +45,15 @@ export default class packet { this.decode_offset = 0; }; - decode_buffer = length => { + decode_buffer = (length) => { if (!this.buffer) { throw new Error('Invalid buffer'); } - const ret = - this.buffer.slice(this.decode_offset, this.decode_offset + length); + const ret = this.buffer.slice( + this.decode_offset, + this.decode_offset + length + ); this.decode_offset += length; return Buffer.from(ret); }; @@ -68,9 +73,20 @@ export default class packet { const flags = this.decode_ui32(); const directory = !!this.decode_ui8(); return { - mode, nlink, uid, gid, atime, mtime, ctime, birth_time, size, blocks, - blksize, flags, directory, - } + mode, + nlink, + uid, + gid, + atime, + mtime, + ctime, + birth_time, + size, + blocks, + blksize, + flags, + directory, + }; }; decode_utf8 = () => { @@ -92,11 +108,13 @@ export default class packet { throw new Error('String not found in buffer'); }; - decode_i8 = - () => { return ui8_array_to_i8(this.buffer, this.decode_offset++); }; + decode_i8 = () => { + return ui8_array_to_i8(this.buffer, this.decode_offset++); + }; - decode_ui8 = - () => { return ui8_array_to_ui8(this.buffer, this.decode_offset++); }; + decode_ui8 = () => { + return ui8_array_to_ui8(this.buffer, this.decode_offset++); + }; decode_i16 = () => { const ret = be_ui8_array_to_i16(this.buffer, this.decode_offset); @@ -124,7 +142,7 @@ export default class packet { decode_i64 = () => { const ret = new Int64BE( - this.buffer.slice(this.decode_offset, this.decode_offset + 8), + this.buffer.slice(this.decode_offset, this.decode_offset + 8) ); this.decode_offset += 8; return ret.toString(10); @@ -132,7 +150,7 @@ export default class packet { decode_ui64 = () => { const ret = new Uint64BE( - this.buffer.slice(this.decode_offset, this.decode_offset + 8), + this.buffer.slice(this.decode_offset, this.decode_offset + 8) ); this.decode_offset += 8; return ret.toString(10); @@ -146,10 +164,9 @@ export default class packet { const key = Uint8Array.from(hash.array()); const nonce = this.buffer.slice(0, 12); - this.buffer = new JSChaCha20(key, nonce, 0) - .decrypt( - this.buffer.slice(12), - ); + this.buffer = new JSChaCha20(key, nonce, 0).decrypt( + this.buffer.slice(12) + ); this.decode_offset = packet.HEADER.length; @@ -164,65 +181,93 @@ export default class packet { } }; - encode_buffer = buffer => { this.append_buffer(new Uint8Array(buffer)); }; + encode_buffer = (buffer) => { + this.append_buffer(new Uint8Array(buffer)); + }; - encode_i8 = num => { this.append_buffer(i8_to_ui8_array(num)); }; + encode_i8 = (num) => { + this.append_buffer(i8_to_ui8_array(num)); + }; - encode_top_i8 = num => { this.push_buffer(i8_to_ui8_array(num)); }; + encode_top_i8 = (num) => { + this.push_buffer(i8_to_ui8_array(num)); + }; - encode_u8 = num => { this.append_buffer(ui8_to_ui8_array(num)); }; + encode_u8 = (num) => { + this.append_buffer(ui8_to_ui8_array(num)); + }; - encode_top_u8 = num => { 
this.push_buffer(ui8_to_ui8_array(num)); }; + encode_top_u8 = (num) => { + this.push_buffer(ui8_to_ui8_array(num)); + }; - encode_i16 = num => { this.append_buffer(i16_to_be_ui8_array(num)); }; + encode_i16 = (num) => { + this.append_buffer(i16_to_be_ui8_array(num)); + }; - encode_top_i16 = num => { this.push_buffer(i16_to_be_ui8_array(num)); }; + encode_top_i16 = (num) => { + this.push_buffer(i16_to_be_ui8_array(num)); + }; - encode_ui16 = num => { this.append_buffer(ui16_to_be_ui8_array(num)); }; + encode_ui16 = (num) => { + this.append_buffer(ui16_to_be_ui8_array(num)); + }; - encode_top_ui16 = num => { this.push_buffer(ui16_to_be_ui8_array(num)); }; + encode_top_ui16 = (num) => { + this.push_buffer(ui16_to_be_ui8_array(num)); + }; - encode_i32 = num => { this.append_buffer(i32_to_be_ui8_array(num)); }; + encode_i32 = (num) => { + this.append_buffer(i32_to_be_ui8_array(num)); + }; - encode_top_i32 = num => { this.push_buffer(i32_to_be_ui8_array(num)); }; + encode_top_i32 = (num) => { + this.push_buffer(i32_to_be_ui8_array(num)); + }; - encode_ui32 = num => { this.append_buffer(ui32_to_be_ui8_array(num)); }; + encode_ui32 = (num) => { + this.append_buffer(ui32_to_be_ui8_array(num)); + }; - encode_top_ui32 = num => { this.push_buffer(ui32_to_be_ui8_array(num)); }; + encode_top_ui32 = (num) => { + this.push_buffer(ui32_to_be_ui8_array(num)); + }; - encode_i64 = num => { + encode_i64 = (num) => { this.append_buffer(new Uint8Array(new Int64BE(num).toArray())); }; - encode_top_i64 = - num => { this.push_buffer(new Uint8Array(new Int64BE(num).toArray())); }; + encode_top_i64 = (num) => { + this.push_buffer(new Uint8Array(new Int64BE(num).toArray())); + }; - encode_ui64 = num => { + encode_ui64 = (num) => { this.append_buffer(new Uint8Array(new Uint64BE(num).toArray())); }; - encode_top_ui64 = - num => { this.push_buffer(new Uint8Array(new Uint64BE(num).toArray())); }; + encode_top_ui64 = (num) => { + this.push_buffer(new Uint8Array(new Uint64BE(num).toArray())); + }; - encode_utf8 = str => { + encode_utf8 = (str) => { if (!(typeof str === 'string' || str instanceof String)) { throw new Error('Value must be of type string'); } - const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]); + const buffer = new Uint8Array([...new TextEncoder().encode(str), 0]); this.append_buffer(buffer); }; - encode_top_utf8 = str => { + encode_top_utf8 = (str) => { if (!(typeof str === 'string' || str instanceof String)) { throw new Error('Value must be of type string'); } - const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]); + const buffer = new Uint8Array([...new TextEncoder().encode(str), 0]); this.push_buffer(buffer); }; - encrypt = async nonce => { + encrypt = async (nonce) => { try { this.push_buffer(packet.HEADER); const hash = sha256.create(); @@ -242,12 +287,13 @@ export default class packet { } }; - push_buffer = buffer => { + push_buffer = (buffer) => { if (!(buffer instanceof Uint8Array)) { throw new Error('Buffer must be of type Uint8Array'); } - this.buffer = - this.buffer ? new Uint8Array([...buffer, ...this.buffer ]) : buffer; + this.buffer = this.buffer + ? 
new Uint8Array([...buffer, ...this.buffer]) + : buffer; }; } diff --git a/src/ops/index.js b/src/ops/index.js index 2b41d41..ca6b321 100644 --- a/src/ops/index.js +++ b/src/ops/index.js @@ -1,5 +1,5 @@ import fs from 'fs'; -import {Uint64BE} from 'int64-buffer'; +import { Uint64BE } from 'int64-buffer'; import file from '../io/file'; import packet from '../networking/packet'; @@ -9,8 +9,10 @@ const _snapshot_directory = async (conn, remote_path) => { const request = new packet(); request.encode_utf8(remote_path); - const response = - await conn.send('::RemoteJSONCreateDirectorySnapshot', request); + const response = await conn.send( + '::RemoteJSONCreateDirectorySnapshot', + request + ); response.decode_ui32(); // Service flags const result = response.decode_i32(); @@ -28,15 +30,17 @@ const _snapshot_directory = async (conn, remote_path) => { }; try { - const get_page = async page => { + const get_page = async (page) => { try { const request = new packet(); request.encode_utf8(remote_path); request.encode_ui64(data.handle); request.encode_ui32(page); - const response = - await conn.send('::RemoteJSONReadDirectorySnapshot', request); + const response = await conn.send( + '::RemoteJSONReadDirectorySnapshot', + request + ); response.decode_ui32(); // Service flags const result = response.decode_i32(); @@ -67,22 +71,29 @@ const _snapshot_directory = async (conn, remote_path) => { } }; -export const close_file = - async (conn, remote_path, handle, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui64(handle); +export const close_file = async ( + conn, + remote_path, + handle, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui64(handle); - const response = - await conn.send('::RemoteFUSERelease', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSERelease', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - return response.decode_i32(); - } catch (err) { - return Promise.reject(new Error(`'close_file' failed: ${err}`)); - } - }; + return response.decode_i32(); + } catch (err) { + return Promise.reject(new Error(`'close_file' failed: ${err}`)); + } +}; export const create_directory = async (conn, remote_path) => { try { @@ -99,28 +110,34 @@ export const create_directory = async (conn, remote_path) => { } }; -export const create_or_open_file = - async (conn, remote_path, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui16((7 << 6) | (5 << 3)); - request.encode_ui32(2 | 4); // Read-Write, Create +export const create_or_open_file = async ( + conn, + remote_path, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui16((7 << 6) | (5 << 3)); + request.encode_ui32(2 | 4); // Read-Write, Create - const response = - await conn.send('::RemoteFUSECreate', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSECreate', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - const result = response.decode_i32(); - if (result === 0) { - return response.decode_ui64(); - } - - return Promise.reject(new Error(`'create_or_open_file' error: ${result}`)); - } catch (err) { - return Promise.reject(new Error(`'create_or_open_file' failed: 
${err}`)); + const result = response.decode_i32(); + if (result === 0) { + return response.decode_ui64(); } - }; + + return Promise.reject(new Error(`'create_or_open_file' error: ${result}`)); + } catch (err) { + return Promise.reject(new Error(`'create_or_open_file' failed: ${err}`)); + } +}; export const delete_file = async (conn, remote_path) => { try { @@ -136,97 +153,113 @@ export const delete_file = async (conn, remote_path) => { } }; -export const download_file = - async (conn, remote_path, local_path, progress_cb, overwrite, resume) => { - try { - const src = new file(conn, await open_file(conn, remote_path), remote_path); - const cleanup = async fd => { - try { - await src.close(); - } catch (err) { - console.log(err); - } - try { - if (fd !== undefined) { - fs.closeSync(fd); - } - } catch (err) { - console.log(err); - } - }; - +export const download_file = async ( + conn, + remote_path, + local_path, + progress_cb, + overwrite, + resume +) => { + try { + const src = new file(conn, await open_file(conn, remote_path), remote_path); + const cleanup = async (fd) => { try { - const src_size = await src.get_size(); - let dst_fd; - - try { - let offset = 0; - if (overwrite) { - dst_fd = fs.openSync(local_path, 'w+'); - } else if (resume) { - dst_fd = fs.openSync(local_path, 'r+'); - - const dst_size = fs.fstatSync(dst_fd).size; - if (dst_size === src_size) { - await cleanup(dst_fd); - return true; - } - - if (dst_size > src_size) { - await cleanup(dst_fd); - return Promise.reject(new Error( - `'download_file' failed: destination is larger than source`)); - } - - offset = dst_size; - } else { - if (fs.existsSync(local_path)) { - await cleanup(dst_fd); - return Promise.reject( - new Error(`'download_file' failed: file exists`)); - } - - dst_fd = fs.openSync(local_path, 'wx+'); - } - - let remain = src_size - offset; - while (remain > 0) { - const to_write = remain >= 65536 ? 65536 : remain; - const buffer = await src.read(offset, to_write); - const written = fs.writeSync(dst_fd, buffer, 0, to_write, offset); - if (written > 0) { - remain -= written; - offset += written; - if (progress_cb) { - progress_cb(local_path, remote_path, - ((src_size - remain) / src_size) * 100.0, false); - } - } - } - - if (progress_cb) { - progress_cb(local_path, remote_path, 100, true); - } - - await cleanup(dst_fd); - return true; - } catch (err) { - await cleanup(dst_fd); - return Promise.reject(new Error(`'download_file' failed: ${err}`)); + await src.close(); + } catch (err) { + console.log(err); + } + try { + if (fd !== undefined) { + fs.closeSync(fd); } } catch (err) { - await cleanup(); + console.log(err); + } + }; + + try { + const src_size = await src.get_size(); + let dst_fd; + + try { + let offset = 0; + if (overwrite) { + dst_fd = fs.openSync(local_path, 'w+'); + } else if (resume) { + dst_fd = fs.openSync(local_path, 'r+'); + + const dst_size = fs.fstatSync(dst_fd).size; + if (dst_size === src_size) { + await cleanup(dst_fd); + return true; + } + + if (dst_size > src_size) { + await cleanup(dst_fd); + return Promise.reject( + new Error( + `'download_file' failed: destination is larger than source` + ) + ); + } + + offset = dst_size; + } else { + if (fs.existsSync(local_path)) { + await cleanup(dst_fd); + return Promise.reject( + new Error(`'download_file' failed: file exists`) + ); + } + + dst_fd = fs.openSync(local_path, 'wx+'); + } + + let remain = src_size - offset; + while (remain > 0) { + const to_write = remain >= 65536 ? 
65536 : remain; + const buffer = await src.read(offset, to_write); + const written = fs.writeSync(dst_fd, buffer, 0, to_write, offset); + if (written > 0) { + remain -= written; + offset += written; + if (progress_cb) { + progress_cb( + local_path, + remote_path, + ((src_size - remain) / src_size) * 100.0, + false + ); + } + } + } + + if (progress_cb) { + progress_cb(local_path, remote_path, 100, true); + } + + await cleanup(dst_fd); + return true; + } catch (err) { + await cleanup(dst_fd); return Promise.reject(new Error(`'download_file' failed: ${err}`)); } } catch (err) { + await cleanup(); return Promise.reject(new Error(`'download_file' failed: ${err}`)); } - }; + } catch (err) { + return Promise.reject(new Error(`'download_file' failed: ${err}`)); + } +}; -export const get_drive_information = async conn => { +export const get_drive_information = async (conn) => { try { - const response = - await conn.send('::RemoteWinFSPGetVolumeInfo', new packet()); + const response = await conn.send( + '::RemoteWinFSPGetVolumeInfo', + new packet() + ); response.decode_ui32(); // Service flags const result = response.decode_i32(); @@ -241,40 +274,52 @@ export const get_drive_information = async conn => { } return Promise.reject( - new Error(`'get_drive_information' failed: ${result}`)); + new Error(`'get_drive_information' failed: ${result}`) + ); } catch (err) { return Promise.reject(new Error(`'get_drive_information' failed: ${err}`)); } }; -export const get_file_attributes = - async (conn, handle, remote_path, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui64(handle); - request.encode_ui32(0); - request.encode_ui32(0); +export const get_file_attributes = async ( + conn, + handle, + remote_path, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui64(handle); + request.encode_ui32(0); + request.encode_ui32(0); - const response = - await conn.send('::RemoteFUSEFgetattr', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSEFgetattr', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - const result = response.decode_i32(); - if (result === 0) { - return response.decode_stat(); - } - - return Promise.reject(new Error(`'get_file_attributes' failed: ${result}`)); - } catch (err) { - return Promise.reject(new Error(`'get_file_attributes' failed: ${err}`)); + const result = response.decode_i32(); + if (result === 0) { + return response.decode_stat(); } - }; + + return Promise.reject(new Error(`'get_file_attributes' failed: ${result}`)); + } catch (err) { + return Promise.reject(new Error(`'get_file_attributes' failed: ${err}`)); + } +}; export const list_directory = async (conn, remote_path, page_reader_cb) => { const dir_snapshot = await _snapshot_directory(conn, remote_path); try { - await page_reader_cb(dir_snapshot.remote_path, dir_snapshot.page_count, dir_snapshot.get_page); + await page_reader_cb( + dir_snapshot.remote_path, + dir_snapshot.page_count, + dir_snapshot.get_page + ); await dir_snapshot.release(); } catch (err) { await dir_snapshot.release(); @@ -288,8 +333,11 @@ export const open_file = async (conn, remote_path, optional_thread_id) => { request.encode_utf8(remote_path); request.encode_ui32(2); // Read-Write - const response = - await conn.send('::RemoteFUSEOpen', request, optional_thread_id); + const response = await conn.send( + 
'::RemoteFUSEOpen', + request, + optional_thread_id + ); response.decode_ui32(); // Service flags const result = response.decode_i32(); @@ -302,28 +350,37 @@ export const open_file = async (conn, remote_path, optional_thread_id) => { } }; -export const read_file = - async (conn, handle, remote_path, offset, length, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui64(length); - request.encode_ui64(offset); - request.encode_ui64(handle); +export const read_file = async ( + conn, + handle, + remote_path, + offset, + length, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui64(length); + request.encode_ui64(offset); + request.encode_ui64(handle); - const response = - await conn.send('::RemoteFUSERead', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSERead', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - const result = response.decode_i32(); - if (result === length) { - return response.decode_buffer(result); - } - return Promise.reject(new Error(`'read_file' error: ${result}`)); - } catch (err) { - return Promise.reject(new Error(`'read_file' failed: ${err}`)); + const result = response.decode_i32(); + if (result === length) { + return response.decode_buffer(result); } - }; + return Promise.reject(new Error(`'read_file' error: ${result}`)); + } catch (err) { + return Promise.reject(new Error(`'read_file' failed: ${err}`)); + } +}; export const remove_directory = async (conn, remote_path) => { try { @@ -341,146 +398,183 @@ export const remove_directory = async (conn, remote_path) => { export const snapshot_directory = _snapshot_directory; -export const truncate_file = - async (conn, handle, remote_path, length, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui64(length); - request.encode_ui64(handle); +export const truncate_file = async ( + conn, + handle, + remote_path, + length, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui64(length); + request.encode_ui64(handle); - const response = - await conn.send('::RemoteFUSEFtruncate', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSEFtruncate', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - return response.decode_i32(); - } catch (err) { - return Promise.reject(new Error(`'truncate_file' failed: ${err}`)); - } - }; + return response.decode_i32(); + } catch (err) { + return Promise.reject(new Error(`'truncate_file' failed: ${err}`)); + } +}; -export const upload_file = - async (conn, local_path, remote_path, progress_cb, overwrite, resume) => { - try { - const src_fd = fs.openSync(local_path, 'r'); - const cleanup = async f => { - try { - fs.closeSync(src_fd); - } catch (err) { - console.log(err); - } - try { - if (f) { - await f.close(); - } - } catch (err) { - console.log(err); - } - }; +export const upload_file = async ( + conn, + local_path, + remote_path, + progress_cb, + overwrite, + resume +) => { + try { + const src_fd = fs.openSync(local_path, 'r'); + const cleanup = async (f) => { try { - const src_st = fs.fstatSync(src_fd); - let dst; - const create_dest = async () => { - dst = new file(conn, await create_or_open_file(conn, remote_path), 
- remote_path); - }; - - try { - let offset = 0; - if (overwrite) { - await create_dest(); - const result = await dst.truncate(0); - if (result !== 0) { - await cleanup(dst); - return Promise.reject(new Error(`'upload_file' failed: ${result}`)); - } - } else if (resume) { - await create_dest(); - const dst_size = new Uint64BE(await dst.get_size()).toNumber(); - if (dst_size === src_st.size) { - await cleanup(dst); - return true; - } - - if (dst_size > src_st.size) { - await cleanup(dst); - return Promise.reject(new Error( - `'upload_file' failed: destination is larger than source`)); - } - - offset = dst_size; - } else { - try { - const f = - new file(conn, await open_file(conn, remote_path), remote_path); - await cleanup(f); - return Promise.reject( - new Error('\'upload_file\' failed: file exists')); - } catch (err) { - await create_dest(); - } - } - - let remain = src_st.size - offset; - const default_buffer = Buffer.alloc(65536 * 2); - while (remain > 0) { - const to_write = - remain >= default_buffer.length ? default_buffer.length : remain; - const buffer = to_write === default_buffer.length - ? default_buffer - : Buffer.alloc(to_write); - fs.readSync(src_fd, buffer, 0, to_write, offset); - const written = await dst.write(offset, buffer); - if (written > 0) { - remain -= written; - offset += written; - if (progress_cb) { - progress_cb(local_path, remote_path, - ((src_st.size - remain) / src_st.size) * 100.0, - false); - } - } - } - - if (progress_cb) { - progress_cb(local_path, remote_path, 100, true); - } - - await cleanup(dst); - return true; - } catch (err) { - await cleanup(dst); - return Promise.reject(new Error(`'upload_file' failed: ${err}`)); + fs.closeSync(src_fd); + } catch (err) { + console.log(err); + } + try { + if (f) { + await f.close(); } } catch (err) { - await cleanup(); + console.log(err); + } + }; + try { + const src_st = fs.fstatSync(src_fd); + let dst; + const create_dest = async () => { + dst = new file( + conn, + await create_or_open_file(conn, remote_path), + remote_path + ); + }; + + try { + let offset = 0; + if (overwrite) { + await create_dest(); + const result = await dst.truncate(0); + if (result !== 0) { + await cleanup(dst); + return Promise.reject(new Error(`'upload_file' failed: ${result}`)); + } + } else if (resume) { + await create_dest(); + const dst_size = new Uint64BE(await dst.get_size()).toNumber(); + if (dst_size === src_st.size) { + await cleanup(dst); + return true; + } + + if (dst_size > src_st.size) { + await cleanup(dst); + return Promise.reject( + new Error( + `'upload_file' failed: destination is larger than source` + ) + ); + } + + offset = dst_size; + } else { + try { + const f = new file( + conn, + await open_file(conn, remote_path), + remote_path + ); + await cleanup(f); + return Promise.reject( + new Error("'upload_file' failed: file exists") + ); + } catch (err) { + await create_dest(); + } + } + + let remain = src_st.size - offset; + const default_buffer = Buffer.alloc(65536 * 2); + while (remain > 0) { + const to_write = + remain >= default_buffer.length ? default_buffer.length : remain; + const buffer = + to_write === default_buffer.length + ? 
default_buffer + : Buffer.alloc(to_write); + fs.readSync(src_fd, buffer, 0, to_write, offset); + const written = await dst.write(offset, buffer); + if (written > 0) { + remain -= written; + offset += written; + if (progress_cb) { + progress_cb( + local_path, + remote_path, + ((src_st.size - remain) / src_st.size) * 100.0, + false + ); + } + } + } + + if (progress_cb) { + progress_cb(local_path, remote_path, 100, true); + } + + await cleanup(dst); + return true; + } catch (err) { + await cleanup(dst); return Promise.reject(new Error(`'upload_file' failed: ${err}`)); } } catch (err) { + await cleanup(); return Promise.reject(new Error(`'upload_file' failed: ${err}`)); } - }; + } catch (err) { + return Promise.reject(new Error(`'upload_file' failed: ${err}`)); + } +}; -export const write_file = - async (conn, handle, remote_path, offset, buffer, optional_thread_id) => { - try { - const request = new packet(); - request.encode_utf8(remote_path); - request.encode_ui64(buffer.length); - request.encode_buffer(buffer); - request.encode_ui64(offset); - request.encode_ui64(handle); +export const write_file = async ( + conn, + handle, + remote_path, + offset, + buffer, + optional_thread_id +) => { + try { + const request = new packet(); + request.encode_utf8(remote_path); + request.encode_ui64(buffer.length); + request.encode_buffer(buffer); + request.encode_ui64(offset); + request.encode_ui64(handle); - const response = - await conn.send('::RemoteFUSEWrite', request, optional_thread_id); - response.decode_ui32(); // Service flags + const response = await conn.send( + '::RemoteFUSEWrite', + request, + optional_thread_id + ); + response.decode_ui32(); // Service flags - const result = response.decode_i32(); - if (result === buffer.length) { - return result; - } - return Promise.reject(new Error(`'write_file' error: ${result}`)); - } catch (err) { - return Promise.reject(new Error(`'write_file' failed: ${err}`)); + const result = response.decode_i32(); + if (result === buffer.length) { + return result; } - }; + return Promise.reject(new Error(`'write_file' error: ${result}`)); + } catch (err) { + return Promise.reject(new Error(`'write_file' failed: ${err}`)); + } +}; diff --git a/src/utils/byte_order.js b/src/utils/byte_order.js index cb7e9e1..66bac76 100644 --- a/src/utils/byte_order.js +++ b/src/utils/byte_order.js @@ -1,10 +1,10 @@ export const is_big_endian_system = - new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x12; + new Uint8Array(new Uint32Array([0x12345678]).buffer)[0] === 0x12; export const is_little_endian_system = - new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x78; + new Uint8Array(new Uint32Array([0x12345678]).buffer)[0] === 0x78; -export const i8_to_ui8_array = num => { +export const i8_to_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } @@ -21,7 +21,7 @@ export const ui8_array_to_i8 = (ar, offset) => { return buffer.readInt8(0); }; -export const ui8_to_ui8_array = num => { +export const ui8_to_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } @@ -38,7 +38,7 @@ export const ui8_array_to_ui8 = (ar, offset) => { return buffer.readUInt8(0); }; -export const i16_to_be_ui8_array = num => { +export const i16_to_be_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } @@ -56,7 +56,7 @@ export const be_ui8_array_to_i16 = (ar, offset) => { return buffer.readInt16BE(0); }; -export const 
ui16_to_be_ui8_array = num => { +export const ui16_to_be_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } @@ -74,7 +74,7 @@ export const be_ui8_array_to_ui16 = (ar, offset) => { return buffer.readUInt16BE(0); }; -export const i32_to_be_ui8_array = num => { +export const i32_to_be_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } @@ -92,7 +92,7 @@ export const be_ui8_array_to_i32 = (ar, offset) => { return buffer.readInt32BE(0); }; -export const ui32_to_be_ui8_array = num => { +export const ui32_to_be_ui8_array = (num) => { if (typeof num === 'string' || num instanceof String) { num = parseInt(num, 10); } diff --git a/src/utils/constants.js b/src/utils/constants.js index 2f36a26..2d5d868 100644 --- a/src/utils/constants.js +++ b/src/utils/constants.js @@ -1,6 +1,7 @@ -const {v4: uuidv4} = require('uuid'); -import _package_json from '../../package.json' +const { v4: uuidv4 } = require('uuid'); +import _package_json from '../../package.json'; export const instance_id = uuidv4(); export const package_json = _package_json; -export const get_version = () => process.env.REPERTORY_JS_FORCE_VERSION || _package_json.version; +export const get_version = () => + process.env.REPERTORY_JS_FORCE_VERSION || _package_json.version; diff --git a/src/utils/jschacha20.js b/src/utils/jschacha20.js index feddac9..1ea1933 100644 --- a/src/utils/jschacha20.js +++ b/src/utils/jschacha20.js @@ -63,7 +63,7 @@ * * @constructor */ -var JSChaCha20 = function(key, nonce, counter) { +var JSChaCha20 = function (key, nonce, counter) { if (typeof counter === 'undefined') { counter = 0; } @@ -175,7 +175,7 @@ var JSChaCha20 = function(key, nonce, counter) { this._byteCounter = 0; }; -JSChaCha20.prototype._chacha = function() { +JSChaCha20.prototype._chacha = function () { var mix = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; var i = 0; var b = 0; @@ -221,7 +221,7 @@ JSChaCha20.prototype._chacha = function() { * @param {number} d * @private */ -JSChaCha20.prototype._quarterround = function(output, a, b, c, d) { +JSChaCha20.prototype._quarterround = function (output, a, b, c, d) { output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 16); output[b] = this._rotl(output[b] ^ (output[c] += output[d]), 12); output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 8); @@ -242,7 +242,7 @@ JSChaCha20.prototype._quarterround = function(output, a, b, c, d) { * @return {number} * @private */ -JSChaCha20.prototype._get32 = function(data, index) { +JSChaCha20.prototype._get32 = function (data, index) { return ( data[index++] ^ (data[index++] << 8) ^ @@ -259,7 +259,7 @@ JSChaCha20.prototype._get32 = function(data, index) { * @return {number} * @private */ -JSChaCha20.prototype._rotl = function(data, shift) { +JSChaCha20.prototype._rotl = function (data, shift) { return (data << shift) | (data >>> (32 - shift)); }; @@ -269,7 +269,7 @@ JSChaCha20.prototype._rotl = function(data, shift) { * @param {Uint8Array} data * @return {Uint8Array} */ -JSChaCha20.prototype.encrypt = function(data) { +JSChaCha20.prototype.encrypt = function (data) { return this._update(data); }; @@ -279,7 +279,7 @@ JSChaCha20.prototype.encrypt = function(data) { * @param {Uint8Array} data * @return {Uint8Array} */ -JSChaCha20.prototype.decrypt = function(data) { +JSChaCha20.prototype.decrypt = function (data) { return this._update(data); }; @@ -290,7 +290,7 @@ JSChaCha20.prototype.decrypt = function(data) { * @return {Uint8Array} * @private */ 
-JSChaCha20.prototype._update = function(data) { +JSChaCha20.prototype._update = function (data) { if (!(data instanceof Uint8Array) || data.length === 0) { throw new Error('Data should be type of bytes (Uint8Array) and not empty!'); }
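A note on the new tooling: the `.prettierrc.json` added above (2-space indent, single quotes, semicolons, ES5 trailing commas) matches the style applied throughout the reformatted sources in this patch, and `.eslintrc.json` layers `eslint:recommended` plus the jest plugin's globals on top. The `package.json` hunk adds `eslint` and `eslint-plugin-jest` as devDependencies but does not show a lint script, so the following is only a rough sketch of how the linter could be driven from Node; the source glob and the `stylish` formatter are assumptions, not part of the patch.

```javascript
// Rough sketch: driving the newly added eslint ^7.22.0 devDependency from Node.
// The 'src/**/*.js' glob and 'stylish' formatter are assumptions, not part of this patch.
import eslintPkg from 'eslint';

const { ESLint } = eslintPkg;

(async () => {
  // Picks up .eslintrc.json from the project root (eslint:recommended + jest globals).
  const linter = new ESLint();
  const results = await linter.lintFiles(['src/**/*.js']);
  const formatter = await linter.loadFormatter('stylish');
  console.log(formatter.format(results));
  process.exitCode = results.some((r) => r.errorCount > 0) ? 1 : 0;
})();
```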
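For context on the API surface that the reformatted `src/index.js` and integration tests exercise, here is a minimal usage sketch of the renamed `@blockstorage/repertory-js` package, assembled from `src/index.js` and `src/__tests__/repertory.test.js`. The host, port, password, and file paths are placeholders.

```javascript
// Minimal usage sketch based on src/index.js and src/__tests__/repertory.test.js.
// Host, port, password, and paths below are placeholders.
import * as repertory from '@blockstorage/repertory-js';

(async () => {
  // Pool sizes <= 1 fall back to a single connection (see repertory.test.js).
  const conn = await repertory.create_pool(2, 'localhost', 20000, 'secret');
  const api = repertory.create_api(conn);

  console.log(await api.get_drive_information());

  // Upload, then download with overwrite enabled; the progress callback receives
  // (local_path, remote_path, percent_complete, finished).
  await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) =>
    console.log(l, r, p, c)
  );
  await api.file.download(
    '/repertory_test.dat',
    'test_copy.dat',
    (l, r, p, c) => console.log(l, r, p, c),
    true // overwrite
  );
  await api.file.delete('/repertory_test.dat');

  // Paged directory listing via the snapshot API.
  await api.directory.list('/', async (remote_path, page_count, get_page) => {
    for (let page = 0; page < page_count; page++) {
      console.log(remote_path, await get_page(page));
    }
  });
})();
```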