Updated repo
This commit is contained in:
@@ -1,4 +0,0 @@
|
||||
node_modules
|
||||
.vscode
|
||||
dist
|
||||
backup
|
||||
17
.eslintrc
17
.eslintrc
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"browser": true,
|
||||
"node": true,
|
||||
"jquery": true,
|
||||
"jest/globals": true,
|
||||
"es6": true
|
||||
},
|
||||
"parserOptions": {
|
||||
"sourceType": "module",
|
||||
"allowImportExportEverywhere": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended"
|
||||
],
|
||||
"parser": "babel-eslint"
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
node_modules
|
||||
.vscode
|
||||
dist
|
||||
backup
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"trailingComma": "es5",
|
||||
"tabWidth": 2,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"jsxBracketSameLine": true
|
||||
}
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
# Changelog
|
||||
|
||||
## 1.3.1-r3
|
||||
- Added directory/file exists
|
||||
- Fix unit tests
|
||||
|
||||
## 1.3.1-r2
|
||||
- Initial release
|
||||
18
LICENSE.md
18
LICENSE.md
@@ -1,18 +0,0 @@
|
||||
# `repertory-js` MIT License
|
||||
|
||||
### Copyright <2021> <scott.e.graves@protonmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to deal in the Software without restriction,
|
||||
including without limitation the rights to use, copy, modify, merge, publish, distribute,
|
||||
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
|
||||
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
|
||||
OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
203
README.md
203
README.md
@@ -1,201 +1,2 @@
|
||||
# About
|
||||
|
||||
`repertory-js` is a Node.js module for interfacing with `repertory's` remote mount API.
|
||||
|
||||
## Installing
|
||||
|
||||
```shell
|
||||
npm i @blockstorage/repertory-js
|
||||
```
|
||||
|
||||
## Repertory Configuration
|
||||
|
||||
A Repertory mount must be active with the `EnableRemoteMount` setting enabled. `RemoteToken` should
|
||||
also be set to a strong, random password.
|
||||
|
||||
### Enabling Sia Remote Mount API on Windows Systems
|
||||
|
||||
```shell
|
||||
repertory.exe -unmount
|
||||
repertory.exe -set RemoteMount.EnableRemoteMount true
|
||||
repertory.exe -set RemoteMount.RemoteToken "my password"
|
||||
|
||||
[Optional - change listening port]
|
||||
repertory.exe -set RemoteMount.RemotePort 20202
|
||||
```
|
||||
|
||||
### Enabling Sia Remote Mount API on *NIX Systems
|
||||
|
||||
```shell
|
||||
./repertory -unmount
|
||||
./repertory -set RemoteMount.EnableRemoteMount true
|
||||
./repertory -set RemoteMount.RemoteToken "my password"
|
||||
|
||||
[Optional - change listening port]
|
||||
./repertory -set RemoteMount.RemotePort 20202
|
||||
```
|
||||
|
||||
### Skynet and ScPrime Mounts
|
||||
|
||||
* For Skynet mounts, add `-sk` argument to all commands listed above.
|
||||
* For ScPrime mounts, add `-sp` argument to all commands listed above.
|
||||
|
||||
## Module Environment Variables
|
||||
|
||||
* To successfully complete unit tests, a `repertory` mount supporting remote mount needs to be
|
||||
active. Set the following environment variables prior to running tests:
|
||||
* `TEST_HOST`
|
||||
* `TEST_PASSWORD`
|
||||
* `TEST_PORT`
|
||||
* To override the version being sent to `repertory`, set the following variable:
|
||||
* `REPERTORY_JS_FORCE_VERSION`
|
||||
* NOTE: This variable is primarily used for debugging/testing purposes and should normally
|
||||
NOT be set.
|
||||
|
||||
## Example API Usage
|
||||
|
||||
```javascript
|
||||
import * as rep from "@blockstorage/repertory-js";
|
||||
//const rep = require("@blockstorage/repertory-js");
|
||||
|
||||
|
||||
// Repertory host settings
|
||||
const MY_HOST_OR_IP = 'localhost';
|
||||
const MY_PORT = 20000;
|
||||
const MY_TOKEN = 'password';
|
||||
|
||||
// Progress callback for uploads / downloads
|
||||
const progress_cb = (local_path, remote_path, progress, completed) => {
|
||||
console.log(local_path, remote_path, progress, completed);
|
||||
};
|
||||
|
||||
|
||||
//************************************************************************************************//
|
||||
// Step 1. Create a connection pool (recommended) //
|
||||
//************************************************************************************************//
|
||||
|
||||
const conn = await rep.create_pool(8, MY_HOST_OR_IP, MY_PORT, MY_TOKEN);
|
||||
/* Or create a single connection for light operations
|
||||
const conn = await rep.connect(MY_HOST_OR_IP, MY_PORT, MY_TOKEN);
|
||||
*/
|
||||
|
||||
/* Disconnect when complete
|
||||
await conn.disconnect();
|
||||
*/
|
||||
|
||||
//************************************************************************************************//
|
||||
// Step 2. Create an 'api' instance using the connection pool / connection //
|
||||
//************************************************************************************************//
|
||||
|
||||
const api = rep.create_api(conn);
|
||||
|
||||
|
||||
//************************************************************************************************//
|
||||
// Step 3. Use 'api' //
|
||||
//************************************************************************************************//
|
||||
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
// *********** Directory Operations *********** //
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
|
||||
// Check if directory exists
|
||||
const exists = await api.directory.exists('/my_directory');
|
||||
|
||||
// List directory contents
|
||||
await api.directory.list('/', async (remote_path, page_count, get_page) => {
|
||||
for (let i = 0; i < page_count; i++) {
|
||||
const items = await get_page(i); // Always 'await'
|
||||
console.log(items);
|
||||
}
|
||||
});
|
||||
|
||||
// Asynchronous directory list
|
||||
const snap = await api.directory.snapshot('/');
|
||||
try {
|
||||
for (let i = 0; i < snap.page_count; i++) {
|
||||
const items = await snap.get_page(i); // Always 'await'
|
||||
console.log(items);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
} finally {
|
||||
await snap.release();
|
||||
}
|
||||
|
||||
// Create new directory
|
||||
await api.directory.create('/test');
|
||||
|
||||
// Remove existing directory
|
||||
await api.directory.remove('/test')
|
||||
|
||||
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
// *********** File Operations *********** //
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
|
||||
// Check if file exists
|
||||
const exists = await api.file.exists('/my_file.txt')
|
||||
|
||||
// Delete a file
|
||||
await api.file.delete('/my_file.txt')
|
||||
|
||||
// Download a remote file
|
||||
await api.file.download('/my_file.txt', 'C:\\my_file.txt', progress_cb);
|
||||
|
||||
// Download a remote file and overwrite existing local file
|
||||
await api.file.download('/my_file.txt', 'C:\\my_file.txt', progress_cb, true);
|
||||
|
||||
// Resume failed download
|
||||
await api.file.download('/my_file.txt', 'C:\\my_file.txt', progress_cb, false, true);
|
||||
|
||||
// Upload a local file
|
||||
await api.file.upload('C:\\my_file.txt', '/my_file.txt', progress_cb);
|
||||
|
||||
// Upload a local file and overwrite existing remote file
|
||||
await api.file.upload('C:\\my_file.txt', '/my_file.txt', progress_cb, true);
|
||||
|
||||
// Resume failed upload
|
||||
await api.file.upload('C:\\my_file.txt', '/my_file.txt', progress_cb, false, true);
|
||||
|
||||
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
// *********** Low-Level File Operations *********** //
|
||||
//------------------------------------------------------------------------------------------------//
|
||||
|
||||
// Create or open a remote file
|
||||
{
|
||||
const f = await api.file.create_or_open('/my_file.txt');
|
||||
await f.close();
|
||||
}
|
||||
|
||||
// Open an existing remote file
|
||||
{
|
||||
const f = await api.file.open('/my_file.txt');
|
||||
await f.close();
|
||||
}
|
||||
|
||||
// Write to a file
|
||||
{
|
||||
const f = await api.file.create_or_open('/my_file.txt');
|
||||
|
||||
const b = Buffer.alloc(1);
|
||||
b[0] = 1;
|
||||
await f.write(0, b); // write '1' byte at file offset '0'
|
||||
|
||||
await f.close();
|
||||
}
|
||||
|
||||
// Read from a file
|
||||
{
|
||||
const f = await api.file.create_or_open('/my_file.txt');
|
||||
const b = await f.read(0, 1); // read '1' byte from file offset '0'
|
||||
await f.close();
|
||||
}
|
||||
|
||||
// Truncate / resize file
|
||||
{
|
||||
const f = await api.file.create_or_open('/my_file.txt');
|
||||
await f.truncate(10);
|
||||
await f.close();
|
||||
}
|
||||
```
|
||||
# Repertory JS development is dead.
|
||||
Repository has been archived. 1.3.x_branch has the final release.
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
module.exports = function (api) {
|
||||
api.cache(true);
|
||||
return {
|
||||
presets: [
|
||||
'@babel/preset-env'
|
||||
],
|
||||
plugins: [
|
||||
'@babel/plugin-proposal-class-properties',
|
||||
'@babel/plugin-proposal-private-methods',
|
||||
'@babel/plugin-transform-runtime',
|
||||
'@babel/plugin-transform-regenerator',
|
||||
'@babel/plugin-transform-async-to-generator'
|
||||
]
|
||||
};
|
||||
};
|
||||
11
fixup
11
fixup
@@ -1,11 +0,0 @@
|
||||
cat <<EOF >dist/cjs/package.json
|
||||
{
|
||||
"type": "commonjs"
|
||||
}
|
||||
EOF
|
||||
|
||||
cat <<EOF >dist/mjs/package.json
|
||||
{
|
||||
"type": "module"
|
||||
}
|
||||
EOF
|
||||
68
package.json
68
package.json
@@ -1,68 +0,0 @@
|
||||
{
|
||||
"name": "@blockstorage/repertory-js",
|
||||
"version": "1.3.1-r3",
|
||||
"description": "A Node.js module for interfacing with Repertory's remote mount API",
|
||||
"author": "scott.e.graves@protonmail.com",
|
||||
"license": "MIT",
|
||||
"homepage": "https://bitbucket.org/blockstorage/repertory-js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://bitbucket.org/blockstorage/repertory-js.git"
|
||||
},
|
||||
"keywords": [
|
||||
"repertory",
|
||||
"repertory-ui",
|
||||
"library",
|
||||
"mount",
|
||||
"fuse",
|
||||
"winfsp",
|
||||
"blockchain",
|
||||
"decentralized",
|
||||
"cloud",
|
||||
"storage",
|
||||
"altcoin",
|
||||
"cryptocurrency"
|
||||
],
|
||||
"main": "dist/cjs/index.js",
|
||||
"module": "dist/mjs/index.js",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/mjs/index.js",
|
||||
"require": "./dist/cjs/index.js"
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist/cjs",
|
||||
"dist/mjs"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "rollup -c && ./fixup",
|
||||
"test": "jest",
|
||||
"prepublish": "rollup -c --silent && ./fixup"
|
||||
},
|
||||
"dependencies": {
|
||||
"int64-buffer": "^1.0.0",
|
||||
"socket-pool": "^1.2.3",
|
||||
"text-encoding": "^0.7.0",
|
||||
"uuid": "^8.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.14.3",
|
||||
"@babel/plugin-proposal-class-properties": "^7.13.0",
|
||||
"@babel/plugin-proposal-private-methods": "^7.13.0",
|
||||
"@babel/plugin-transform-async-to-generator": "^7.13.0",
|
||||
"@babel/plugin-transform-regenerator": "^7.13.15",
|
||||
"@babel/plugin-transform-runtime": "^7.14.3",
|
||||
"@babel/preset-env": "^7.14.2",
|
||||
"@rollup/plugin-babel": "^5.3.0",
|
||||
"@rollup/plugin-commonjs": "^15.1.0",
|
||||
"@rollup/plugin-json": "^4.0.0",
|
||||
"@rollup/plugin-node-resolve": "^9.0.0",
|
||||
"@types/jest": "^26.0.23",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"jest": "^26.6.3",
|
||||
"rollup": "^2.50.0",
|
||||
"rollup-plugin-eslint": "^7.0.0",
|
||||
"rollup-plugin-terser": "^7.0.2"
|
||||
}
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
import resolve from '@rollup/plugin-node-resolve';
|
||||
import commonjs from '@rollup/plugin-commonjs';
|
||||
import babel from '@rollup/plugin-babel';
|
||||
import { terser } from 'rollup-plugin-terser';
|
||||
import json from '@rollup/plugin-json';
|
||||
|
||||
const commonConfig = {
|
||||
input: 'src/index.js',
|
||||
output: {
|
||||
name: 'index',
|
||||
sourcemap: true,
|
||||
},
|
||||
plugins: [
|
||||
resolve({
|
||||
customResolveOptions: {
|
||||
moduleDirectory: 'node_modules',
|
||||
},
|
||||
}),
|
||||
babel({
|
||||
exclude: 'node_modules/**',
|
||||
babelHelpers: 'runtime',
|
||||
}),
|
||||
commonjs(),
|
||||
json(),
|
||||
],
|
||||
};
|
||||
|
||||
// ESM config
|
||||
const esmConfig = Object.assign({}, commonConfig);
|
||||
esmConfig.output = Object.assign({}, commonConfig.output, {
|
||||
file: 'dist/mjs/index.js',
|
||||
format: 'esm',
|
||||
});
|
||||
|
||||
// ESM prod config
|
||||
const esmProdConfig = Object.assign({}, esmConfig);
|
||||
esmProdConfig.output = Object.assign({}, esmConfig.output, {
|
||||
file: 'dist/mjs/index.min.js',
|
||||
sourcemap: false,
|
||||
});
|
||||
esmProdConfig.plugins = [...esmConfig.plugins, terser()];
|
||||
|
||||
// CJS config
|
||||
const cjsConfig = Object.assign({}, commonConfig);
|
||||
cjsConfig.output = Object.assign({}, commonConfig.output, {
|
||||
file: 'dist/cjs/index.js',
|
||||
format: 'cjs',
|
||||
});
|
||||
|
||||
// CJS prod config
|
||||
const cjsProdConfig = Object.assign({}, cjsConfig);
|
||||
cjsProdConfig.output = Object.assign({}, cjsConfig.output, {
|
||||
file: 'dist/cjs/index.min.js',
|
||||
sourcemap: false,
|
||||
});
|
||||
cjsProdConfig.plugins = [...cjsConfig.plugins, terser()];
|
||||
let configurations = [];
|
||||
configurations.push(esmConfig, esmProdConfig, cjsConfig, cjsProdConfig);
|
||||
|
||||
export default configurations;
|
||||
@@ -1,105 +0,0 @@
|
||||
import connection from '../networking/connection';
|
||||
import packet from '../networking/packet';
|
||||
import Socket from 'net';
|
||||
|
||||
test(`connect fails when error occurs during createConnection`, async () => {
|
||||
const mock_create = (port, host, cb) => {
|
||||
cb(new Error('mock create error'));
|
||||
};
|
||||
jest.spyOn(Socket, 'createConnection').mockImplementation(mock_create);
|
||||
|
||||
const conn = new connection('localhost', 20000);
|
||||
await expect(conn.connect()).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`socket receive data fails when decryption fails`, async () => {
|
||||
let cbl = {};
|
||||
const socket = {
|
||||
on: (name, cb) => {
|
||||
cbl[name] = cb;
|
||||
},
|
||||
};
|
||||
|
||||
const conn = new connection('', 0, 'b', socket);
|
||||
let reject;
|
||||
const mock_reject = jest.fn().mockImplementation((e) => reject(e));
|
||||
conn.reject = mock_reject;
|
||||
conn.resolve = jest.fn();
|
||||
|
||||
const p = new packet('a');
|
||||
await p.encrypt();
|
||||
p.encode_top_ui32(p.buffer.length);
|
||||
await expect(
|
||||
new Promise((_, r) => {
|
||||
reject = r;
|
||||
cbl['data'](Buffer.from(p.buffer));
|
||||
})
|
||||
).rejects.toThrow(Error);
|
||||
expect(mock_reject.mock.calls.length).toBe(1);
|
||||
});
|
||||
|
||||
test(`disconnect succeeds if an error is thrown`, async () => {
|
||||
const socket = {
|
||||
destroy: () => {
|
||||
throw new Error('mock destroy error');
|
||||
},
|
||||
on: () => {},
|
||||
};
|
||||
|
||||
const conn = new connection('', 0, 'b', socket);
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test(`send fails on socket error`, async () => {
|
||||
let cbl = {};
|
||||
const socket = {
|
||||
on: (name, cb) => {
|
||||
cbl[name] = cb;
|
||||
},
|
||||
};
|
||||
|
||||
const conn = new connection('', 0, 'b', socket);
|
||||
const mock_reject = jest.fn();
|
||||
conn.reject = mock_reject;
|
||||
conn.resolve = jest.fn();
|
||||
|
||||
cbl['error']('socket error');
|
||||
expect(mock_reject).toBeCalled();
|
||||
});
|
||||
|
||||
test(`error is thrown when socket is closed`, async () => {
|
||||
let cbl = {};
|
||||
const socket = {
|
||||
on: (name, cb) => {
|
||||
cbl[name] = cb;
|
||||
},
|
||||
};
|
||||
|
||||
const conn = new connection('', 0, 'b', socket);
|
||||
const mock_reject = jest.fn();
|
||||
conn.reject = mock_reject;
|
||||
conn.resolve = jest.fn();
|
||||
|
||||
cbl['close']();
|
||||
expect(mock_reject).toBeCalled();
|
||||
});
|
||||
|
||||
test(`send fails when write error occurs`, async () => {
|
||||
let cbl = {};
|
||||
const socket = {
|
||||
on: (name, cb) => {
|
||||
cbl[name] = cb;
|
||||
},
|
||||
write: (b, c, cb) => {
|
||||
cb('mock write error');
|
||||
},
|
||||
};
|
||||
|
||||
const conn = new connection('', 0, 'b', socket);
|
||||
try {
|
||||
await conn.send('c', new packet('b'));
|
||||
expect('send should fail').toBeNull();
|
||||
} catch (err) {
|
||||
expect(err).toBeDefined();
|
||||
}
|
||||
});
|
||||
@@ -1,59 +0,0 @@
|
||||
import connection_pool from '../networking/connection_pool';
|
||||
import packet from '../networking/packet';
|
||||
import connection from '../networking/connection';
|
||||
|
||||
jest.mock('../networking/connection');
|
||||
|
||||
test(`construction fails if pool size is <= 1`, () => {
|
||||
expect(() => new connection_pool(1)).toThrow(Error);
|
||||
expect(() => new connection_pool(0)).toThrow(Error);
|
||||
expect(() => new connection_pool(-1)).toThrow(Error);
|
||||
});
|
||||
|
||||
test(`error on socket release is ignored`, async () => {
|
||||
const conn = new connection_pool(2, '', 20000);
|
||||
let invoked = false;
|
||||
jest.spyOn(conn.pool, 'acquire').mockImplementation(() => {
|
||||
return {
|
||||
release: () => {
|
||||
invoked = true;
|
||||
throw new Error('mock release error');
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const mock_send = jest.fn();
|
||||
connection.prototype.send = async () => {
|
||||
return mock_send();
|
||||
};
|
||||
mock_send.mockResolvedValue(0);
|
||||
|
||||
expect(await conn.send('', new packet())).toEqual(0);
|
||||
expect(invoked).toBeTruthy();
|
||||
});
|
||||
|
||||
test(`connection pool send fails if socket acquire fails`, async () => {
|
||||
const conn = new connection_pool(2, '', 20000);
|
||||
jest.spyOn(conn.pool, 'acquire').mockImplementation(() => {
|
||||
throw new Error('mock acquire exception');
|
||||
});
|
||||
|
||||
await expect(conn.send('', new packet())).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`connection pool send fails when connection send fails`, async () => {
|
||||
const conn = new connection_pool(2, '', 20000);
|
||||
jest.spyOn(conn.pool, 'acquire').mockImplementation(() => {
|
||||
return {
|
||||
release: () => {},
|
||||
};
|
||||
});
|
||||
|
||||
const mock_send = jest.fn();
|
||||
connection.prototype.send = async () => {
|
||||
return mock_send();
|
||||
};
|
||||
mock_send.mockRejectedValue(new Error('mock send failed'));
|
||||
|
||||
await expect(conn.send('', new packet())).rejects.toThrow(Error);
|
||||
});
|
||||
@@ -1,26 +0,0 @@
|
||||
import { get_version, instance_id, package_json } from '../utils/constants';
|
||||
import * as uuid from 'uuid';
|
||||
|
||||
test(`can read 'package.json'`, () => {
|
||||
console.log(package_json);
|
||||
expect(package_json).toBeDefined();
|
||||
});
|
||||
|
||||
test(`'instance_id' is valid`, () => {
|
||||
console.log(instance_id);
|
||||
expect(instance_id).toBeDefined();
|
||||
expect(uuid.parse(instance_id)).toBeInstanceOf(Uint8Array);
|
||||
});
|
||||
|
||||
test(`'version' can be read from 'package.json'`, () => {
|
||||
console.log(get_version());
|
||||
expect(get_version()).toBe('1.3.1-r3');
|
||||
});
|
||||
|
||||
test(`'version' can be overridden by environment variable`, () => {
|
||||
console.log(process.env);
|
||||
process.env.REPERTORY_JS_FORCE_VERSION = '1.3.0';
|
||||
console.log(get_version());
|
||||
expect(get_version()).toBe('1.3.0');
|
||||
console.log(process.env);
|
||||
});
|
||||
@@ -1,47 +0,0 @@
|
||||
import file from '../io/file';
|
||||
|
||||
jest.mock('../ops/index.js', () => ({
|
||||
...jest.requireActual('../ops/index.js'),
|
||||
close_file: jest.fn(),
|
||||
}));
|
||||
|
||||
import { close_file } from '../ops/index';
|
||||
|
||||
test(`can close a closed file`, async () => {
|
||||
const f = new file();
|
||||
expect(await f.close()).toEqual(0);
|
||||
});
|
||||
|
||||
test(`'get_size' fails on closed file`, async () => {
|
||||
const f = new file();
|
||||
await expect(f.get_size()).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`'read' fails on closed file`, async () => {
|
||||
const f = new file();
|
||||
await expect(f.read(0, 10)).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`'truncate' fails on closed file`, async () => {
|
||||
const f = new file();
|
||||
await expect(f.truncate(0)).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`'write' fails on closed file`, async () => {
|
||||
const f = new file();
|
||||
await expect(f.write(0, Buffer.alloc(2))).rejects.toThrow(Error);
|
||||
});
|
||||
|
||||
test(`handle is set to null on close`, async () => {
|
||||
const f = new file(null, 1, '/path');
|
||||
close_file.mockReturnValue(0);
|
||||
expect(await f.close()).toEqual(0);
|
||||
expect(f.handle).toBeNull();
|
||||
});
|
||||
|
||||
test(`handle is not changed on close if return is not 0`, async () => {
|
||||
const f = new file(null, 1, '/path');
|
||||
close_file.mockReturnValue(1);
|
||||
expect(await f.close()).toEqual(1);
|
||||
expect(f.handle).toBe(1);
|
||||
});
|
||||
@@ -1,9 +0,0 @@
|
||||
import packet from '../networking/packet';
|
||||
|
||||
test('can construct a packet', () => {
|
||||
const p = new packet('my password');
|
||||
console.log(p);
|
||||
expect(p.token).toEqual('my password');
|
||||
expect(p.buffer).toBeNull();
|
||||
expect(p.decode_offset).toEqual(0);
|
||||
});
|
||||
@@ -1,548 +0,0 @@
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import { Uint64BE } from 'int64-buffer';
|
||||
|
||||
import * as repertory from '../index.js';
|
||||
import connection from '../networking/connection';
|
||||
import connection_pool from '../networking/connection_pool';
|
||||
|
||||
const TEST_HOST = process.env.TEST_HOST || 'localhost';
|
||||
const TEST_PASSWORD = process.env.TEST_PASSWORD || '';
|
||||
const TEST_PORT = process.env.TEST_PORT || 20000;
|
||||
|
||||
const calculate_sha256 = (path) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
|
||||
fs.createReadStream(path)
|
||||
.on('data', (data) => hash.update(data))
|
||||
.on('error', (err) => reject(err))
|
||||
.on('end', () => {
|
||||
const h = hash.digest('hex');
|
||||
console.log(path, h);
|
||||
resolve(h);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const test_connection = (conn, should_be_connected) => {
|
||||
expect(conn).toBeInstanceOf(connection);
|
||||
expect(conn.host_or_ip).toEqual(TEST_HOST);
|
||||
expect(conn.port).toEqual(TEST_PORT);
|
||||
expect(conn.password).toEqual(TEST_PASSWORD);
|
||||
expect(conn.connected).toEqual(should_be_connected);
|
||||
console.log(conn);
|
||||
};
|
||||
|
||||
test('can create a connection to repertory api', async () => {
|
||||
const conn = await repertory.connect(TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
test_connection(conn, true);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('create_pool returns a connection if pool size is <=1', async () => {
|
||||
for (let i = 0; i < 2; i++) {
|
||||
const conn = await repertory.create_pool(
|
||||
i,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
expect(conn).toBeInstanceOf(connection);
|
||||
test_connection(conn, true);
|
||||
|
||||
await conn.disconnect();
|
||||
}
|
||||
});
|
||||
|
||||
test('can create a connection pool', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
console.log(conn);
|
||||
expect(conn).toBeInstanceOf(connection_pool);
|
||||
expect(conn.host_or_ip).toEqual(TEST_HOST);
|
||||
expect(conn.port).toEqual(TEST_PORT);
|
||||
expect(conn.password).toEqual(TEST_PASSWORD);
|
||||
expect(conn.shutdown).toEqual(false);
|
||||
expect(conn.pool._pool.max).toEqual(2);
|
||||
expect(conn.pool._pool.min).toEqual(2);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can get drive information using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const di = await api.get_drive_information();
|
||||
console.log(di);
|
||||
|
||||
expect(di.free).toBeDefined();
|
||||
expect(di.total).toBeDefined();
|
||||
expect(di.used).toBeDefined();
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can create and remove a directory using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.directory.create('/repertory_js')).toEqual(0);
|
||||
expect(await api.directory.exists('/repertory_js')).toEqual(true);
|
||||
expect(await api.file.exists('/repertory_js')).toEqual(false);
|
||||
expect(await api.directory.remove('/repertory_js')).toEqual(0);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can get directory list and snapshot using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
|
||||
const test_results = async (remote_path, page_count, get_page) => {
|
||||
expect(remote_path).toEqual('/');
|
||||
expect(page_count).toBeGreaterThanOrEqual(1);
|
||||
expect(get_page).toBeInstanceOf(Function);
|
||||
for (let i = 0; i < page_count; i++) {
|
||||
const items = await get_page(i);
|
||||
console.log(items);
|
||||
|
||||
expect(items.length).toBeGreaterThanOrEqual(2);
|
||||
expect(items[0].directory).toBeTruthy();
|
||||
expect(items[0].path).toEqual('.');
|
||||
expect(items[1].directory).toBeTruthy();
|
||||
expect(items[1].path).toEqual('..');
|
||||
}
|
||||
};
|
||||
|
||||
await api.directory.list('/', async (remote_path, page_count, get_page) => {
|
||||
console.log(remote_path, page_count, get_page);
|
||||
await test_results(remote_path, page_count, get_page);
|
||||
});
|
||||
|
||||
const snap = await api.directory.snapshot('/');
|
||||
try {
|
||||
console.log(snap.remote_path, snap.page_count, snap.get_page);
|
||||
await test_results(snap.remote_path, snap.page_count, snap.get_page);
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
} finally {
|
||||
await snap.release();
|
||||
}
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can create, close and delete a file using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
console.log(f);
|
||||
expect(f.remote_path).toEqual('/repertory_file.dat');
|
||||
expect(f.conn).toEqual(conn);
|
||||
expect(new Uint64BE(f.handle).toNumber()).toBeGreaterThanOrEqual(0);
|
||||
|
||||
expect(await f.close()).toEqual(0);
|
||||
expect(f.handle).toBeNull();
|
||||
|
||||
expect(await api.file.delete('/repertory_file.dat')).toEqual(0);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can open, close and delete a file using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
let f = await api.file.create_or_open('/repertory_file.dat');
|
||||
expect(await f.close()).toEqual(0);
|
||||
|
||||
f = await api.file.open('/repertory_file.dat');
|
||||
console.log(f);
|
||||
expect(f.remote_path).toEqual('/repertory_file.dat');
|
||||
expect(f.conn).toEqual(conn);
|
||||
expect(new Uint64BE(f.handle).toNumber()).toBeGreaterThanOrEqual(0);
|
||||
|
||||
expect(await f.close()).toEqual(0);
|
||||
expect(f.handle).toBeNull();
|
||||
|
||||
expect(await api.file.delete('/repertory_file.dat')).toEqual(0);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can write to and read from a file using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
|
||||
const buffer = Buffer.alloc(4);
|
||||
buffer[0] = 1;
|
||||
buffer[1] = 2;
|
||||
buffer[2] = 3;
|
||||
buffer[3] = 4;
|
||||
expect(await f.write(0, buffer)).toEqual(buffer.length);
|
||||
expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(buffer.length);
|
||||
|
||||
const buffer2 = await f.read(0, 4);
|
||||
expect(buffer.compare(buffer2)).toEqual(0);
|
||||
|
||||
expect(await f.close()).toEqual(0);
|
||||
expect(await api.file.delete('/repertory_file.dat')).toEqual(0);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can truncate a file using api', async () => {
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
|
||||
expect(await f.truncate(10)).toEqual(0);
|
||||
expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(10);
|
||||
|
||||
expect(await f.truncate(0)).toEqual(0);
|
||||
expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(0);
|
||||
|
||||
expect(await f.close()).toEqual(0);
|
||||
expect(await api.file.delete('/repertory_file.dat')).toEqual(0);
|
||||
|
||||
await conn.disconnect();
|
||||
});
|
||||
|
||||
test('can upload and download a file using api', async () => {
|
||||
try {
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
} catch {}
|
||||
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
}
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await calculate_sha256('test.dat')).toEqual(
|
||||
await calculate_sha256('repertory_test.dat')
|
||||
);
|
||||
|
||||
expect(await api.directory.exists('/repertory_test.dat')).toEqual(false);
|
||||
expect(await api.file.exists('/repertory_test.dat')).toEqual(true);
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
|
||||
await conn.disconnect();
|
||||
}, 60000);
|
||||
|
||||
// Verifies that downloading on top of an existing local file succeeds
// when the 'overwrite' flag is set.
test('can download and overwrite a file using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  expect(
    await api.file.upload('test.dat', '/repertory_test.dat', log_progress)
  ).toBeTruthy();

  // First download creates the local copy...
  expect(
    await api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress
    )
  ).toBeTruthy();

  // ...second download must still succeed because overwrite is 'true'.
  expect(
    await api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress,
      true
    )
  ).toBeTruthy();

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// Verifies that a download onto an existing local file rejects when the
// 'overwrite' flag is explicitly 'false'.
test('download fails if overwrite is false using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  expect(
    await api.file.upload('test.dat', '/repertory_test.dat', log_progress)
  ).toBeTruthy();

  // First download creates the local copy.
  expect(
    await api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress
    )
  ).toBeTruthy();

  // Second download must reject: the local file exists and overwrite is off.
  await expect(
    api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress,
      false
    )
  ).rejects.toThrow(Error);

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// Verifies that re-uploading to an existing remote path succeeds when
// the 'overwrite' flag is set.
test('can upload and overwrite a file using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  expect(
    await api.file.upload('test.dat', '/repertory_test.dat', log_progress)
  ).toBeTruthy();

  // Second upload to the same remote path succeeds with overwrite=true.
  expect(
    await api.file.upload(
      'test.dat',
      '/repertory_test.dat',
      log_progress,
      true
    )
  ).toBeTruthy();

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// Verifies that uploading to an existing remote path rejects when the
// 'overwrite' flag is explicitly 'false'.
test('upload fails if overwrite is false using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  expect(
    await api.file.upload('test.dat', '/repertory_test.dat', log_progress)
  ).toBeTruthy();

  // The remote file now exists, so a non-overwriting upload must reject.
  await expect(
    api.file.upload(
      'test.dat',
      '/repertory_test.dat',
      log_progress,
      false
    )
  ).rejects.toThrow(Error);

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// Seeds a partial (1 KiB) local copy, resumes the download, and checks
// that the completed file matches the source hash.
test('can resume download using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  expect(
    await api.file.upload('test.dat', '/repertory_test.dat', log_progress)
  ).toBeTruthy();

  // Copy only the first 1 KiB locally to simulate an interrupted download.
  const src_fd = fs.openSync('test.dat', 'r');
  const partial = Buffer.alloc(1024);
  fs.readSync(src_fd, partial, 0, partial.length);
  fs.closeSync(src_fd);

  fs.writeFileSync('repertory_test.dat', partial);

  // overwrite=false, resume=true -> continue from the local size.
  expect(
    await api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress,
      false,
      true
    )
  ).toBeTruthy();

  expect(await calculate_sha256('test.dat')).toEqual(
    await calculate_sha256('repertory_test.dat')
  );

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// Seeds the first 1 KiB remotely, resumes the upload, then downloads the
// result and checks it matches the source hash.
test('can resume upload using api', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  const log_progress = (l, r, p, c) => {
    console.log(l, r, p, c);
  };

  // Write only the first 1 KiB remotely to simulate an interrupted upload.
  const src_fd = fs.openSync('test.dat', 'r');
  const partial = Buffer.alloc(1024);
  fs.readSync(src_fd, partial, 0, partial.length);
  fs.closeSync(src_fd);

  const remote = await api.file.create_or_open('/repertory_test.dat');
  await remote.write(0, partial);
  await remote.close();

  // overwrite=false, resume=true -> continue from the remote size.
  expect(
    await api.file.upload(
      'test.dat',
      '/repertory_test.dat',
      log_progress,
      false,
      true
    )
  ).toBeTruthy();

  expect(
    await api.file.download(
      '/repertory_test.dat',
      'repertory_test.dat',
      log_progress
    )
  ).toBeTruthy();

  expect(await calculate_sha256('test.dat')).toEqual(
    await calculate_sha256('repertory_test.dat')
  );

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await pool.disconnect();
}, 60000);
|
||||
|
||||
// A path that was never created must report directory.exists() === false.
test('exists returns false if directory is not found', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  expect(await api.directory.exists('/cow')).toEqual(false);

  await pool.disconnect();
});
|
||||
|
||||
// A path that was never created must report file.exists() === false.
test('exists returns false if file is not found', async () => {
  const pool = await repertory.create_pool(
    2,
    TEST_HOST,
    TEST_PORT,
    TEST_PASSWORD
  );
  const api = repertory.create_api(pool);

  expect(await api.file.exists('/cow')).toEqual(false);

  await pool.disconnect();
});
|
||||
90
src/index.js
90
src/index.js
@@ -1,90 +0,0 @@
|
||||
import file from './io/file';
|
||||
import connection from './networking/connection';
|
||||
import connection_pool from './networking/connection_pool';
|
||||
import * as ops from './ops';
|
||||
|
||||
/**
 * Open a single connection to a repertory remote mount.
 *
 * @param {string} host_or_ip - Remote host name or address.
 * @param {number} port - Remote TCP port.
 * @param {string} password - Token used to encrypt/authenticate traffic.
 * @returns {Promise<connection>} A connected 'connection' instance.
 */
export const connect = async (host_or_ip, port, password) => {
  const ret = new connection(host_or_ip, port, password);
  await ret.connect();
  return ret;
};
|
||||
|
||||
// Error messages raised by 'ops' look like "'<op>' failed: <status>";
// the numeric status follows the first ':' and -2 (ENOENT) means the
// path was not found.  The optional chaining guards against messages
// without a status segment: previously '[1].trim()' threw a TypeError,
// masking the original error, whenever the message had no ':'.
const is_not_found = (e) => e.message.split(':')[1]?.trim() === '-2';

/**
 * Build the high-level directory/file API on top of a connection or
 * connection pool ('conn' only needs a compatible async 'send').
 *
 * @param {object} conn - connection or connection_pool instance.
 * @returns {object} API object with 'directory', 'file' and
 *   'get_drive_information' members.
 */
export const create_api = (conn) => {
  return {
    directory: {
      create: async (remote_path) => ops.create_directory(conn, remote_path),
      // True only when the path exists AND is a directory; a missing
      // path resolves to false instead of rejecting.
      exists: async (remote_path) => {
        try {
          const info = await ops.get_file_attributes2(conn, remote_path);
          return info.directory;
        } catch (e) {
          if (is_not_found(e)) {
            return false;
          }
          throw new Error(e.message);
        }
      },
      list: async (remote_path, page_reader_cb) =>
        ops.list_directory(conn, remote_path, page_reader_cb),
      remove: async (remote_path) => ops.remove_directory(conn, remote_path),
      snapshot: async (remote_path) => {
        return ops.snapshot_directory(conn, remote_path);
      },
    },
    file: {
      // Open the remote file, creating it when absent; returns a 'file'
      // wrapper bound to the new handle.
      create_or_open: async (remote_path) =>
        new file(
          conn,
          await ops.create_or_open_file(conn, remote_path),
          remote_path
        ),
      delete: async (remote_path) => ops.delete_file(conn, remote_path),
      download: async (
        remote_path,
        local_path,
        progress_cb,
        overwrite,
        resume
      ) =>
        ops.download_file(
          conn,
          remote_path,
          local_path,
          progress_cb,
          overwrite,
          resume
        ),
      // True only when the path exists AND is NOT a directory.
      exists: async (remote_path) => {
        try {
          const info = await ops.get_file_attributes2(conn, remote_path);
          return !info.directory;
        } catch (e) {
          if (is_not_found(e)) {
            return false;
          }
          throw new Error(e.message);
        }
      },
      open: async (remote_path) =>
        new file(conn, await ops.open_file(conn, remote_path), remote_path),
      upload: async (local_path, remote_path, progress_cb, overwrite, resume) =>
        ops.upload_file(
          conn,
          local_path,
          remote_path,
          progress_cb,
          overwrite,
          resume
        ),
    },
    get_drive_information: async () => ops.get_drive_information(conn),
  };
};
|
||||
|
||||
/**
 * Create a pooled connection when more than one socket is requested;
 * otherwise fall back to a single direct connection.
 *
 * @param {number} pool_size - Desired number of pooled sockets.
 * @param {string} host_or_ip - Remote host name or address.
 * @param {number} port - Remote TCP port.
 * @param {string} password - Token used to encrypt/authenticate traffic.
 * @returns {Promise<object>} connection_pool, or a connected connection
 *   when 'pool_size' is <= 1.
 */
export const create_pool = async (pool_size, host_or_ip, port, password) => {
  return pool_size <= 1
    ? connect(host_or_ip, port, password)
    : new connection_pool(pool_size, host_or_ip, port, password);
};
|
||||
@@ -1,92 +0,0 @@
|
||||
import * as ops from '../ops';
|
||||
|
||||
let next_thread_id = 1;
|
||||
|
||||
export default class file {
  /**
   * Handle-based accessor for a single remote file.  Instances are
   * created by the 'api.file' helpers; every operation is forwarded
   * over 'conn' using the handle obtained at open time.
   */
  constructor(conn, handle, remote_path) {
    this.conn = conn;
    this.handle = handle || null;
    this.remote_path = remote_path;
    // Each open file gets its own id from the module-level counter so
    // its requests are distinguishable on the remote side.
    this.thread_id = next_thread_id++;
  }

  conn;
  handle = null;
  thread_id;
  remote_path;

  /**
   * Release the remote handle.  Safe to call repeatedly: returns 0 when
   * already closed, otherwise the remote status code.  The handle is
   * cleared only on a successful (0) release.
   */
  async close() {
    if (this.handle === null) {
      return 0;
    }

    const result = await ops.close_file(
      this.conn,
      this.remote_path,
      this.handle,
      this.thread_id
    );
    if (result === 0) {
      this.handle = null;
    }
    return result;
  }

  /** Current remote file size, as reported by the remote attributes. */
  async get_size() {
    if (this.handle === null) {
      throw new Error("'get_size()' failed: invalid handle");
    }

    const attributes = await ops.get_file_attributes(
      this.conn,
      this.handle,
      this.remote_path,
      this.thread_id
    );
    return attributes.size;
  }

  /** Read 'length' bytes starting at 'offset'. */
  async read(offset, length) {
    if (this.handle === null) {
      throw new Error("'read()' failed: invalid handle");
    }

    return ops.read_file(
      this.conn,
      this.handle,
      this.remote_path,
      offset,
      length,
      this.thread_id
    );
  }

  /** Resize the remote file to exactly 'length' bytes. */
  async truncate(length) {
    if (this.handle === null) {
      throw new Error("'truncate()' failed: invalid handle");
    }

    return ops.truncate_file(
      this.conn,
      this.handle,
      this.remote_path,
      length,
      this.thread_id
    );
  }

  /** Write 'buffer' at 'offset'. */
  async write(offset, buffer) {
    if (this.handle === null) {
      throw new Error("'write()' failed: invalid handle");
    }

    return ops.write_file(
      this.conn,
      this.handle,
      this.remote_path,
      offset,
      buffer,
      this.thread_id
    );
  }
}
|
||||
@@ -1,165 +0,0 @@
|
||||
import Socket from 'net';
|
||||
|
||||
import * as constants from '../utils/constants';
|
||||
|
||||
import packet from './packet';
|
||||
|
||||
export default class connection {
  /**
   * Single encrypted, packet-oriented connection to a repertory remote
   * mount.  May wrap an externally-managed socket (pooled mode, used by
   * connection_pool) or create its own socket in 'connect()'.
   * Only one request may be in flight at a time: 'resolve'/'reject'
   * hold the handlers for the pending 'send()'.
   */
  constructor(host_or_ip, port, password, socket) {
    this.host_or_ip = host_or_ip;
    this.port = port;
    this.password = password;
    if (socket) {
      // Pooled mode: the socket is already connected and owned elsewhere.
      this.socket = socket;
      this.connected = true;
      this.setup_socket();
    }
  }

  connected = false;
  host_or_ip = '';
  password = '';
  port = 20000;
  reject; // pending request rejection handler (single in-flight request)
  resolve; // pending request resolution handler
  socket;

  // Drop the pending-request handlers (does not settle them).
  cleanup_handlers() {
    this.reject = null;
    this.resolve = null;
  }

  /**
   * Establish the TCP connection if one does not already exist.
   *
   * BUGFIX: 'net.createConnection' does not pass an error to its connect
   * listener; connection failures are reported via the 'error' event.
   * The original code checked a never-present 'err' argument and never
   * attached an 'error' handler, so a failed connect left the returned
   * promise pending forever.  A temporary 'error' listener now rejects
   * the promise; it is removed on success before 'setup_socket()'
   * installs the long-lived handlers.
   *
   * @returns {Promise<void>} rejects with "'connect()' failed: ..." on error.
   */
  async connect() {
    if (!this.socket) {
      try {
        await new Promise((resolve, reject) => {
          const on_error = (err) => {
            console.log(err);
            reject(err);
          };
          this.socket = Socket.createConnection(
            this.port,
            this.host_or_ip,
            () => {
              this.socket.off('error', on_error);
              resolve();
            }
          );
          this.socket.once('error', on_error);
        });
      } catch (err) {
        return Promise.reject(new Error(`'connect()' failed: ${err}`));
      }

      this.connected = true;
      this.setup_socket();
    }
  }

  /**
   * Install the data/error/close handlers for the request/response
   * protocol.  Responses are encrypted packets prefixed with a 4-byte
   * big-endian length; partial chunks are accumulated until a complete
   * packet is available, then decrypted and handed to the pending
   * 'send()' handlers.
   */
  setup_socket() {
    let buffer;
    const cleanup = () => {
      this.cleanup_handlers();
      buffer = null;
    };

    this.socket.on('data', (chunk) => {
      buffer = buffer ? Buffer.concat([buffer, chunk]) : chunk;
      if (buffer.length > 4) {
        const size = buffer.readUInt32BE(0);
        if (buffer.length >= size + 4) {
          const packet_data = buffer.slice(4, 4 + size);
          if (this.resolve) {
            // Capture and clear the handlers before the async decrypt
            // settles so a late event cannot double-settle the request.
            const complete = () => {
              const reject = this.reject;
              const resolve = this.resolve;
              cleanup();
              return {
                reject,
                resolve,
              };
            };

            const response = new packet(this.password);
            response.buffer = new Uint8Array(packet_data);
            response
              .decrypt()
              .then(() => {
                const { resolve } = complete();
                if (resolve) {
                  resolve(response);
                }
              })
              .catch((e) => {
                console.log(e);
                const { reject } = complete();
                if (reject) {
                  reject(e);
                }
              });
          }
        }
      }
    });

    this.socket.on('error', (e) => {
      // Fail the in-flight request, if any, and mark the link down.
      if (this.reject) {
        const reject = this.reject;

        cleanup();

        this.connected = false;
        console.log(e);
        if (reject) {
          reject(e);
        }
      }
    });

    this.socket.on('close', () => {
      // Remote closed mid-request: surface as an error to the caller.
      if (this.reject) {
        const reject = this.reject;

        cleanup();

        this.connected = false;
        console.log('socket closed');
        if (reject) {
          reject(new Error('socket closed'));
        }
      }
    });
  }

  /** Destroy the socket and drop any pending handlers (best-effort). */
  async disconnect() {
    try {
      if (this.socket) {
        this.socket.destroy();
        this.socket = null;
        this.cleanup_handlers();
        this.connected = false;
      }
    } catch (e) {
      console.log(e);
    }
  }

  /**
   * Send one request and await its response packet.
   *
   * The request envelope is built by prepending (in reverse order of
   * appearance on the wire): version, service flags, instance id,
   * thread id and method name; the payload is then encrypted and a
   * 4-byte length prefix is prepended unencrypted.
   *
   * @param {string} method_name - Remote method, e.g. '::RemoteFUSECreate'.
   * @param {packet} packet - Request payload (mutated in place).
   * @param {number} [optional_thread_id] - Caller-scoped id; defaults to 1.
   * @returns {Promise<packet>} Decrypted response packet.
   */
  async send(method_name, packet, optional_thread_id) {
    packet.token = this.password;
    packet.encode_top_utf8(method_name);
    packet.encode_top_ui64(optional_thread_id || 1);
    packet.encode_top_utf8(constants.instance_id);
    packet.encode_top_ui32(0); // Service flags
    packet.encode_top_utf8(constants.get_version());
    await packet.encrypt();
    packet.encode_top_ui32(packet.buffer.length);
    return new Promise((resolve, reject) => {
      this.reject = reject;
      this.resolve = resolve;
      this.socket.write(Buffer.from(packet.buffer), null, (err) => {
        if (err) {
          this.cleanup_handlers();
          reject(err);
        }
      });
    });
  }
}
|
||||
@@ -1,69 +0,0 @@
|
||||
import Pool from 'socket-pool';
|
||||
|
||||
import connection from './connection';
|
||||
|
||||
export default class connection_pool {
  /**
   * Fixed-size pool of TCP sockets to a repertory remote mount.  Each
   * 'send' borrows a socket, wraps it in a throwaway 'connection', and
   * returns the socket to the pool once the exchange completes.
   */
  constructor(pool_size, host_or_ip, port, password) {
    this.host_or_ip = host_or_ip;
    this.port = port;
    this.password = password;
    if (pool_size <= 1) {
      throw new Error("'pool_size' must be > 1");
    }

    this.pool = new Pool({
      connect: { host: host_or_ip, port: port },
      connectTimeout: 5000,
      pool: { max: pool_size, min: 2 },
    });
  }

  host_or_ip = '';
  next_thread_id = 1;
  password = '';
  port = 20000;
  pool;
  shutdown = false;

  /** Drain and clear the underlying pool; subsequent sends will fail. */
  async disconnect() {
    await this.pool._pool.drain();
    await this.pool._pool.clear();
    this.pool = null;
    this.shutdown = true;
  }

  /**
   * Acquire a pooled socket and perform one request/response exchange.
   * Each socket is tagged with a stable thread id on first use so
   * requests over the same socket share an id on the remote side.
   */
  async send(method_name, packet, optional_thread_id) {
    let socket;
    try {
      socket = await this.pool.acquire();
    } catch (err) {
      return Promise.reject(new Error(`'acquire()' socket failed: ${err}`));
    }

    if (!socket.thread_id) {
      socket.thread_id = this.next_thread_id++;
    }

    // Best-effort return of the socket to the pool.
    const release_socket = () => {
      try {
        socket.release();
      } catch (err) {
        console.log(`'release()' failed: ${err}`);
      }
    };

    try {
      const result = await new connection(
        this.host_or_ip,
        this.port,
        this.password,
        socket
      ).send(method_name, packet, optional_thread_id || socket.thread_id);
      return result;
    } catch (err) {
      return Promise.reject(
        new Error(`'send(${method_name})' failed: ${err}`)
      );
    } finally {
      release_socket();
    }
  }
}
|
||||
@@ -1,299 +0,0 @@
|
||||
import { randomBytes } from 'crypto';
|
||||
import {Int64BE, Uint64BE} from 'int64-buffer';
|
||||
import crypto from 'crypto';
|
||||
import {TextEncoder} from 'text-encoding';
|
||||
|
||||
import {
|
||||
be_ui8_array_to_i16,
|
||||
be_ui8_array_to_i32,
|
||||
be_ui8_array_to_ui16,
|
||||
be_ui8_array_to_ui32,
|
||||
i16_to_be_ui8_array,
|
||||
i32_to_be_ui8_array,
|
||||
i8_to_ui8_array,
|
||||
ui16_to_be_ui8_array,
|
||||
ui32_to_be_ui8_array,
|
||||
ui8_array_to_i8,
|
||||
ui8_array_to_ui8,
|
||||
ui8_to_ui8_array,
|
||||
} from '../utils/byte_order';
|
||||
import JSChaCha20 from '../utils/jschacha20';
|
||||
|
||||
export default class packet {
  /**
   * Wire-format packet: an encrypted, ChaCha20-protected byte buffer
   * with big-endian primitive helpers.  'encode_*' appends to the end
   * of the buffer, 'encode_top_*' prepends, and 'decode_*' consumes
   * forward from 'decode_offset'.
   */
  constructor(token) {
    this.token = token;
  }

  // Magic header placed at the start of every plaintext payload.
  static HEADER = new TextEncoder().encode('repertory');

  buffer = null; // Uint8Array payload (null until first encode)
  decode_offset = 0; // read cursor for the decode_* helpers
  token; // shared secret used to derive the cipher key

  append_buffer = (data) => {
    if (!(data instanceof Uint8Array)) {
      throw new Error('Buffer must be of type Uint8Array');
    }

    this.buffer = this.buffer
      ? new Uint8Array([...this.buffer, ...data])
      : data;
  };

  /** Reset the packet to an empty, un-decoded state. */
  clear = () => {
    this.buffer = null;
    this.decode_offset = 0;
  };

  /** Consume 'length' raw bytes and return them as a Node Buffer. */
  decode_buffer = (length) => {
    if (!this.buffer) {
      throw new Error('Invalid buffer');
    }

    const start = this.decode_offset;
    this.decode_offset = start + length;
    return Buffer.from(this.buffer.slice(start, start + length));
  };

  // Field order must match the remote's serialized stat layout exactly;
  // object-literal values are evaluated in source order, so the decode
  // calls below run in the required sequence.
  decode_stat = () => ({
    mode: this.decode_ui16(),
    nlink: this.decode_ui16(),
    uid: this.decode_ui32(),
    gid: this.decode_ui32(),
    atime: this.decode_ui64(),
    mtime: this.decode_ui64(),
    ctime: this.decode_ui64(),
    birth_time: this.decode_ui64(),
    size: this.decode_ui64(),
    blocks: this.decode_ui64(),
    blksize: this.decode_ui32(),
    flags: this.decode_ui32(),
    directory: this.decode_ui8() !== 0,
  });

  /** Consume a NUL-terminated string (one char per byte). */
  decode_utf8 = () => {
    if (!this.buffer) {
      throw new Error('Invalid buffer');
    }

    const start = this.decode_offset;
    const terminator = this.buffer.indexOf(0, start);
    if (terminator < 0) {
      throw new Error('String not found in buffer');
    }

    let decoded = '';
    for (let i = start; i < terminator; i++) {
      decoded += String.fromCharCode(this.buffer[i]);
    }
    this.decode_offset = terminator + 1;
    return decoded;
  };

  decode_i8 = () => {
    return ui8_array_to_i8(this.buffer, this.decode_offset++);
  };

  decode_ui8 = () => {
    return ui8_array_to_ui8(this.buffer, this.decode_offset++);
  };

  decode_i16 = () => {
    const value = be_ui8_array_to_i16(this.buffer, this.decode_offset);
    this.decode_offset += 2;
    return value;
  };

  decode_ui16 = () => {
    const value = be_ui8_array_to_ui16(this.buffer, this.decode_offset);
    this.decode_offset += 2;
    return value;
  };

  decode_i32 = () => {
    const value = be_ui8_array_to_i32(this.buffer, this.decode_offset);
    this.decode_offset += 4;
    return value;
  };

  decode_ui32 = () => {
    const value = be_ui8_array_to_ui32(this.buffer, this.decode_offset);
    this.decode_offset += 4;
    return value;
  };

  // 64-bit values are returned as base-10 strings to avoid JS Number
  // precision loss.
  decode_i64 = () => {
    const value = new Int64BE(
      this.buffer.slice(this.decode_offset, this.decode_offset + 8)
    );
    this.decode_offset += 8;
    return value.toString(10);
  };

  decode_ui64 = () => {
    const value = new Uint64BE(
      this.buffer.slice(this.decode_offset, this.decode_offset + 8)
    );
    this.decode_offset += 8;
    return value.toString(10);
  };

  /**
   * Decrypt the buffer in place.  The key is SHA-256 of the token; the
   * first 12 bytes on the wire are the ChaCha20 nonce, the remainder is
   * ciphertext.  Rejects when the plaintext magic header is missing.
   */
  decrypt = async () => {
    try {
      const key = Uint8Array.from(
        crypto
          .createHash('sha256')
          .update(new TextEncoder().encode(this.token))
          .digest()
      );
      const nonce = this.buffer.slice(0, 12);

      this.buffer = new JSChaCha20(key, nonce, 0).decrypt(
        this.buffer.slice(12)
      );

      // Position the read cursor just past the magic header.
      this.decode_offset = packet.HEADER.length;

      const header = this.buffer.slice(0, 9);
      if (header.toString() !== packet.HEADER.toString()) {
        return Promise.reject(new Error('Header does not match'));
      }

      return this.buffer;
    } catch (e) {
      return Promise.reject(e);
    }
  };

  encode_buffer = (buffer) => {
    this.append_buffer(new Uint8Array(buffer));
  };

  encode_i8 = (num) => {
    this.append_buffer(i8_to_ui8_array(num));
  };

  encode_top_i8 = (num) => {
    this.push_buffer(i8_to_ui8_array(num));
  };

  encode_u8 = (num) => {
    this.append_buffer(ui8_to_ui8_array(num));
  };

  encode_top_u8 = (num) => {
    this.push_buffer(ui8_to_ui8_array(num));
  };

  encode_i16 = (num) => {
    this.append_buffer(i16_to_be_ui8_array(num));
  };

  encode_top_i16 = (num) => {
    this.push_buffer(i16_to_be_ui8_array(num));
  };

  encode_ui16 = (num) => {
    this.append_buffer(ui16_to_be_ui8_array(num));
  };

  encode_top_ui16 = (num) => {
    this.push_buffer(ui16_to_be_ui8_array(num));
  };

  encode_i32 = (num) => {
    this.append_buffer(i32_to_be_ui8_array(num));
  };

  encode_top_i32 = (num) => {
    this.push_buffer(i32_to_be_ui8_array(num));
  };

  encode_ui32 = (num) => {
    this.append_buffer(ui32_to_be_ui8_array(num));
  };

  encode_top_ui32 = (num) => {
    this.push_buffer(ui32_to_be_ui8_array(num));
  };

  encode_i64 = (num) => {
    this.append_buffer(new Uint8Array(new Int64BE(num).toArray()));
  };

  encode_top_i64 = (num) => {
    this.push_buffer(new Uint8Array(new Int64BE(num).toArray()));
  };

  encode_ui64 = (num) => {
    this.append_buffer(new Uint8Array(new Uint64BE(num).toArray()));
  };

  encode_top_ui64 = (num) => {
    this.push_buffer(new Uint8Array(new Uint64BE(num).toArray()));
  };

  /** Append 'str' as bytes followed by a NUL terminator. */
  encode_utf8 = (str) => {
    if (!(typeof str === 'string' || str instanceof String)) {
      throw new Error('Value must be of type string');
    }

    this.append_buffer(new Uint8Array([...new TextEncoder().encode(str), 0]));
  };

  /** Prepend 'str' as bytes followed by a NUL terminator. */
  encode_top_utf8 = (str) => {
    if (!(typeof str === 'string' || str instanceof String)) {
      throw new Error('Value must be of type string');
    }

    this.push_buffer(new Uint8Array([...new TextEncoder().encode(str), 0]));
  };

  /**
   * Encrypt the buffer in place: prepend the magic header, encrypt with
   * ChaCha20 keyed by SHA-256(token), then prepend the 12-byte nonce
   * unencrypted.  A nonce may be supplied for deterministic output.
   */
  encrypt = async (nonce) => {
    try {
      this.push_buffer(packet.HEADER);
      const key = Uint8Array.from(
        crypto
          .createHash('sha256')
          .update(new TextEncoder().encode(this.token))
          .digest()
      );
      if (!nonce) {
        nonce = Uint8Array.from(randomBytes(12));
      }

      this.buffer = new JSChaCha20(key, nonce, 0).encrypt(this.buffer);
      this.push_buffer(nonce);

      return this.buffer;
    } catch (e) {
      return Promise.reject(e);
    }
  };

  push_buffer = (data) => {
    if (!(data instanceof Uint8Array)) {
      throw new Error('Buffer must be of type Uint8Array');
    }

    this.buffer = this.buffer
      ? new Uint8Array([...data, ...this.buffer])
      : data;
  };
}
|
||||
611
src/ops/index.js
611
src/ops/index.js
@@ -1,611 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import { Uint64BE } from 'int64-buffer';
|
||||
|
||||
import file from '../io/file';
|
||||
import packet from '../networking/packet';
|
||||
|
||||
/**
 * Create a remote directory snapshot and return a paged reader over it.
 *
 * Resolves to '{ get_page, page_count, release, remote_path }' on
 * success; a non-zero remote status resolves to 'undefined' (matching
 * the historical behavior).  'release' is idempotent and is invoked
 * automatically when paging fails.
 */
const _snapshot_directory = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);

    const response = await conn.send(
      '::RemoteJSONCreateDirectorySnapshot',
      request
    );
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result !== 0) {
      // Preserve original behavior: non-zero status yields undefined.
      return;
    }

    const data = JSON.parse(response.decode_utf8());

    // Release the remote snapshot handle exactly once.
    let released = false;
    const release = async () => {
      if (released) {
        return;
      }
      released = true;
      const release_request = new packet();
      release_request.encode_ui64(data.handle);
      await conn.send('::RemoteJSONReleaseDirectorySnapshot', release_request);
    };

    try {
      // Fetch one page of directory entries; -120 indicates a partial
      // final page and still carries data.  Unknown statuses yield [].
      const get_page = async (page) => {
        try {
          const page_request = new packet();
          page_request.encode_utf8(remote_path);
          page_request.encode_ui64(data.handle);
          page_request.encode_ui32(page);

          const page_response = await conn.send(
            '::RemoteJSONReadDirectorySnapshot',
            page_request
          );
          page_response.decode_ui32(); // Service flags

          const page_result = page_response.decode_i32();
          if (page_result === 0 || page_result === -120) {
            return JSON.parse(page_response.decode_utf8()).directory_list;
          }
        } catch (err) {
          await release();
          return Promise.reject(new Error(`'get_page' failed: ${err}`));
        }
        return [];
      };

      return {
        get_page,
        page_count: data.page_count,
        release,
        remote_path,
      };
    } catch (err) {
      await release();
      return Promise.reject(new Error(`'snapshot_directory' failed: ${err}`));
    }
  } catch (err) {
    return Promise.reject(new Error(`'snapshot_directory' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Release a remote file handle.
 *
 * @param {object} conn - connection or connection_pool.
 * @param {string} remote_path - Path of the open file.
 * @param {*} handle - Handle obtained from open/create.
 * @param {number} [optional_thread_id] - Id tying this call to its opener.
 * @returns {Promise<number>} Remote status code (0 on success).
 */
export const close_file = async (
  conn,
  remote_path,
  handle,
  optional_thread_id
) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(handle);

    const reply = await conn.send(
      '::RemoteFUSERelease',
      request,
      optional_thread_id
    );
    reply.decode_ui32(); // Service flags

    return reply.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'close_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Create a remote directory with mode 0750 (rwxr-x---).
 *
 * @param {object} conn - connection or connection_pool.
 * @param {string} remote_path - Directory to create.
 * @returns {Promise<number>} Remote status code (0 on success).
 */
export const create_directory = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    // 0750: owner rwx, group r-x.
    request.encode_ui16((7 << 6) | (5 << 3));

    const reply = await conn.send('::RemoteFUSEMkdir', request);
    reply.decode_ui32(); // Service flags

    return reply.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'create_directory' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Open a remote file for read/write, creating it when absent
 * (mode 0750).
 *
 * @param {object} conn - connection or connection_pool.
 * @param {string} remote_path - File to open or create.
 * @param {number} [optional_thread_id] - Id for the remote session.
 * @returns {Promise<*>} The 64-bit remote file handle; rejects on a
 *   non-zero status or transport failure.
 */
export const create_or_open_file = async (
  conn,
  remote_path,
  optional_thread_id
) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    // 0750: owner rwx, group r-x.
    request.encode_ui16((7 << 6) | (5 << 3));
    request.encode_ui32(2 | 4); // Read-Write, Create

    const reply = await conn.send(
      '::RemoteFUSECreate',
      request,
      optional_thread_id
    );
    reply.decode_ui32(); // Service flags

    const result = reply.decode_i32();
    if (result !== 0) {
      return Promise.reject(
        new Error(`'create_or_open_file' error: ${result}`)
      );
    }

    return reply.decode_ui64();
  } catch (err) {
    return Promise.reject(new Error(`'create_or_open_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Delete (unlink) a remote file.
 *
 * @param {object} conn - connection or connection_pool.
 * @param {string} remote_path - File to remove.
 * @returns {Promise<number>} Remote status code (0 on success).
 */
export const delete_file = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);

    const reply = await conn.send('::RemoteFUSEUnlink', request);
    reply.decode_ui32(); // Service flags

    return reply.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'delete_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Download a remote file to a local path in 64 KiB chunks.
 *
 * Flag semantics (mutually exclusive, checked in order):
 *  - overwrite: replace any existing local file;
 *  - resume: continue from the current local size ('r+'); already
 *    complete files return immediately, larger-than-source files reject;
 *  - neither: fail if the local file already exists ('wx+').
 *
 * 'progress_cb(local_path, remote_path, percent, complete)' is invoked
 * after each written chunk and once more at 100% on completion.
 *
 * @returns {Promise<boolean>} true on success; rejects with
 *   "'download_file' failed: ..." otherwise.
 */
export const download_file = async (
  conn,
  remote_path,
  local_path,
  progress_cb,
  overwrite,
  resume
) => {
  try {
    const src = new file(conn, await open_file(conn, remote_path), remote_path);

    // Best-effort teardown of the remote handle and local descriptor.
    const cleanup = async (fd) => {
      try {
        await src.close();
      } catch (err) {
        console.log(err);
      }
      try {
        if (fd !== undefined) {
          fs.closeSync(fd);
        }
      } catch (err) {
        console.log(err);
      }
    };

    try {
      const src_size = await src.get_size();
      let dst_fd;

      try {
        let offset = 0;
        if (overwrite) {
          dst_fd = fs.openSync(local_path, 'w+');
        } else if (resume) {
          dst_fd = fs.openSync(local_path, 'r+');

          const dst_size = fs.fstatSync(dst_fd).size;
          if (dst_size === src_size) {
            // Nothing left to transfer.
            await cleanup(dst_fd);
            return true;
          }

          if (dst_size > src_size) {
            await cleanup(dst_fd);
            return Promise.reject(
              new Error(
                `'download_file' failed: destination is larger than source`
              )
            );
          }

          offset = dst_size;
        } else {
          if (fs.existsSync(local_path)) {
            await cleanup(dst_fd);
            return Promise.reject(
              new Error(`'download_file' failed: file exists`)
            );
          }

          dst_fd = fs.openSync(local_path, 'wx+');
        }

        // Stream the remainder in 64 KiB chunks.
        let remain = src_size - offset;
        while (remain > 0) {
          const to_write = remain >= 65536 ? 65536 : remain;
          const chunk = await src.read(offset, to_write);
          const written = fs.writeSync(dst_fd, chunk, 0, to_write, offset);
          if (written > 0) {
            remain -= written;
            offset += written;
            if (progress_cb) {
              progress_cb(
                local_path,
                remote_path,
                ((src_size - remain) / src_size) * 100.0,
                false
              );
            }
          }
        }

        if (progress_cb) {
          progress_cb(local_path, remote_path, 100, true);
        }

        await cleanup(dst_fd);
        return true;
      } catch (err) {
        await cleanup(dst_fd);
        return Promise.reject(new Error(`'download_file' failed: ${err}`));
      }
    } catch (err) {
      await cleanup();
      return Promise.reject(new Error(`'download_file' failed: ${err}`));
    }
  } catch (err) {
    return Promise.reject(new Error(`'download_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Queries the remote volume for capacity information.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @returns {Promise<{free: *, total: *, used: string}>} free/total as decoded
 *   ui64 values plus `used` as a base-10 string; rejects with Error on failure.
 */
export const get_drive_information = async (conn) => {
  try {
    const res = await conn.send('::RemoteWinFSPGetVolumeInfo', new packet());
    res.decode_ui32(); // Service flags

    const status = res.decode_i32();
    if (status !== 0) {
      return Promise.reject(
        new Error(`'get_drive_information' failed: ${status}`)
      );
    }

    const total = res.decode_ui64();
    const free = res.decode_ui64();
    const used = (new Uint64BE(total) - new Uint64BE(free)).toString(10);
    return { free, total, used };
  } catch (err) {
    return Promise.reject(new Error(`'get_drive_information' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Fetches attributes ('stat' data) for an already-open remote file handle.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {*} handle - Remote file handle obtained from open_file().
 * @param {string} remote_path - Path of the remote file.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<object>} decoded stat structure; rejects with Error otherwise.
 */
export const get_file_attributes = async (
  conn,
  handle,
  remote_path,
  optional_thread_id
) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui64(handle);
    req.encode_ui32(0); // zeroed field required by the wire format
    req.encode_ui32(0); // zeroed field required by the wire format

    const res = await conn.send('::RemoteFUSEFgetattr', req, optional_thread_id);
    res.decode_ui32(); // Service flags

    const status = res.decode_i32();
    if (status !== 0) {
      return Promise.reject(
        new Error(`'get_file_attributes' failed: ${status}`)
      );
    }
    return res.decode_stat();
  } catch (err) {
    return Promise.reject(new Error(`'get_file_attributes' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Fetches attributes ('stat' data) for a remote path without an open handle.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {string} remote_path - Path of the remote file.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<object>} decoded stat structure; rejects with Error otherwise.
 */
export const get_file_attributes2 = async (
  conn,
  remote_path,
  optional_thread_id
) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui32(0); // zeroed field required by the wire format
    req.encode_ui32(0); // zeroed field required by the wire format

    const res = await conn.send('::RemoteFUSEGetattr', req, optional_thread_id);
    res.decode_ui32(); // Service flags

    const status = res.decode_i32();
    if (status !== 0) {
      return Promise.reject(
        new Error(`'get_file_attributes2' failed: ${status}`)
      );
    }
    return res.decode_stat();
  } catch (err) {
    return Promise.reject(new Error(`'get_file_attributes2' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Lists a remote directory by snapshotting it and handing the pages to a
 * caller-supplied reader callback, releasing the snapshot afterwards.
 *
 * Fix: previously rejected with a bare string; now rejects with an Error,
 * consistent with every other function in this module.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {string} remote_path - Directory to list.
 * @param {function} page_reader_cb - Called as
 *   (remote_path, page_count, get_page); awaited before release.
 * @returns {Promise<void>} resolves when the callback and release complete;
 *   rejects with Error if the callback (or release) fails. Snapshot-creation
 *   errors propagate unchanged.
 */
export const list_directory = async (conn, remote_path, page_reader_cb) => {
  const dir_snapshot = await _snapshot_directory(conn, remote_path);
  try {
    await page_reader_cb(
      dir_snapshot.remote_path,
      dir_snapshot.page_count,
      dir_snapshot.get_page
    );
    await dir_snapshot.release();
  } catch (err) {
    // Release on failure too, then surface a proper Error to the caller.
    await dir_snapshot.release();
    return Promise.reject(new Error(`'list_directory' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Opens a remote file in read-write mode.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {string} remote_path - Path of the remote file to open.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<*>} the remote file handle (decoded ui64); rejects with
 *   Error when the remote returns a non-zero status or on transport failure.
 */
export const open_file = async (conn, remote_path, optional_thread_id) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui32(2); // Read-Write

    const res = await conn.send('::RemoteFUSEOpen', req, optional_thread_id);
    res.decode_ui32(); // Service flags

    const status = res.decode_i32();
    if (status !== 0) {
      return Promise.reject(new Error(`'open_file' error: ${status}`));
    }
    return res.decode_ui64();
  } catch (err) {
    return Promise.reject(new Error(`'open_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Reads `length` bytes from an open remote file at `offset`.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {*} handle - Remote file handle from open_file().
 * @param {string} remote_path - Path of the remote file.
 * @param {number} offset - Byte offset to read from.
 * @param {number} length - Exact byte count expected; a short read rejects.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<*>} the decoded buffer of `length` bytes; rejects with
 *   Error on short reads, remote errors, or transport failure.
 */
export const read_file = async (
  conn,
  handle,
  remote_path,
  offset,
  length,
  optional_thread_id
) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui64(length);
    req.encode_ui64(offset);
    req.encode_ui64(handle);

    const res = await conn.send('::RemoteFUSERead', req, optional_thread_id);
    res.decode_ui32(); // Service flags

    const bytes_read = res.decode_i32();
    if (bytes_read !== length) {
      // Anything other than a full read (including negative error codes)
      // is treated as a failure.
      return Promise.reject(new Error(`'read_file' error: ${bytes_read}`));
    }
    return res.decode_buffer(bytes_read);
  } catch (err) {
    return Promise.reject(new Error(`'read_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Removes a remote directory.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {string} remote_path - Directory to remove.
 * @returns {Promise<number>} the remote status code (0 on success); rejects
 *   with Error only on transport/encoding failure.
 */
export const remove_directory = async (conn, remote_path) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);

    const res = await conn.send('::RemoteFUSERmdir', req);
    res.decode_ui32(); // Service flags

    // Status code is returned as-is; callers inspect it themselves.
    return res.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'remove_directory' failed: ${err}`));
  }
};
|
||||
|
||||
// Public alias for the internal directory-snapshot helper used by list_directory.
export const snapshot_directory = _snapshot_directory;
|
||||
|
||||
/**
 * Truncates an open remote file to `length` bytes.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {*} handle - Remote file handle from open_file().
 * @param {string} remote_path - Path of the remote file.
 * @param {number} length - New file length in bytes.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<number>} the remote status code (0 on success); rejects
 *   with Error only on transport/encoding failure.
 */
export const truncate_file = async (
  conn,
  handle,
  remote_path,
  length,
  optional_thread_id
) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui64(length);
    req.encode_ui64(handle);

    const res = await conn.send(
      '::RemoteFUSEFtruncate',
      req,
      optional_thread_id
    );
    res.decode_ui32(); // Service flags

    // Status code is returned as-is; callers inspect it themselves.
    return res.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'truncate_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Uploads a local file to a remote path in 128 KiB chunks.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {string} local_path - Source path on the local filesystem.
 * @param {string} remote_path - Destination path on the remote.
 * @param {?function} progress_cb - Optional: called as
 *   (local_path, remote_path, percent, complete) after each chunk and once at the end.
 * @param {boolean} overwrite - Truncate/replace any existing remote file.
 * @param {boolean} resume - Continue a partial upload; overwrite is checked first.
 * @returns {Promise<boolean>} true on success; rejects with Error on failure.
 */
export const upload_file = async (
  conn,
  local_path,
  remote_path,
  progress_cb,
  overwrite,
  resume
) => {
  try {
    const src_fd = fs.openSync(local_path, 'r');
    // Best-effort teardown of the local fd and (if created) the remote file.
    // Errors are logged and swallowed so cleanup never masks the real error.
    const cleanup = async (f) => {
      try {
        fs.closeSync(src_fd);
      } catch (err) {
        console.log(err);
      }
      try {
        if (f) {
          await f.close();
        }
      } catch (err) {
        console.log(err);
      }
    };
    try {
      const src_st = fs.fstatSync(src_fd);
      let dst;
      // Lazily creates/opens the remote destination and stores it in `dst`.
      const create_dest = async () => {
        dst = new file(
          conn,
          await create_or_open_file(conn, remote_path),
          remote_path
        );
      };

      try {
        let offset = 0;
        if (overwrite) {
          await create_dest();
          // Discard any existing remote content.
          const result = await dst.truncate(0);
          if (result !== 0) {
            await cleanup(dst);
            return Promise.reject(new Error(`'upload_file' failed: ${result}`));
          }
        } else if (resume) {
          await create_dest();
          const dst_size = new Uint64BE(await dst.get_size()).toNumber();
          if (dst_size === src_st.size) {
            // Already fully uploaded.
            await cleanup(dst);
            return true;
          }

          if (dst_size > src_st.size) {
            await cleanup(dst);
            return Promise.reject(
              new Error(
                `'upload_file' failed: destination is larger than source`
              )
            );
          }

          // Continue where the previous upload left off.
          offset = dst_size;
        } else {
          // Neither overwrite nor resume: probe for an existing remote file.
          try {
            const f = new file(
              conn,
              await open_file(conn, remote_path),
              remote_path
            );
            // Open succeeded -> the file already exists; refuse to clobber it.
            await cleanup(f);
            return Promise.reject(
              new Error("'upload_file' failed: file exists")
            );
          } catch (err) {
            // open_file rejected -> treat as "does not exist" and create it.
            // NOTE(review): any open_file failure (not just "not found")
            // lands here — confirm this is intentional.
            await create_dest();
          }
        }

        // Copy loop: read up to 128 KiB locally, write to the remote at the
        // same offset. Chunks with written <= 0 are retried implicitly
        // (offset/remain unchanged). The full-size buffer is reused; only
        // the final short chunk allocates a fresh one.
        let remain = src_st.size - offset;
        const default_buffer = Buffer.alloc(65536 * 2);
        while (remain > 0) {
          const to_write =
            remain >= default_buffer.length ? default_buffer.length : remain;
          const buffer =
            to_write === default_buffer.length
              ? default_buffer
              : Buffer.alloc(to_write);
          fs.readSync(src_fd, buffer, 0, to_write, offset);
          const written = await dst.write(offset, buffer);
          if (written > 0) {
            remain -= written;
            offset += written;
            if (progress_cb) {
              progress_cb(
                local_path,
                remote_path,
                ((src_st.size - remain) / src_st.size) * 100.0,
                false
              );
            }
          }
        }

        // Final completion callback (complete = true).
        if (progress_cb) {
          progress_cb(local_path, remote_path, 100, true);
        }

        await cleanup(dst);
        return true;
      } catch (err) {
        await cleanup(dst);
        return Promise.reject(new Error(`'upload_file' failed: ${err}`));
      }
    } catch (err) {
      // fstatSync failed; no remote file was created.
      await cleanup();
      return Promise.reject(new Error(`'upload_file' failed: ${err}`));
    }
  } catch (err) {
    return Promise.reject(new Error(`'upload_file' failed: ${err}`));
  }
};
|
||||
|
||||
/**
 * Writes `buffer` to an open remote file at `offset`.
 *
 * @param {object} conn - Active connection used for remote packet I/O.
 * @param {*} handle - Remote file handle from open_file().
 * @param {string} remote_path - Path of the remote file.
 * @param {number} offset - Byte offset to write at.
 * @param {Buffer} buffer - Data to write; a partial write rejects.
 * @param {*} [optional_thread_id] - Optional thread id forwarded to conn.send.
 * @returns {Promise<number>} number of bytes written (=== buffer.length);
 *   rejects with Error on short writes, remote errors, or transport failure.
 */
export const write_file = async (
  conn,
  handle,
  remote_path,
  offset,
  buffer,
  optional_thread_id
) => {
  try {
    const req = new packet();
    req.encode_utf8(remote_path);
    req.encode_ui64(buffer.length);
    req.encode_buffer(buffer);
    req.encode_ui64(offset);
    req.encode_ui64(handle);

    const res = await conn.send('::RemoteFUSEWrite', req, optional_thread_id);
    res.decode_ui32(); // Service flags

    const bytes_written = res.decode_i32();
    if (bytes_written !== buffer.length) {
      // Anything other than a full write (including negative error codes)
      // is treated as a failure.
      return Promise.reject(new Error(`'write_file' error: ${bytes_written}`));
    }
    return bytes_written;
  } catch (err) {
    return Promise.reject(new Error(`'write_file' failed: ${err}`));
  }
};
|
||||
@@ -1,112 +0,0 @@
|
||||
// True when this machine stores the most significant byte of a word first.
export const is_big_endian_system =
  new Uint8Array(Uint32Array.of(0x12345678).buffer)[0] === 0x12;
|
||||
|
||||
// True when this machine stores the least significant byte of a word first.
export const is_little_endian_system =
  new Uint8Array(Uint32Array.of(0x12345678).buffer)[0] === 0x78;
|
||||
|
||||
/**
 * Encodes a signed 8-bit value as a one-byte Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeInt8).
 */
export const i8_to_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(1);
  buffer.writeInt8(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads the byte at `offset` from `ar` and interprets it as a signed int8. */
export const ui8_array_to_i8 = (ar, offset) => {
  return Buffer.from([ar[offset]]).readInt8(0);
};
|
||||
|
||||
/**
 * Encodes an unsigned 8-bit value as a one-byte Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeUInt8).
 */
export const ui8_to_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(1);
  buffer.writeUInt8(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads the byte at `offset` from `ar` as an unsigned uint8. */
export const ui8_array_to_ui8 = (ar, offset) => {
  return Buffer.from([ar[offset]]).readUInt8(0);
};
|
||||
|
||||
/**
 * Encodes a signed 16-bit value as a 2-byte big-endian Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeInt16BE).
 */
export const i16_to_be_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(2);
  buffer.writeInt16BE(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads 2 bytes at `offset` from `ar` as a big-endian signed int16. */
export const be_ui8_array_to_i16 = (ar, offset) => {
  const buffer = Buffer.alloc(2);
  for (let i = 0; i < 2; i++) {
    buffer[i] = ar[offset + i];
  }
  return buffer.readInt16BE(0);
};
|
||||
|
||||
/**
 * Encodes an unsigned 16-bit value as a 2-byte big-endian Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeUInt16BE).
 */
export const ui16_to_be_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(2);
  buffer.writeUInt16BE(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads 2 bytes at `offset` from `ar` as a big-endian unsigned uint16. */
export const be_ui8_array_to_ui16 = (ar, offset) => {
  const buffer = Buffer.alloc(2);
  for (let i = 0; i < 2; i++) {
    buffer[i] = ar[offset + i];
  }
  return buffer.readUInt16BE(0);
};
|
||||
|
||||
/**
 * Encodes a signed 32-bit value as a 4-byte big-endian Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeInt32BE).
 */
export const i32_to_be_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(4);
  buffer.writeInt32BE(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads 4 bytes at `offset` from `ar` as a big-endian signed int32. */
export const be_ui8_array_to_i32 = (ar, offset) => {
  const buffer = Buffer.alloc(4);
  for (let i = 0; i < 4; i++) {
    buffer[i] = ar[offset + i];
  }
  return buffer.readInt32BE(0);
};
|
||||
|
||||
/**
 * Encodes an unsigned 32-bit value as a 4-byte big-endian Uint8Array.
 * Numeric strings are parsed base-10 first; out-of-range values throw
 * (via Buffer#writeUInt32BE).
 */
export const ui32_to_be_ui8_array = (num) => {
  const value =
    typeof num === 'string' || num instanceof String ? parseInt(num, 10) : num;

  const buffer = Buffer.alloc(4);
  buffer.writeUInt32BE(value);
  return new Uint8Array(buffer);
};
|
||||
|
||||
/** Reads 4 bytes at `offset` from `ar` as a big-endian unsigned uint32. */
export const be_ui8_array_to_ui32 = (ar, offset) => {
  const buffer = Buffer.alloc(4);
  for (let i = 0; i < 4; i++) {
    buffer[i] = ar[offset + i];
  }
  return buffer.readUInt32BE(0);
};
|
||||
@@ -1,7 +0,0 @@
|
||||
import {v4 as uuidv4} from 'uuid';
|
||||
import _package_json from '../../package.json';
|
||||
|
||||
// Unique id for this process/module instance, generated once at load time.
export const instance_id = uuidv4();
// Re-export of the package manifest for consumers.
export const package_json = _package_json;
// Returns the package version; the REPERTORY_JS_FORCE_VERSION environment
// variable overrides it when set (useful for testing).
export const get_version = () =>
  process.env.REPERTORY_JS_FORCE_VERSION || _package_json.version;
|
||||
@@ -1,322 +0,0 @@
|
||||
'use strict';
|
||||
/*
|
||||
* Copyright (c) 2017, Bubelich Mykola
|
||||
* https://www.bubelich.com
|
||||
*
|
||||
* (。◕‿‿◕。)
|
||||
*
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
 * modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
*
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
*
|
||||
* Neither the name of the copyright holder nor the names of its contributors
|
||||
* may be used to endorse or promote products derived from this software without
|
||||
* specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS"
|
||||
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
||||
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ChaCha20 is a stream cipher designed by D. J. Bernstein.
|
||||
* It is a refinement of the Salsa20 algorithm, and it uses a 256-bit key.
|
||||
*
|
||||
* ChaCha20 successively calls the ChaCha20 block function, with the same key and nonce, and with successively increasing block counter parameters.
|
||||
* ChaCha20 then serializes the resulting state by writing the numbers in little-endian order, creating a keystream block.
|
||||
*
|
||||
* Concatenating the keystream blocks from the successive blocks forms a keystream.
|
||||
* The ChaCha20 function then performs an XOR of this keystream with the plaintext.
|
||||
 * Alternatively, each keystream block can be XORed with a plaintext block before proceeding to the next block, saving some memory.
|
||||
* There is no requirement for the plaintext to be an integral multiple of 512 bits. If there is extra keystream from the last block, it is discarded.
|
||||
*
|
||||
* The inputs to ChaCha20 are
|
||||
* - 256-bit key
|
||||
* - 32-bit initial counter
|
||||
* - 96-bit nonce. In some protocols, this is known as the Initialization Vector
|
||||
* - Arbitrary-length plaintext
|
||||
*
|
||||
* Implementation derived from chacha-ref.c version 20080118
|
||||
 * See for details: http://cr.yp.to/chacha/chacha-20080128.pdf
|
||||
*/
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Uint8Array} key
|
||||
* @param {Uint8Array} nonce
|
||||
* @param {number} counter
|
||||
* @throws {Error}
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
/**
 * ChaCha20 stream-cipher state.
 *
 * @param {Uint8Array} key - 256-bit key (32 bytes).
 * @param {Uint8Array} nonce - 96-bit nonce (12 bytes).
 * @param {number} [counter=0] - Initial 32-bit block counter.
 * @throws {Error} when key or nonce has the wrong type or length.
 *
 * @constructor
 */
var JSChaCha20 = function (key, nonce, counter) {
  if (typeof counter === 'undefined') {
    counter = 0;
  }

  if (!(key instanceof Uint8Array) || key.length !== 32) {
    throw new Error('Key should be 32 byte array!');
  }

  if (!(nonce instanceof Uint8Array) || nonce.length !== 12) {
    throw new Error('Nonce should be 12 byte array!');
  }

  this._rounds = 20;
  // "expand 32-byte k" constants
  this._sigma = [0x61707865, 0x3320646e, 0x79622d32, 0x6b206574];

  // 16-word state: 4 constants, 8 key words, counter, 3 nonce words —
  // all multi-byte values read little-endian.
  var state = this._sigma.slice();
  for (var k = 0; k < 32; k += 4) {
    state.push(this._get32(key, k));
  }
  state.push(counter);
  for (var n = 0; n < 12; n += 4) {
    state.push(this._get32(nonce, n));
  }
  this._param = state;

  // 64-byte keystream block, regenerated on demand by _chacha().
  this._keystream = new Array(64).fill(0);

  // Position within the current keystream block (0..64).
  this._byteCounter = 0;
};
|
||||
|
||||
/**
 * Generates the next 64-byte keystream block from the current state,
 * serializing each resulting 32-bit word little-endian into _keystream.
 * @private
 */
JSChaCha20.prototype._chacha = function () {
  // Work on a copy so _param keeps the pre-round state for the final add.
  var working = this._param.slice(0, 16);

  for (var r = 0; r < this._rounds; r += 2) {
    // column rounds
    this._quarterround(working, 0, 4, 8, 12);
    this._quarterround(working, 1, 5, 9, 13);
    this._quarterround(working, 2, 6, 10, 14);
    this._quarterround(working, 3, 7, 11, 15);

    // diagonal rounds
    this._quarterround(working, 0, 5, 10, 15);
    this._quarterround(working, 1, 6, 11, 12);
    this._quarterround(working, 2, 7, 8, 13);
    this._quarterround(working, 3, 4, 9, 14);
  }

  var out = 0;
  for (var w = 0; w < 16; w++) {
    // Add the original state back in, then emit the word little-endian.
    var word = working[w] + this._param[w];
    this._keystream[out++] = word & 0xff;
    this._keystream[out++] = (word >>> 8) & 0xff;
    this._keystream[out++] = (word >>> 16) & 0xff;
    this._keystream[out++] = (word >>> 24) & 0xff;
  }
};
|
||||
|
||||
/**
|
||||
* The basic operation of the ChaCha algorithm is the quarter round.
|
||||
* It operates on four 32-bit unsigned integers, denoted a, b, c, and d.
|
||||
*
|
||||
* @param {Array} output
|
||||
* @param {number} a
|
||||
* @param {number} b
|
||||
* @param {number} c
|
||||
* @param {number} d
|
||||
* @private
|
||||
*/
|
||||
/**
 * ChaCha quarter round over four 32-bit words of `output`, in place.
 * Statements are sequenced exactly as the classic compound form:
 * each add happens before the XOR/rotate that consumes it.
 *
 * @param {Array} output
 * @param {number} a
 * @param {number} b
 * @param {number} c
 * @param {number} d
 * @private
 */
JSChaCha20.prototype._quarterround = function (output, a, b, c, d) {
  output[a] += output[b];
  output[d] = this._rotl(output[d] ^ output[a], 16);
  output[c] += output[d];
  output[b] = this._rotl(output[b] ^ output[c], 12);
  output[a] += output[b];
  output[d] = this._rotl(output[d] ^ output[a], 8);
  output[c] += output[d];
  output[b] = this._rotl(output[b] ^ output[c], 7);

  // Normalize back to unsigned 32-bit values.
  output[a] >>>= 0;
  output[b] >>>= 0;
  output[c] >>>= 0;
  output[d] >>>= 0;
};
|
||||
|
||||
/**
|
||||
* Little-endian to uint 32 bytes
|
||||
*
|
||||
* @param {Uint8Array|[number]} data
|
||||
* @param {number} index
|
||||
* @return {number}
|
||||
* @private
|
||||
*/
|
||||
/**
 * Reads four bytes at `index` from `data` as a little-endian 32-bit word.
 *
 * @param {Uint8Array|[number]} data
 * @param {number} index
 * @return {number}
 * @private
 */
JSChaCha20.prototype._get32 = function (data, index) {
  return (
    data[index] ^
    (data[index + 1] << 8) ^
    (data[index + 2] << 16) ^
    (data[index + 3] << 24)
  );
};
|
||||
|
||||
/**
|
||||
* Cyclic left rotation
|
||||
*
|
||||
* @param {number} data
|
||||
* @param {number} shift
|
||||
* @return {number}
|
||||
* @private
|
||||
*/
|
||||
/**
 * Rotates a 32-bit value left by `shift` bits.
 *
 * @param {number} data
 * @param {number} shift
 * @return {number}
 * @private
 */
JSChaCha20.prototype._rotl = function (data, shift) {
  var inverse = 32 - shift;
  return (data << shift) | (data >>> inverse);
};
|
||||
|
||||
/**
|
||||
* Encrypt data with key and nonce
|
||||
*
|
||||
* @param {Uint8Array} data
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
/**
 * Encrypts `data` with the configured key/nonce.
 * ChaCha20 is symmetric, so this delegates to the shared _update keystream XOR.
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 */
JSChaCha20.prototype.encrypt = function (data) {
  return this._update(data);
};
|
||||
|
||||
/**
|
||||
* Decrypt data with key and nonce
|
||||
*
|
||||
* @param {Uint8Array} data
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
/**
 * Decrypts `data` with the configured key/nonce.
 * Identical to encrypt(): the same keystream XOR inverts itself.
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 */
JSChaCha20.prototype.decrypt = function (data) {
  return this._update(data);
};
|
||||
|
||||
/**
|
||||
* Encrypt or Decrypt data with key and nonce
|
||||
*
|
||||
* @param {Uint8Array} data
|
||||
* @return {Uint8Array}
|
||||
* @private
|
||||
*/
|
||||
/**
 * XORs `data` against the keystream, generating new 64-byte blocks as needed.
 * Stateful across calls: _byteCounter carries the keystream position between
 * invocations so streaming in pieces works.
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 * @private
 */
JSChaCha20.prototype._update = function (data) {
  if (!(data instanceof Uint8Array) || data.length === 0) {
    throw new Error('Data should be type of bytes (Uint8Array) and not empty!');
  }

  var result = new Uint8Array(data.length);

  for (var i = 0; i < data.length; i++) {
    var pos = this._byteCounter;
    if (pos === 0 || pos === 64) {
      // Current keystream block exhausted (or never produced): make a new
      // one and advance the 32-bit block counter.
      this._chacha();
      this._param[12]++;
      pos = 0;
    }

    result[i] = data[i] ^ this._keystream[pos];
    this._byteCounter = pos + 1;
  }

  return result;
};
|
||||
|
||||
// EXPORT //
|
||||
// EXPORT //
// CommonJS interop: attach the constructor to module.exports only when a
// CommonJS loader is present (`module` is undefined in plain browsers).
if (typeof module !== 'undefined' && module.exports) {
  module.exports = JSChaCha20;
}
|
||||
Reference in New Issue
Block a user