initial changes
src/__tests__/packet.test.js (new file)
@@ -0,0 +1,9 @@
import packet from '../networking/packet';

test('can construct a packet', () => {
  const p = new packet('my password');
  console.log(p);
  expect(p.token).toEqual('my password');
  expect(p.buffer).toBeNull();
  expect(p.decode_offset).toEqual(0);
});
src/__tests__/repertory.test.js (new file)
@@ -0,0 +1,365 @@
import crypto from 'crypto';
import fs from 'fs';
import {Uint64BE} from 'int64-buffer';

import * as repertory from '../index.js';
import connection from '../networking/connection';
import connection_pool from '../networking/connection_pool';

const TEST_HOST = process.env.TEST_HOST || 'localhost';
const TEST_PASSWORD = process.env.TEST_PASSWORD || '';
const TEST_PORT = process.env.TEST_PORT || 20000;

const calculate_sha256 = path => {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha256');

    fs.createReadStream(path)
        .on('data', data => hash.update(data))
        .on('error', err => reject(err))
        .on('end', () => {
          const h = hash.digest('hex');
          console.log(path, h);
          resolve(h);
        });
  });
};

const test_connection = (conn, should_be_connected) => {
  expect(conn).toBeInstanceOf(connection);
  expect(conn.host_or_ip).toEqual(TEST_HOST);
  expect(conn.port).toEqual(TEST_PORT);
  expect(conn.password).toEqual(TEST_PASSWORD);
  expect(conn.connected).toEqual(should_be_connected);
  console.log(conn);
};

test('can create a connection to repertory api', async () => {
  const conn = await repertory.connect(TEST_HOST, TEST_PORT, TEST_PASSWORD);
  test_connection(conn, true);

  await conn.disconnect();
});

test('create_pool returns a connection if pool size is <=1', async () => {
  for (let i = 0; i < 2; i++) {
    const conn =
        await repertory.create_pool(i, TEST_HOST, TEST_PORT, TEST_PASSWORD);
    expect(conn).toBeInstanceOf(connection);
    test_connection(conn, true);

    await conn.disconnect();
  }
});

test('can create a connection pool', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  console.log(conn);
  expect(conn).toBeInstanceOf(connection_pool);
  expect(conn.host_or_ip).toEqual(TEST_HOST);
  expect(conn.port).toEqual(TEST_PORT);
  expect(conn.password).toEqual(TEST_PASSWORD);
  expect(conn.shutdown).toEqual(false);
  expect(conn.pool._pool.max).toEqual(2);
  expect(conn.pool._pool.min).toEqual(2);

  await conn.disconnect();
});

test('can get drive information using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  const di = await api.get_drive_information();
  console.log(di);

  expect(di.free).toBeDefined();
  expect(di.total).toBeDefined();
  expect(di.used).toBeDefined();

  await conn.disconnect();
});

test('can create and remove a directory using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.directory.create('/repertory_js')).toEqual(0);
  expect(await api.directory.remove('/repertory_js')).toEqual(0);

  await conn.disconnect();
});

test('can get directory list using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  await api.directory.list('/', async (remote_path, page_count, get_page) => {
    console.log(remote_path, page_count, get_page);
    expect(remote_path).toEqual('/');
    expect(page_count).toBeGreaterThanOrEqual(1);
    expect(get_page).toBeInstanceOf(Function);
    for (let i = 0; i < page_count; i++) {
      const items = await get_page(i);
      console.log(items);

      expect(items.length).toBeGreaterThanOrEqual(2);
      expect(items[0].directory).toBeTruthy();
      expect(items[0].path).toEqual('.');
      expect(items[1].directory).toBeTruthy();
      expect(items[1].path).toEqual('..');
    }
  });

  await conn.disconnect();
});

test('can create, close and delete a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  const f = await api.file.create_or_open('/repertory_file.dat');
  console.log(f);
  expect(f.remote_path).toEqual('/repertory_file.dat');
  expect(f.conn).toEqual(conn);
  expect(new Uint64BE(f.handle).toNumber()).toBeGreaterThanOrEqual(0);

  expect(await f.close()).toEqual(0);
  expect(f.handle).toBeNull();

  expect(await api.file.delete('/repertory_file.dat')).toEqual(0);

  await conn.disconnect();
});

test('can open, close and delete a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  let f = await api.file.create_or_open('/repertory_file.dat');
  expect(await f.close()).toEqual(0);

  f = await api.file.open('/repertory_file.dat');
  console.log(f);
  expect(f.remote_path).toEqual('/repertory_file.dat');
  expect(f.conn).toEqual(conn);
  expect(new Uint64BE(f.handle).toNumber()).toBeGreaterThanOrEqual(0);

  expect(await f.close()).toEqual(0);
  expect(f.handle).toBeNull();

  expect(await api.file.delete('/repertory_file.dat')).toEqual(0);

  await conn.disconnect();
});

test('can write to and read from a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  const f = await api.file.create_or_open('/repertory_file.dat');

  const buffer = Buffer.alloc(4);
  buffer[0] = 1;
  buffer[1] = 2;
  buffer[2] = 3;
  buffer[3] = 4;
  expect(await f.write(0, buffer)).toEqual(buffer.length);
  expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(buffer.length);

  const buffer2 = await f.read(0, 4);
  expect(buffer.compare(buffer2)).toEqual(0);

  expect(await f.close()).toEqual(0);
  expect(await api.file.delete('/repertory_file.dat')).toEqual(0);

  await conn.disconnect();
});

test('can truncate a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  const f = await api.file.create_or_open('/repertory_file.dat');

  expect(await f.truncate(10)).toEqual(0);
  expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(10);

  expect(await f.truncate(0)).toEqual(0);
  expect(new Uint64BE(await f.get_size()).toNumber()).toEqual(0);

  expect(await f.close()).toEqual(0);
  expect(await api.file.delete('/repertory_file.dat')).toEqual(0);

  await conn.disconnect();
});

test('can upload and download a file using api', async () => {
  try {
    fs.unlinkSync('repertory_test.dat');
  } catch {
  }

  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await calculate_sha256('test.dat'))
      .toEqual(await calculate_sha256('repertory_test.dat'));

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await conn.disconnect();
}, 60000);

test('can download and overwrite a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); },
                                 true))
      .toBeTruthy();

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await conn.disconnect();
}, 60000);

test('download fails if overwrite is false using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  await expect(api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); },
                                 false))
      .rejects.toThrow(Error);

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await conn.disconnect();
}, 60000);

test('can upload and overwrite a file using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); },
                               true))
      .toBeTruthy();

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);

  await conn.disconnect();
}, 60000);

test('upload fails if overwrite is false using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  await expect(api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); },
                               false))
      .rejects.toThrow(Error);

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);

  await conn.disconnect();
}, 60000);

test('can resume download using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);
  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  const fd = fs.openSync('test.dat', 'r');
  const buffer = Buffer.alloc(1024);
  fs.readSync(fd, buffer, 0, buffer.length);
  fs.closeSync(fd);

  fs.writeFileSync('repertory_test.dat', buffer);

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); },
                                 false, true))
      .toBeTruthy();

  expect(await calculate_sha256('test.dat'))
      .toEqual(await calculate_sha256('repertory_test.dat'));

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await conn.disconnect();
}, 60000);

test('can resume upload using api', async () => {
  const conn =
      await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
  const api = repertory.create_api(conn);

  const fd = fs.openSync('test.dat', 'r');
  const buffer = Buffer.alloc(1024);
  fs.readSync(fd, buffer, 0, buffer.length);
  fs.closeSync(fd);

  const f = await api.file.create_or_open('/repertory_test.dat');
  await f.write(0, buffer);
  await f.close();

  expect(await api.file.upload('test.dat', '/repertory_test.dat',
                               (l, r, p, c) => { console.log(l, r, p, c); },
                               false, true))
      .toBeTruthy();

  expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
                                 (l, r, p, c) => { console.log(l, r, p, c); }))
      .toBeTruthy();

  expect(await calculate_sha256('test.dat'))
      .toEqual(await calculate_sha256('repertory_test.dat'));

  expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
  fs.unlinkSync('repertory_test.dat');

  await conn.disconnect();
}, 60000);
src/index.js (new file)
@@ -0,0 +1,45 @@
import file from './io/file'
import connection from './networking/connection';
import connection_pool from './networking/connection_pool';
import * as ops from './ops'

export const connect = async (host_or_ip, port, password) => {
  const conn = new connection(host_or_ip, port, password);
  await conn.connect();
  return conn;
};

export const create_api = conn => {
  return {
    directory : {
      list : async (remote_path, page_reader_cb) =>
          ops.list_directory(conn, remote_path, page_reader_cb),
      create : async remote_path => ops.create_directory(conn, remote_path),
      remove : async remote_path => ops.remove_directory(conn, remote_path),
    },
    file : {
      create_or_open : async remote_path => new file(
          conn, await ops.create_or_open_file(conn, remote_path), remote_path),
      delete : async (remote_path) => ops.delete_file(conn, remote_path),
      download :
          async (remote_path, local_path, progress_cb, overwrite, resume) =>
              ops.download_file(conn, remote_path, local_path, progress_cb,
                                overwrite, resume),
      open : async remote_path =>
          new file(conn, await ops.open_file(conn, remote_path), remote_path),
      upload :
          async (local_path, remote_path, progress_cb, overwrite, resume) =>
              ops.upload_file(conn, local_path, remote_path, progress_cb,
                              overwrite, resume),
    },
    get_drive_information : async () => ops.get_drive_information(conn),
  };
};

export const create_pool = async (pool_size, host_or_ip, port, password) => {
  if (pool_size <= 1) {
    return connect(host_or_ip, port, password);
  }

  return new connection_pool(pool_size, host_or_ip, port, password);
};
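
A minimal usage sketch of the exported API (editor's addition, not part of this commit; it mirrors the tests above, and the host, port, password and file name are placeholders that assume a reachable repertory daemon):

// usage_example.js (hypothetical)
import * as repertory from './src/index.js';

const main = async () => {
  // pool_size <= 1 falls back to a single connection (see create_pool above)
  const conn = await repertory.create_pool(2, 'localhost', 20000, 'password');
  const api = repertory.create_api(conn);

  console.log(await api.get_drive_information()); // { free, total, used }

  const f = await api.file.create_or_open('/example.dat');
  await f.write(0, Buffer.from([ 1, 2, 3, 4 ]));
  await f.close();
  await api.file.delete('/example.dat');

  await conn.disconnect();
};

main().catch(console.error);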
src/io/file.js (new file)
@@ -0,0 +1,67 @@
import * as ops from '../ops';

let next_thread_id = 1;

export default class file {
  constructor(conn, handle, remote_path) {
    this.conn = conn;
    this.handle = handle;
    this.remote_path = remote_path;
    this.thread_id = next_thread_id++;
  }

  conn;
  handle = null;
  thread_id;
  remote_path;

  async close() {
    if (this.handle !== null) {
      const result = await ops.close_file(this.conn, this.remote_path,
                                          this.handle, this.thread_id);
      if (result === 0) {
        this.handle = null;
      }
      return result;
    }

    return 0;
  }

  async get_size() {
    if (this.handle === null) {
      return Promise.reject(new Error('\'get_size()\' failed: invalid handle'));
    }

    const attrs = await ops.get_file_attributes(
        this.conn, this.handle, this.remote_path, this.thread_id);
    return attrs.size;
  }

  async read(offset, length) {
    if (this.handle === null) {
      return Promise.reject(new Error('\'read()\' failed: invalid handle'));
    }

    return ops.read_file(this.conn, this.handle, this.remote_path, offset,
                         length, this.thread_id);
  }

  async truncate(length) {
    if (this.handle === null) {
      return Promise.reject(new Error('\'truncate()\' failed: invalid handle'));
    }

    return ops.truncate_file(this.conn, this.handle, this.remote_path, length,
                             this.thread_id);
  }

  async write(offset, buffer) {
    if (this.handle === null) {
      return Promise.reject(new Error('\'write()\' failed: invalid handle'));
    }

    return ops.write_file(this.conn, this.handle, this.remote_path, offset,
                          buffer, this.thread_id);
  }
}
src/networking/connection.js (new file)
@@ -0,0 +1,142 @@
import Socket from 'net';

import package_json from '../../package.json'
import * as constants from '../utils/constants'

import packet from './packet';

export default class connection {
  constructor(host_or_ip, port, password, socket) {
    this.host_or_ip = host_or_ip;
    this.port = port;
    this.password = password;
    if (socket) {
      this.socket = socket;
      this.connected = true;
      this.setup_socket();
    }
  }

  connected = false;
  host_or_ip = "";
  password = "";
  port = 20000;
  reject;
  resolve;
  socket;

  cleanup_handlers() {
    this.reject = null;
    this.resolve = null;
  }

  async connect() {
    if (!this.socket) {
      try {
        await new Promise((resolve, reject) => {
          this.socket =
              Socket.createConnection(this.port, this.host_or_ip, err => {
                if (err) {
                  return reject(err)
                }
                return resolve()
              });
        });
      } catch (err) {
        return Promise.reject(`'connect()' failed: ${err}`)
      }

      this.connected = true;
      this.setup_socket();
    }
  }

  setup_socket() {
    let buffer;
    const cleanup = () => {
      this.cleanup_handlers();
      buffer = null;
    };

    this.socket.on('data', chunk => {
      buffer = buffer ? Buffer.concat([ buffer, chunk ]) : chunk;

      if (buffer.length > 4) {
        const size = buffer.readUInt32BE(0);
        if (buffer.length >= size + 4) {
          const packet_data = buffer.slice(4, 4 + size);
          if (this.resolve) {
            const reject = this.reject;
            const resolve = this.resolve;

            cleanup();

            const response = new packet(this.password);
            response.buffer = new Uint8Array(packet_data);
            response.decrypt()
                .then(() => {resolve(response)})
                .catch(e => {reject(e)})
          }
        }
      }
    });

    this.socket.on('error', e => {
      if (this.reject) {
        const reject = this.reject;

        cleanup();

        this.connected = false;
        reject(e);
      }
    });

    this.socket.on('close', () => {
      if (this.reject) {
        const reject = this.reject;

        cleanup();

        this.connected = false;
        reject(new Error('socket closed'));
      }
    });
  }

  async disconnect() {
    try {
      if (this.socket) {
        this.socket.destroy();
        this.socket = null;
        this.cleanup_handlers();
        this.connected = false;
      }
    } catch (e) {
      console.log(e)
    }
  }

  async send(method_name, packet, optional_thread_id) {
    packet.token = this.password;
    packet.encode_top_utf8(method_name);
    packet.encode_top_ui64(optional_thread_id || 1);
    packet.encode_top_utf8(
        constants.instance_id ||
        'c2e3da6656a9f5cd7b95f159687da459656738af7a6d0de533f526d67af14cac');
    packet.encode_top_ui32(0); // Service flags
    packet.encode_top_utf8(package_json.version);
    await packet.encrypt();
    packet.encode_top_ui32(packet.buffer.length);
    return new Promise((resolve, reject) => {
      this.reject = reject;
      this.resolve = resolve;
      this.socket.write(Buffer.from(packet.buffer), null, err => {
        if (err) {
          this.cleanup_handlers();
          reject(err);
        }
      });
    });
  }
}
src/networking/connection_pool.js (new file)
@@ -0,0 +1,66 @@
import Pool from 'socket-pool';

import connection from './connection';

export default class connection_pool {
  constructor(pool_size, host_or_ip, port, password) {
    this.host_or_ip = host_or_ip;
    this.port = port;
    this.password = password;
    if (pool_size > 1) {
      this.pool = new Pool({
        connect : {host : host_or_ip, port : port},
        connectTimeout : 5000,
        pool : {max : pool_size, min : 2}
      });
    } else {
      throw new Error("'pool_size' must be > 1");
    }
  }

  host_or_ip = "";
  next_thread_id = 1;
  password = "";
  port = 20000;
  pool;
  shutdown = false;

  async disconnect() {
    await this.pool._pool.drain();
    await this.pool._pool.clear();
    this.pool = null;
    this.shutdown = true;
  }

  async send(method_name, packet, optional_thread_id) {
    try {
      const socket = await this.pool.acquire();
      if (!socket.thread_id) {
        socket.thread_id = this.next_thread_id++;
      }

      const cleanup = () => {
        try {
          socket.release();
        } catch (err) {
          console.log(`'release()' failed: ${err}`);
        }
      };

      try {
        const result = await new connection(this.host_or_ip, this.port,
                                            this.password, socket)
                           .send(method_name, packet,
                                 optional_thread_id || socket.thread_id);
        cleanup();
        return result;
      } catch (err) {
        cleanup();
        return Promise.reject(
            new Error(`'send(${method_name})' failed: ${err}`));
      }
    } catch (err) {
      return Promise.reject(new Error(`'acquire()' socket failed: ${err}`));
    }
  }
}
src/networking/packet.js (new file)
@@ -0,0 +1,253 @@
import {randomBytes} from 'crypto';
import {Int64BE, Uint64BE} from 'int64-buffer';
import {sha256} from 'js-sha256';
import {TextEncoder} from 'text-encoding';

import {
  be_ui8_array_to_i16,
  be_ui8_array_to_i32,
  be_ui8_array_to_ui16,
  be_ui8_array_to_ui32,
  i16_to_be_ui8_array,
  i32_to_be_ui8_array,
  i8_to_ui8_array,
  ui16_to_be_ui8_array,
  ui32_to_be_ui8_array,
  ui8_array_to_i8,
  ui8_array_to_ui8,
  ui8_to_ui8_array,
} from '../utils/byte_order';
import JSChaCha20 from '../utils/jschacha20';

export default class packet {
  constructor(token) { this.token = token; }

  static HEADER = new TextEncoder().encode('repertory');

  buffer = null;
  decode_offset = 0;
  token;

  append_buffer = buffer => {
    if (!(buffer instanceof Uint8Array)) {
      throw new Error('Buffer must be of type Uint8Array');
    }

    this.buffer =
        this.buffer ? new Uint8Array([...this.buffer, ...buffer ]) : buffer;
  };

  clear = () => {
    this.buffer = null;
    this.decode_offset = 0;
  };

  decode_buffer = length => {
    if (!this.buffer) {
      throw new Error('Invalid buffer');
    }

    const ret =
        this.buffer.slice(this.decode_offset, this.decode_offset + length);
    this.decode_offset += length;
    return Buffer.from(ret);
  };

  decode_stat = () => {
    const mode = this.decode_ui16();
    const nlink = this.decode_ui16();
    const uid = this.decode_ui32();
    const gid = this.decode_ui32();
    const atime = this.decode_ui64();
    const mtime = this.decode_ui64();
    const ctime = this.decode_ui64();
    const birth_time = this.decode_ui64();
    const size = this.decode_ui64();
    const blocks = this.decode_ui64();
    const blksize = this.decode_ui32();
    const flags = this.decode_ui32();
    const directory = !!this.decode_ui8();
    return {
      mode, nlink, uid, gid, atime, mtime, ctime, birth_time, size, blocks,
      blksize, flags, directory,
    }
  };

  decode_utf8 = () => {
    if (!this.buffer) {
      throw new Error('Invalid buffer');
    }

    const startIndex = this.decode_offset;
    const endIndex = this.buffer.indexOf(0, startIndex);
    if (endIndex >= 0) {
      let ret = '';
      for (let i = startIndex; i < endIndex; i++) {
        ret += String.fromCharCode(this.buffer[i]);
      }
      this.decode_offset = endIndex + 1;
      return ret;
    }

    throw new Error('String not found in buffer');
  };

  decode_i8 =
      () => { return ui8_array_to_i8(this.buffer, this.decode_offset++); };

  decode_ui8 =
      () => { return ui8_array_to_ui8(this.buffer, this.decode_offset++); };

  decode_i16 = () => {
    const ret = be_ui8_array_to_i16(this.buffer, this.decode_offset);
    this.decode_offset += 2;
    return ret;
  };

  decode_ui16 = () => {
    const ret = be_ui8_array_to_ui16(this.buffer, this.decode_offset);
    this.decode_offset += 2;
    return ret;
  };

  decode_i32 = () => {
    const ret = be_ui8_array_to_i32(this.buffer, this.decode_offset);
    this.decode_offset += 4;
    return ret;
  };

  decode_ui32 = () => {
    const ret = be_ui8_array_to_ui32(this.buffer, this.decode_offset);
    this.decode_offset += 4;
    return ret;
  };

  decode_i64 = () => {
    const ret = new Int64BE(
        this.buffer.slice(this.decode_offset, this.decode_offset + 8));
    this.decode_offset += 8;
    return ret.toString(10);
  };

  decode_ui64 = () => {
    const ret = new Uint64BE(
        this.buffer.slice(this.decode_offset, this.decode_offset + 8));
    this.decode_offset += 8;
    return ret.toString(10);
  };

  decrypt = async () => {
    try {
      const hash = sha256.create();
      hash.update(new TextEncoder().encode(this.token));

      const key = Uint8Array.from(hash.array());
      const nonce = this.buffer.slice(0, 12);

      this.buffer = new JSChaCha20(key, nonce, 0).decrypt(this.buffer.slice(12));

      this.decode_offset = packet.HEADER.length;

      const header = this.buffer.slice(0, 9);
      if (header.toString() !== packet.HEADER.toString()) {
        return Promise.reject(new Error('Header does not match'));
      }

      return this.buffer;
    } catch (e) {
      return Promise.reject(e);
    }
  };

  encode_buffer = buffer => { this.append_buffer(new Uint8Array(buffer)); };

  encode_i8 = num => { this.append_buffer(i8_to_ui8_array(num)); };

  encode_top_i8 = num => { this.push_buffer(i8_to_ui8_array(num)); };

  encode_u8 = num => { this.append_buffer(ui8_to_ui8_array(num)); };

  encode_top_u8 = num => { this.push_buffer(ui8_to_ui8_array(num)); };

  encode_i16 = num => { this.append_buffer(i16_to_be_ui8_array(num)); };

  encode_top_i16 = num => { this.push_buffer(i16_to_be_ui8_array(num)); };

  encode_ui16 = num => { this.append_buffer(ui16_to_be_ui8_array(num)); };

  encode_top_ui16 = num => { this.push_buffer(ui16_to_be_ui8_array(num)); };

  encode_i32 = num => { this.append_buffer(i32_to_be_ui8_array(num)); };

  encode_top_i32 = num => { this.push_buffer(i32_to_be_ui8_array(num)); };

  encode_ui32 = num => { this.append_buffer(ui32_to_be_ui8_array(num)); };

  encode_top_ui32 = num => { this.push_buffer(ui32_to_be_ui8_array(num)); };

  encode_i64 = num => {
    this.append_buffer(new Uint8Array(new Int64BE(num).toArray()));
  };

  encode_top_i64 =
      num => { this.push_buffer(new Uint8Array(new Int64BE(num).toArray())); };

  encode_ui64 = num => {
    this.append_buffer(new Uint8Array(new Uint64BE(num).toArray()));
  };

  encode_top_ui64 =
      num => { this.push_buffer(new Uint8Array(new Uint64BE(num).toArray())); };

  encode_utf8 = str => {
    if (!(typeof str === 'string' || str instanceof String)) {
      throw new Error('Value must be of type string');
    }

    const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]);
    this.append_buffer(buffer);
  };

  encode_top_utf8 = str => {
    if (!(typeof str === 'string' || str instanceof String)) {
      throw new Error('Value must be of type string');
    }

    const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]);
    this.push_buffer(buffer);
  };

  encrypt = async nonce => {
    try {
      this.push_buffer(packet.HEADER);
      const hash = sha256.create();
      hash.update(new TextEncoder().encode(this.token));

      const key = Uint8Array.from(hash.array());
      if (!nonce) {
        nonce = Uint8Array.from(randomBytes(12));
      }

      this.buffer = new JSChaCha20(key, nonce, 0).encrypt(this.buffer);
      this.push_buffer(nonce);

      return this.buffer;
    } catch (e) {
      return Promise.reject(e);
    }
  };

  push_buffer = buffer => {
    if (!(buffer instanceof Uint8Array)) {
      throw new Error('Buffer must be of type Uint8Array');
    }

    this.buffer =
        this.buffer ? new Uint8Array([...buffer, ...this.buffer ]) : buffer;
  };
}
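
A packet encrypt/decrypt round trip (editor's sketch, not part of this commit), illustrating the on-the-wire layout that connection.send() builds above: 4-byte big-endian length, then a 12-byte nonce, then ChaCha20('repertory' header + payload). The token value is a placeholder.

import packet from './src/networking/packet';

const demo = async () => {
  const request = new packet('my password');
  request.encode_utf8('hello');        // NULL-terminated UTF-8 payload
  await request.encrypt();             // prepends 'repertory' header, then nonce

  const response = new packet('my password');
  response.buffer = request.buffer;    // pretend this arrived off the wire
  await response.decrypt();            // strips nonce, verifies the header
  console.log(response.decode_utf8()); // 'hello'
};

demo().catch(console.error);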
src/ops/index.js (new file)
@@ -0,0 +1,458 @@
import fs from 'fs';
import {Uint64BE} from 'int64-buffer';

import file from '../io/file';
import packet from '../networking/packet';

export const close_file =
    async (conn, remote_path, handle, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(handle);

    const response =
        await conn.send('::RemoteFUSERelease', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    return response.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'close_file' failed: ${err}`));
  }
};

export const create_directory = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui16((7 << 6) | (5 << 3));

    const response = await conn.send('::RemoteFUSEMkdir', request);
    response.decode_ui32(); // Service flags

    return response.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'create_directory' failed: ${err}`));
  }
};

export const create_or_open_file =
    async (conn, remote_path, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui16((7 << 6) | (5 << 3));
    request.encode_ui32(2 | 4); // Read-Write, Create

    const response =
        await conn.send('::RemoteFUSECreate', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === 0) {
      return response.decode_ui64();
    }

    return Promise.reject(new Error(`'create_or_open_file' error: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'create_or_open_file' failed: ${err}`));
  }
};

export const delete_file = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);

    const response = await conn.send('::RemoteFUSEUnlink', request);
    response.decode_ui32(); // Service flags

    return response.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'delete_file' failed: ${err}`));
  }
};

export const download_file =
    async (conn, remote_path, local_path, progress_cb, overwrite, resume) => {
  try {
    const src = new file(conn, await open_file(conn, remote_path), remote_path);
    const cleanup = async fd => {
      try {
        await src.close();
      } catch (err) {
        console.log(err);
      }
      try {
        if (fd !== undefined) {
          fs.closeSync(fd);
        }
      } catch (err) {
        console.log(err);
      }
    };

    try {
      const src_size = await src.get_size();
      let dst_fd;

      try {
        let offset = 0;
        if (overwrite) {
          dst_fd = fs.openSync(local_path, 'w+');
        } else if (resume) {
          dst_fd = fs.openSync(local_path, 'r+');

          const dst_size = fs.fstatSync(dst_fd).size;
          if (dst_size === src_size) {
            await cleanup(dst_fd);
            return true;
          }

          if (dst_size > src_size) {
            await cleanup(dst_fd);
            return Promise.reject(new Error(
                `'download_file' failed: destination is larger than source`));
          }

          offset = dst_size;
        } else {
          if (fs.existsSync(local_path)) {
            await cleanup(dst_fd);
            return Promise.reject(
                new Error(`'download_file' failed: file exists`));
          }

          dst_fd = fs.openSync(local_path, 'wx+');
        }

        let remain = src_size - offset;
        while (remain > 0) {
          const to_write = remain >= 65536 ? 65536 : remain;
          const buffer = await src.read(offset, to_write);
          const written = fs.writeSync(dst_fd, buffer, 0, to_write, offset);
          if (written > 0) {
            remain -= written;
            offset += written;
            if (progress_cb) {
              progress_cb(local_path, remote_path,
                          ((src_size - remain) / src_size) * 100.0, false);
            }
          }
        }

        if (progress_cb) {
          progress_cb(local_path, remote_path, 100, true);
        }

        await cleanup(dst_fd);
        return true;
      } catch (err) {
        await cleanup(dst_fd);
        return Promise.reject(new Error(`'download_file' failed: ${err}`));
      }
    } catch (err) {
      await cleanup();
      return Promise.reject(new Error(`'download_file' failed: ${err}`));
    }
  } catch (err) {
    return Promise.reject(new Error(`'download_file' failed: ${err}`));
  }
};

export const get_drive_information = async conn => {
  try {
    const response =
        await conn.send('::RemoteWinFSPGetVolumeInfo', new packet());
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === 0) {
      const total = response.decode_ui64();
      const free = response.decode_ui64();
      return {
        free,
        total,
        used : (new Uint64BE(total) - new Uint64BE(free)).toString(10),
      };
    }

    return Promise.reject(
        new Error(`'get_drive_information' failed: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'get_drive_information' failed: ${err}`));
  }
};

export const get_file_attributes =
    async (conn, handle, remote_path, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(handle);
    request.encode_ui32(0);
    request.encode_ui32(0);

    const response =
        await conn.send('::RemoteFUSEFgetattr', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === 0) {
      return response.decode_stat();
    }

    return Promise.reject(new Error(`'get_file_attributes' failed: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'get_file_attributes' failed: ${err}`));
  }
};

export const list_directory = async (conn, remote_path, page_reader_cb) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);

    const response =
        await conn.send('::RemoteJSONCreateDirectorySnapshot', request);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === 0) {
      const data = JSON.parse(response.decode_utf8());
      const cleanup = async () => {
        const request = new packet();
        request.encode_ui64(data.handle);
        await conn.send('::RemoteJSONReleaseDirectorySnapshot', request);
      };
      try {
        const get_page = async page => {
          const request = new packet();
          request.encode_utf8(remote_path);
          request.encode_ui64(data.handle);
          request.encode_ui32(page);

          const response =
              await conn.send('::RemoteJSONReadDirectorySnapshot', request);
          response.decode_ui32(); // Service flags

          const result = response.decode_i32();
          if (result === 0 || result === -120) {
            const data = JSON.parse(response.decode_utf8());
            return data.directory_list;
          }
          return [];
        };

        await page_reader_cb(remote_path, data.page_count, get_page);
        await cleanup();
      } catch (err) {
        await cleanup();
        return Promise.reject(new Error(`'list_directory' failed: ${err}`));
      }
    }
  } catch (err) {
    return Promise.reject(new Error(`'list_directory' failed: ${err}`));
  }
};

export const open_file = async (conn, remote_path, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui32(2); // Read-Write

    const response =
        await conn.send('::RemoteFUSEOpen', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === 0) {
      return response.decode_ui64();
    }
    return Promise.reject(new Error(`'open_file' error: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'open_file' failed: ${err}`));
  }
};

export const read_file =
    async (conn, handle, remote_path, offset, length, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(length);
    request.encode_ui64(offset);
    request.encode_ui64(handle);

    const response =
        await conn.send('::RemoteFUSERead', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === length) {
      return response.decode_buffer(result);
    }
    return Promise.reject(new Error(`'read_file' error: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'read_file' failed: ${err}`));
  }
};

export const remove_directory = async (conn, remote_path) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);

    const response = await conn.send('::RemoteFUSERmdir', request);
    response.decode_ui32(); // Service flags

    return response.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'remove_directory' failed: ${err}`));
  }
};

export const truncate_file =
    async (conn, handle, remote_path, length, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(length);
    request.encode_ui64(handle);

    const response =
        await conn.send('::RemoteFUSEFtruncate', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    return response.decode_i32();
  } catch (err) {
    return Promise.reject(new Error(`'truncate_file' failed: ${err}`));
  }
};

export const upload_file =
    async (conn, local_path, remote_path, progress_cb, overwrite, resume) => {
  try {
    const src_fd = fs.openSync(local_path, 'r');
    const cleanup = async f => {
      try {
        fs.closeSync(src_fd);
      } catch (err) {
        console.log(err);
      }
      try {
        if (f) {
          await f.close();
        }
      } catch (err) {
        console.log(err);
      }
    };
    try {
      const src_st = fs.fstatSync(src_fd);
      let dst;
      const create_dest = async () => {
        dst = new file(conn, await create_or_open_file(conn, remote_path),
                       remote_path);
      };

      try {
        let offset = 0;
        if (overwrite) {
          await create_dest();
          const result = await dst.truncate(0);
          if (result !== 0) {
            await cleanup(dst);
            return Promise.reject(new Error(`'upload_file' failed: ${result}`));
          }
        } else if (resume) {
          await create_dest();
          const dst_size = new Uint64BE(await dst.get_size()).toNumber();
          if (dst_size === src_st.size) {
            await cleanup(dst);
            return true;
          }

          if (dst_size > src_st.size) {
            await cleanup(dst);
            return Promise.reject(new Error(
                `'upload_file' failed: destination is larger than source`));
          }

          offset = dst_size;
        } else {
          try {
            const f =
                new file(conn, await open_file(conn, remote_path), remote_path);
            await cleanup(f);
            return Promise.reject(
                new Error("'upload_file' failed: file exists"));
          } catch (err) {
            await create_dest();
          }
        }

        let remain = src_st.size - offset;
        const default_buffer = Buffer.alloc(65536 * 2);
        while (remain > 0) {
          const to_write =
              remain >= default_buffer.length ? default_buffer.length : remain;
          const buffer = to_write == default_buffer.length
                             ? default_buffer
                             : Buffer.alloc(to_write);
          fs.readSync(src_fd, buffer, 0, to_write, offset);
          const written = await dst.write(offset, buffer);
          if (written > 0) {
            remain -= written;
            offset += written;
            if (progress_cb) {
              progress_cb(local_path, remote_path,
                          ((src_st.size - remain) / src_st.size) * 100.0,
                          false);
            }
          }
        }

        if (progress_cb) {
          progress_cb(local_path, remote_path, 100, true);
        }

        await cleanup(dst);
        return true;
      } catch (err) {
        await cleanup(dst);
        return Promise.reject(new Error(`'upload_file' failed: ${err}`));
      }
    } catch (err) {
      await cleanup();
      return Promise.reject(new Error(`'upload_file' failed: ${err}`));
    }
  } catch (err) {
    return Promise.reject(new Error(`'upload_file' failed: ${err}`));
  }
};

export const write_file =
    async (conn, handle, remote_path, offset, buffer, optional_thread_id) => {
  try {
    const request = new packet();
    request.encode_utf8(remote_path);
    request.encode_ui64(buffer.length);
    request.encode_buffer(buffer);
    request.encode_ui64(offset);
    request.encode_ui64(handle);

    const response =
        await conn.send('::RemoteFUSEWrite', request, optional_thread_id);
    response.decode_ui32(); // Service flags

    const result = response.decode_i32();
    if (result === buffer.length) {
      return result;
    }
    return Promise.reject(new Error(`'write_file' error: ${result}`));
  } catch (err) {
    return Promise.reject(new Error(`'write_file' failed: ${err}`));
  }
};
src/utils/byte_order.js (new file)
@@ -0,0 +1,112 @@
export const is_big_endian_system =
    new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x12;

export const is_little_endian_system =
    new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x78;

export const i8_to_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(1);
  buffer.writeInt8(num);
  return new Uint8Array(buffer);
};

export const ui8_array_to_i8 = (ar, offset) => {
  const buffer = Buffer.alloc(1);
  buffer[0] = ar[offset];

  return buffer.readInt8(0);
};

export const ui8_to_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(1);
  buffer.writeUInt8(num);
  return new Uint8Array(buffer);
};

export const ui8_array_to_ui8 = (ar, offset) => {
  const buffer = Buffer.alloc(1);
  buffer[0] = ar[offset];

  return buffer.readUInt8(0);
};

export const i16_to_be_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(2);
  buffer.writeInt16BE(num);
  return new Uint8Array(buffer);
};

export const be_ui8_array_to_i16 = (ar, offset) => {
  const buffer = Buffer.alloc(2);
  for (let i = offset; i < buffer.length + offset; i++) {
    buffer[i - offset] = ar[i];
  }
  return buffer.readInt16BE(0);
};

export const ui16_to_be_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(2);
  buffer.writeUInt16BE(num);
  return new Uint8Array(buffer);
};

export const be_ui8_array_to_ui16 = (ar, offset) => {
  const buffer = Buffer.alloc(2);
  for (let i = offset; i < buffer.length + offset; i++) {
    buffer[i - offset] = ar[i];
  }
  return buffer.readUInt16BE(0);
};

export const i32_to_be_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(4);
  buffer.writeInt32BE(num);
  return new Uint8Array(buffer);
};

export const be_ui8_array_to_i32 = (ar, offset) => {
  const buffer = Buffer.alloc(4);
  for (let i = offset; i < buffer.length + offset; i++) {
    buffer[i - offset] = ar[i];
  }
  return buffer.readInt32BE(0);
};

export const ui32_to_be_ui8_array = num => {
  if (typeof num === 'string' || num instanceof String) {
    num = parseInt(num, 10);
  }

  const buffer = Buffer.alloc(4);
  buffer.writeUInt32BE(num);
  return new Uint8Array(buffer);
};

export const be_ui8_array_to_ui32 = (ar, offset) => {
  const buffer = Buffer.alloc(4);
  for (let i = offset; i < buffer.length + offset; i++) {
    buffer[i - offset] = ar[i];
  }

  return buffer.readUInt32BE(0);
};
src/utils/constants.js (new file)
@@ -0,0 +1,3 @@
const {v4 : uuidv4} = require('uuid');

export const instance_id = uuidv4();
src/utils/jschacha20.js (new file)
@@ -0,0 +1,322 @@
'use strict';
/*
 * Copyright (c) 2017, Bubelich Mykola
 * https://www.bubelich.com
 *
 * (。◕‿‿◕。)
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of the copyright holder nor the names of its contributors
 * may be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * ChaCha20 is a stream cipher designed by D. J. Bernstein.
 * It is a refinement of the Salsa20 algorithm, and it uses a 256-bit key.
 *
 * ChaCha20 successively calls the ChaCha20 block function, with the same key and nonce, and with successively increasing block counter parameters.
 * ChaCha20 then serializes the resulting state by writing the numbers in little-endian order, creating a keystream block.
 *
 * Concatenating the keystream blocks from the successive blocks forms a keystream.
 * The ChaCha20 function then performs an XOR of this keystream with the plaintext.
 * Alternatively, each keystream block can be XORed with a plaintext block before proceeding to create the next block, saving some memory.
 * There is no requirement for the plaintext to be an integral multiple of 512 bits. If there is extra keystream from the last block, it is discarded.
 *
 * The inputs to ChaCha20 are:
 * - 256-bit key
 * - 32-bit initial counter
 * - 96-bit nonce. In some protocols, this is known as the Initialization Vector
 * - Arbitrary-length plaintext
 *
 * Implementation derived from chacha-ref.c version 20080118
 * See for details: http://cr.yp.to/chacha/chacha-20080128.pdf
 */

/**
 *
 * @param {Uint8Array} key
 * @param {Uint8Array} nonce
 * @param {number} counter
 * @throws {Error}
 *
 * @constructor
 */
var JSChaCha20 = function(key, nonce, counter) {
  if (typeof counter === 'undefined') {
    counter = 0;
  }

  if (!(key instanceof Uint8Array) || key.length !== 32) {
    throw new Error('Key should be 32 byte array!');
  }

  if (!(nonce instanceof Uint8Array) || nonce.length !== 12) {
    throw new Error('Nonce should be 12 byte array!');
  }

  this._rounds = 20;
  // Constants
  this._sigma = [0x61707865, 0x3320646e, 0x79622d32, 0x6b206574];

  // param construction
  this._param = [
    this._sigma[0], this._sigma[1], this._sigma[2], this._sigma[3],
    // key
    this._get32(key, 0), this._get32(key, 4),
    this._get32(key, 8), this._get32(key, 12),
    this._get32(key, 16), this._get32(key, 20),
    this._get32(key, 24), this._get32(key, 28),
    // counter
    counter,
    // nonce
    this._get32(nonce, 0), this._get32(nonce, 4), this._get32(nonce, 8),
  ];

  // init 64 byte keystream block //
  this._keystream = [
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  ];

  // internal byte counter //
  this._byteCounter = 0;
};

JSChaCha20.prototype._chacha = function() {
  var mix = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
  var i = 0;
  var b = 0;

  // copy param array to mix //
  for (i = 0; i < 16; i++) {
    mix[i] = this._param[i];
  }

  // mix rounds //
  for (i = 0; i < this._rounds; i += 2) {
    this._quarterround(mix, 0, 4, 8, 12);
    this._quarterround(mix, 1, 5, 9, 13);
    this._quarterround(mix, 2, 6, 10, 14);
    this._quarterround(mix, 3, 7, 11, 15);

    this._quarterround(mix, 0, 5, 10, 15);
    this._quarterround(mix, 1, 6, 11, 12);
    this._quarterround(mix, 2, 7, 8, 13);
    this._quarterround(mix, 3, 4, 9, 14);
  }

  for (i = 0; i < 16; i++) {
    // add
    mix[i] += this._param[i];

    // store keystream
    this._keystream[b++] = mix[i] & 0xff;
    this._keystream[b++] = (mix[i] >>> 8) & 0xff;
    this._keystream[b++] = (mix[i] >>> 16) & 0xff;
    this._keystream[b++] = (mix[i] >>> 24) & 0xff;
  }
};

/**
 * The basic operation of the ChaCha algorithm is the quarter round.
 * It operates on four 32-bit unsigned integers, denoted a, b, c, and d.
 *
 * @param {Array} output
 * @param {number} a
 * @param {number} b
 * @param {number} c
 * @param {number} d
 * @private
 */
JSChaCha20.prototype._quarterround = function(output, a, b, c, d) {
  output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 16);
  output[b] = this._rotl(output[b] ^ (output[c] += output[d]), 12);
  output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 8);
  output[b] = this._rotl(output[b] ^ (output[c] += output[d]), 7);

  // JavaScript hack to make UINT32 :) //
  output[a] >>>= 0;
  output[b] >>>= 0;
  output[c] >>>= 0;
  output[d] >>>= 0;
};

/**
 * Little-endian to uint 32 bytes
 *
 * @param {Uint8Array|[number]} data
 * @param {number} index
 * @return {number}
 * @private
 */
JSChaCha20.prototype._get32 = function(data, index) {
  return (
    data[index++] ^
    (data[index++] << 8) ^
    (data[index++] << 16) ^
    (data[index] << 24)
  );
};

/**
 * Cyclic left rotation
 *
 * @param {number} data
 * @param {number} shift
 * @return {number}
 * @private
 */
JSChaCha20.prototype._rotl = function(data, shift) {
  return (data << shift) | (data >>> (32 - shift));
};

/**
 * Encrypt data with key and nonce
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 */
JSChaCha20.prototype.encrypt = function(data) {
  return this._update(data);
};

/**
 * Decrypt data with key and nonce
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 */
JSChaCha20.prototype.decrypt = function(data) {
  return this._update(data);
};

/**
 * Encrypt or Decrypt data with key and nonce
 *
 * @param {Uint8Array} data
 * @return {Uint8Array}
 * @private
 */
JSChaCha20.prototype._update = function(data) {
  if (!(data instanceof Uint8Array) || data.length === 0) {
    throw new Error('Data should be type of bytes (Uint8Array) and not empty!');
  }

  var output = new Uint8Array(data.length);

  // core function, build block and xor with input data //
  for (var i = 0; i < data.length; i++) {
    if (this._byteCounter === 0 || this._byteCounter === 64) {
      // generate new block //
      this._chacha();
      // counter increment //
      this._param[12]++;

      // reset internal counter //
      this._byteCounter = 0;
    }

    output[i] = data[i] ^ this._keystream[this._byteCounter++];
  }

  return output;
};

// EXPORT //
if (typeof module !== 'undefined' && module.exports) {
  module.exports = JSChaCha20;
}
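
A ChaCha20 round trip with the class above (editor's sketch, not part of this commit). The key must be 32 bytes and the nonce 12 bytes; the values here are placeholders, and Node's global TextEncoder is assumed.

const JSChaCha20 = require('./src/utils/jschacha20');

const key = new Uint8Array(32).fill(1);
const nonce = new Uint8Array(12).fill(2);
const plaintext = new TextEncoder().encode('repertory');

const ciphertext = new JSChaCha20(key, nonce, 0).encrypt(plaintext);
// A fresh instance with the same key, nonce and counter regenerates the same
// keystream, so decrypt() (the same XOR) recovers the original bytes.
const recovered = new JSChaCha20(key, nonce, 0).decrypt(ciphertext);
console.log(Buffer.from(recovered).toString()); // 'repertory'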