Merged 1.3.x_branch into master

.eslintrc.json (new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "env": {
    "browser": true,
    "es2021": true,
    "jest/globals": true
  },
  "extends": "eslint:recommended",
  "parserOptions": {
    "ecmaVersion": 12,
    "sourceType": "module"
  },
  "plugins": [
    "jest"
  ],
  "rules": {
  }
}
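
With the new config in place the linter can be run directly; a minimal invocation sketch (the `src` path and the `lint` script name are assumptions, not part of this commit):

```shell
# Lint the library sources against .eslintrc.json (ESLint 7 CLI)
npx eslint src --ext .js

# Optionally exposed as an npm script, e.g. "lint": "eslint src --ext .js"
npm run lint
```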

.prettierrc.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "trailingComma": "es5",
  "tabWidth": 2,
  "semi": true,
  "singleQuote": true,
  "jsxBracketSameLine": true
}
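
Most of the hunks below are mechanical reformatting under this config; a minimal sketch of the typical change (assuming Prettier 2.x, where `arrowParens` defaults to `"always"`):

```javascript
// Before: a single-argument arrow function written without parentheses
//   const greet = name => `hello ${name}`;

// After Prettier 2.x with the config above: parentheses are added, and
// single quotes and semicolons are enforced per .prettierrc.json.
const greet = (name) => `hello ${name}`;

console.log(greet('repertory')); // hello repertory
```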

@@ -5,7 +5,7 @@
 ## Installing
 
 ```shell
-npm i @scottg1/repertory-js
+npm i @blockstorage/repertory-js
 ```
 
 ## Repertory Configuration
@@ -55,7 +55,7 @@ also be set to a strong, random password.
 ## Example API Usage
 
 ```javascript
-const rep = require('@scottg1/repertory-js');
+const rep = require('@blockstorage/repertory-js');
 
 
 // Repertory host settings
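
For reference, a minimal end-to-end sketch under the renamed scope; the host, port, and password values are placeholders, and the calls mirror those exercised by the integration tests later in this diff:

```javascript
const rep = require('@blockstorage/repertory-js');

const main = async () => {
  // Placeholder connection settings; substitute your Repertory remote mount values.
  const conn = await rep.create_pool(2, 'localhost', 20000, 'my-password');
  const api = rep.create_api(conn);

  // One of the calls shown in the tests below.
  const info = await api.get_drive_information();
  console.log(info);
};

main().catch(console.error);
```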

@@ -1,5 +1,5 @@
 {
-  "name": "@scottg1/repertory-js",
+  "name": "@blockstorage/repertory-js",
   "version": "1.3.1-r1",
   "description": "A Node.js module for interfacing with Repertory's remote mount API",
   "scripts": {
@@ -35,6 +35,8 @@
     "@babel/plugin-transform-runtime": "^7.13.9",
     "@babel/preset-env": "^7.13.9",
     "babel-plugin-transform-class-properties": "^6.24.1",
+    "eslint": "^7.22.0",
+    "eslint-plugin-jest": "^24.3.2",
     "jest": "^26.6.3"
   },
   "type": "module",
@@ -19,21 +19,23 @@ test(`socket receive data fails when decryption fails`, async () => {
       on: (name, cb) => {
         cbl[name] = cb;
       },
     }
   };
 
   const conn = new connection('', 0, 'b', socket);
   let reject;
-  const mock_reject = jest.fn().mockImplementation(e => reject(e));
+  const mock_reject = jest.fn().mockImplementation((e) => reject(e));
   conn.reject = mock_reject;
   conn.resolve = jest.fn();
 
   const p = new packet('a');
   await p.encrypt();
   p.encode_top_ui32(p.buffer.length);
-  await expect(new Promise((_, r) => {
+  await expect(
+    new Promise((_, r) => {
       reject = r;
       cbl['data'](Buffer.from(p.buffer));
-  })).rejects.toThrow(Error);
+    })
+  ).rejects.toThrow(Error);
   expect(mock_reject.mock.calls.length).toBe(1);
 });
@@ -42,8 +44,7 @@ test(`disconnect succeeds if an error is thrown`, async () => {
     destroy: () => {
       throw new Error('mock destroy error');
     },
-    on: () => {
-    },
+    on: () => {},
   };
 
   const conn = new connection('', 0, 'b', socket);
@@ -56,7 +57,7 @@ test(`send fails on socket error`, async () => {
     on: (name, cb) => {
       cbl[name] = cb;
     },
   }
   };
 
   const conn = new connection('', 0, 'b', socket);
   const mock_reject = jest.fn();
@@ -73,7 +74,7 @@ test(`error is thrown when socket is closed`, async () => {
     on: (name, cb) => {
       cbl[name] = cb;
     },
   }
   };
 
   const conn = new connection('', 0, 'b', socket);
   const mock_reject = jest.fn();

@@ -19,7 +19,7 @@ test(`error on socket release is ignored`, async () => {
       invoked = true;
       throw new Error('mock release error');
     },
   }
   };
 });
 
 const mock_send = jest.fn();
@@ -45,9 +45,8 @@ test(`connection pool send fails when connection send fails`, async () => {
   const conn = new connection_pool(2, '', 20000);
   jest.spyOn(conn.pool, 'acquire').mockImplementation(() => {
     return {
-      release: () => {
-      },
-    }
+      release: () => {},
+    };
   });
 
   const mock_send = jest.fn();

@@ -1,4 +1,4 @@
-import {get_version, instance_id, package_json} from '../utils/constants'
+import { get_version, instance_id, package_json } from '../utils/constants';
 
 const uuid = require('uuid');
 
@@ -1,13 +1,11 @@
 import file from '../io/file';
 
-jest.mock('../ops/index.js', () => (
-  {
-    ...(jest.requireActual('../ops/index.js')),
+jest.mock('../ops/index.js', () => ({
+  ...jest.requireActual('../ops/index.js'),
   close_file: jest.fn(),
-}
-));
+}));
 
-import {close_file} from '../ops/index';
+import { close_file } from '../ops/index';
 
 test(`can close a closed file`, async () => {
   const f = new file();
 
@@ -1,6 +1,6 @@
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import {Uint64BE} from 'int64-buffer';
|
||||
import { Uint64BE } from 'int64-buffer';
|
||||
|
||||
import * as repertory from '../index.js';
|
||||
import connection from '../networking/connection';
|
||||
@@ -10,13 +10,13 @@ const TEST_HOST = process.env.TEST_HOST || 'localhost';
|
||||
const TEST_PASSWORD = process.env.TEST_PASSWORD || '';
|
||||
const TEST_PORT = process.env.TEST_PORT || 20000;
|
||||
|
||||
const calculate_sha256 = path => {
|
||||
const calculate_sha256 = (path) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
|
||||
fs.createReadStream(path)
|
||||
.on('data', data => hash.update(data))
|
||||
.on('error', err => reject(err))
|
||||
.on('data', (data) => hash.update(data))
|
||||
.on('error', (err) => reject(err))
|
||||
.on('end', () => {
|
||||
const h = hash.digest('hex');
|
||||
console.log(path, h);
|
||||
@@ -43,8 +43,12 @@ test('can create a connection to repertory api', async () => {
|
||||
|
||||
test('create_pool returns a connection if pool size is <=1', async () => {
|
||||
for (let i = 0; i < 2; i++) {
|
||||
const conn =
|
||||
await repertory.create_pool(i, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
i,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
expect(conn).toBeInstanceOf(connection);
|
||||
test_connection(conn, true);
|
||||
|
||||
@@ -53,8 +57,12 @@ test('create_pool returns a connection if pool size is <=1', async () => {
|
||||
});
|
||||
|
||||
test('can create a connection pool', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
console.log(conn);
|
||||
expect(conn).toBeInstanceOf(connection_pool);
|
||||
expect(conn.host_or_ip).toEqual(TEST_HOST);
|
||||
@@ -68,8 +76,12 @@ test('can create a connection pool', async () => {
|
||||
});
|
||||
|
||||
test('can get drive information using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const di = await api.get_drive_information();
|
||||
console.log(di);
|
||||
@@ -82,8 +94,12 @@ test('can get drive information using api', async () => {
|
||||
});
|
||||
|
||||
test('can create and remove a directory using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.directory.create('/repertory_js')).toEqual(0);
|
||||
expect(await api.directory.remove('/repertory_js')).toEqual(0);
|
||||
@@ -92,8 +108,12 @@ test('can create and remove a directory using api', async () => {
|
||||
});
|
||||
|
||||
test('can get directory list and snapshot using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
|
||||
const test_results = async (remote_path, page_count, get_page) => {
|
||||
@@ -131,8 +151,12 @@ test('can get directory list and snapshot using api', async () => {
|
||||
});
|
||||
|
||||
test('can create, close and delete a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
console.log(f);
|
||||
@@ -149,8 +173,12 @@ test('can create, close and delete a file using api', async () => {
|
||||
});
|
||||
|
||||
test('can open, close and delete a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
let f = await api.file.create_or_open('/repertory_file.dat');
|
||||
expect(await f.close()).toEqual(0);
|
||||
@@ -170,8 +198,12 @@ test('can open, close and delete a file using api', async () => {
|
||||
});
|
||||
|
||||
test('can write to and read from a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
|
||||
@@ -193,8 +225,12 @@ test('can write to and read from a file using api', async () => {
|
||||
});
|
||||
|
||||
test('can truncate a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
const f = await api.file.create_or_open('/repertory_file.dat');
|
||||
|
||||
@@ -213,22 +249,34 @@ test('can truncate a file using api', async () => {
|
||||
test('can upload and download a file using api', async () => {
|
||||
try {
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
} catch {
|
||||
}
|
||||
} catch {}
|
||||
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
}
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await calculate_sha256('test.dat'))
|
||||
.toEqual(await calculate_sha256('repertory_test.dat'));
|
||||
expect(await calculate_sha256('test.dat')).toEqual(
|
||||
await calculate_sha256('repertory_test.dat')
|
||||
);
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
@@ -237,21 +285,39 @@ test('can upload and download a file using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('can download and overwrite a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
}
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
true))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
true
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
@@ -260,21 +326,39 @@ test('can download and overwrite a file using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('download fails if overwrite is false using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
}
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
await expect(api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
false))
|
||||
.rejects.toThrow(Error);
|
||||
await expect(
|
||||
api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
false
|
||||
)
|
||||
).rejects.toThrow(Error);
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
@@ -283,17 +367,29 @@ test('download fails if overwrite is false using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('can upload and overwrite a file using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
true))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload(
|
||||
'test.dat',
|
||||
'/repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
true
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
|
||||
@@ -301,17 +397,29 @@ test('can upload and overwrite a file using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('upload fails if overwrite is false using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
await expect(api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
false))
|
||||
.rejects.toThrow(Error);
|
||||
await expect(
|
||||
api.file.upload(
|
||||
'test.dat',
|
||||
'/repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
false
|
||||
)
|
||||
).rejects.toThrow(Error);
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
|
||||
@@ -319,12 +427,18 @@ test('upload fails if overwrite is false using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('can resume download using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload('test.dat', '/repertory_test.dat', (l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
})
|
||||
).toBeTruthy();
|
||||
|
||||
const fd = fs.openSync('test.dat', 'r');
|
||||
const buffer = Buffer.alloc(1024);
|
||||
@@ -333,13 +447,21 @@ test('can resume download using api', async () => {
|
||||
|
||||
fs.writeFileSync('repertory_test.dat', buffer);
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
false, true))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
false,
|
||||
true
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await calculate_sha256('test.dat'))
|
||||
.toEqual(await calculate_sha256('repertory_test.dat'));
|
||||
expect(await calculate_sha256('test.dat')).toEqual(
|
||||
await calculate_sha256('repertory_test.dat')
|
||||
);
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
@@ -348,8 +470,12 @@ test('can resume download using api', async () => {
|
||||
}, 60000);
|
||||
|
||||
test('can resume upload using api', async () => {
|
||||
const conn =
|
||||
await repertory.create_pool(2, TEST_HOST, TEST_PORT, TEST_PASSWORD);
|
||||
const conn = await repertory.create_pool(
|
||||
2,
|
||||
TEST_HOST,
|
||||
TEST_PORT,
|
||||
TEST_PASSWORD
|
||||
);
|
||||
const api = repertory.create_api(conn);
|
||||
|
||||
const fd = fs.openSync('test.dat', 'r');
|
||||
@@ -361,17 +487,31 @@ test('can resume upload using api', async () => {
|
||||
await f.write(0, buffer);
|
||||
await f.close();
|
||||
|
||||
expect(await api.file.upload('test.dat', '/repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); },
|
||||
false, true))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.upload(
|
||||
'test.dat',
|
||||
'/repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
},
|
||||
false,
|
||||
true
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await api.file.download('/repertory_test.dat', 'repertory_test.dat',
|
||||
(l, r, p, c) => { console.log(l, r, p, c); }))
|
||||
.toBeTruthy();
|
||||
expect(
|
||||
await api.file.download(
|
||||
'/repertory_test.dat',
|
||||
'repertory_test.dat',
|
||||
(l, r, p, c) => {
|
||||
console.log(l, r, p, c);
|
||||
}
|
||||
)
|
||||
).toBeTruthy();
|
||||
|
||||
expect(await calculate_sha256('test.dat'))
|
||||
.toEqual(await calculate_sha256('repertory_test.dat'));
|
||||
expect(await calculate_sha256('test.dat')).toEqual(
|
||||
await calculate_sha256('repertory_test.dat')
|
||||
);
|
||||
|
||||
expect(await api.file.delete('/repertory_test.dat')).toEqual(0);
|
||||
fs.unlinkSync('repertory_test.dat');
|
||||
|
||||
src/index.js (62 lines changed)
@@ -1,7 +1,7 @@
|
||||
import file from './io/file'
|
||||
import file from './io/file';
|
||||
import connection from './networking/connection';
|
||||
import connection_pool from './networking/connection_pool';
|
||||
import * as ops from './ops'
|
||||
import * as ops from './ops';
|
||||
|
||||
export const connect = async (host_or_ip, port, password) => {
|
||||
const conn = new connection(host_or_ip, port, password);
|
||||
@@ -9,33 +9,53 @@ export const connect = async (host_or_ip, port, password) => {
|
||||
return conn;
|
||||
};
|
||||
|
||||
export const create_api = conn => {
|
||||
export const create_api = (conn) => {
|
||||
return {
|
||||
directory : {
|
||||
create: async remote_path => ops.create_directory(conn, remote_path),
|
||||
directory: {
|
||||
create: async (remote_path) => ops.create_directory(conn, remote_path),
|
||||
list: async (remote_path, page_reader_cb) =>
|
||||
ops.list_directory(conn, remote_path, page_reader_cb),
|
||||
remove: async remote_path => ops.remove_directory(conn, remote_path),
|
||||
snapshot: async remote_path => {
|
||||
remove: async (remote_path) => ops.remove_directory(conn, remote_path),
|
||||
snapshot: async (remote_path) => {
|
||||
return ops.snapshot_directory(conn, remote_path);
|
||||
},
|
||||
},
|
||||
file : {
|
||||
create_or_open : async remote_path => new file(
|
||||
conn, await ops.create_or_open_file(conn, remote_path), remote_path),
|
||||
delete : async (remote_path) => ops.delete_file(conn, remote_path),
|
||||
download :
|
||||
async (remote_path, local_path, progress_cb, overwrite, resume) =>
|
||||
ops.download_file(conn, remote_path, local_path, progress_cb,
|
||||
overwrite, resume),
|
||||
open : async remote_path =>
|
||||
file: {
|
||||
create_or_open: async (remote_path) =>
|
||||
new file(
|
||||
conn,
|
||||
await ops.create_or_open_file(conn, remote_path),
|
||||
remote_path
|
||||
),
|
||||
delete: async (remote_path) => ops.delete_file(conn, remote_path),
|
||||
download: async (
|
||||
remote_path,
|
||||
local_path,
|
||||
progress_cb,
|
||||
overwrite,
|
||||
resume
|
||||
) =>
|
||||
ops.download_file(
|
||||
conn,
|
||||
remote_path,
|
||||
local_path,
|
||||
progress_cb,
|
||||
overwrite,
|
||||
resume
|
||||
),
|
||||
open: async (remote_path) =>
|
||||
new file(conn, await ops.open_file(conn, remote_path), remote_path),
|
||||
upload :
|
||||
async (local_path, remote_path, progress_cb, overwrite, resume) =>
|
||||
ops.upload_file(conn, local_path, remote_path, progress_cb,
|
||||
overwrite, resume),
|
||||
upload: async (local_path, remote_path, progress_cb, overwrite, resume) =>
|
||||
ops.upload_file(
|
||||
conn,
|
||||
local_path,
|
||||
remote_path,
|
||||
progress_cb,
|
||||
overwrite,
|
||||
resume
|
||||
),
|
||||
},
|
||||
get_drive_information : async () => ops.get_drive_information(conn),
|
||||
get_drive_information: async () => ops.get_drive_information(conn),
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@@ -17,8 +17,12 @@ export default class file {
|
||||
|
||||
async close() {
|
||||
if (this.handle !== null) {
|
||||
const result = await ops.close_file(this.conn, this.remote_path,
|
||||
this.handle, this.thread_id);
|
||||
const result = await ops.close_file(
|
||||
this.conn,
|
||||
this.remote_path,
|
||||
this.handle,
|
||||
this.thread_id
|
||||
);
|
||||
if (result === 0) {
|
||||
this.handle = null;
|
||||
}
|
||||
@@ -30,38 +34,59 @@ export default class file {
|
||||
|
||||
async get_size() {
|
||||
if (this.handle === null) {
|
||||
return Promise.reject(new Error('\'get_size()\' failed: invalid handle'));
|
||||
return Promise.reject(new Error("'get_size()' failed: invalid handle"));
|
||||
}
|
||||
|
||||
const attrs = await ops.get_file_attributes(
|
||||
this.conn, this.handle, this.remote_path, this.thread_id);
|
||||
this.conn,
|
||||
this.handle,
|
||||
this.remote_path,
|
||||
this.thread_id
|
||||
);
|
||||
return attrs.size;
|
||||
}
|
||||
|
||||
async read(offset, length) {
|
||||
if (this.handle === null) {
|
||||
return Promise.reject(new Error('\'read()\' failed: invalid handle'));
|
||||
return Promise.reject(new Error("'read()' failed: invalid handle"));
|
||||
}
|
||||
|
||||
return ops.read_file(this.conn, this.handle, this.remote_path, offset,
|
||||
length, this.thread_id);
|
||||
return ops.read_file(
|
||||
this.conn,
|
||||
this.handle,
|
||||
this.remote_path,
|
||||
offset,
|
||||
length,
|
||||
this.thread_id
|
||||
);
|
||||
}
|
||||
|
||||
async truncate(length) {
|
||||
if (this.handle === null) {
|
||||
return Promise.reject(new Error('\'truncate()\' failed: invalid handle'));
|
||||
return Promise.reject(new Error("'truncate()' failed: invalid handle"));
|
||||
}
|
||||
|
||||
return ops.truncate_file(this.conn, this.handle, this.remote_path, length,
|
||||
this.thread_id);
|
||||
return ops.truncate_file(
|
||||
this.conn,
|
||||
this.handle,
|
||||
this.remote_path,
|
||||
length,
|
||||
this.thread_id
|
||||
);
|
||||
}
|
||||
|
||||
async write(offset, buffer) {
|
||||
if (this.handle === null) {
|
||||
return Promise.reject(new Error('\'write()\' failed: invalid handle'));
|
||||
return Promise.reject(new Error("'write()' failed: invalid handle"));
|
||||
}
|
||||
|
||||
return ops.write_file(this.conn, this.handle, this.remote_path, offset,
|
||||
buffer, this.thread_id);
|
||||
return ops.write_file(
|
||||
this.conn,
|
||||
this.handle,
|
||||
this.remote_path,
|
||||
offset,
|
||||
buffer,
|
||||
this.thread_id
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import Socket from 'net';
|
||||
|
||||
import * as constants from '../utils/constants'
|
||||
import * as constants from '../utils/constants';
|
||||
|
||||
import packet from './packet';
|
||||
|
||||
@@ -33,13 +33,16 @@ export default class connection {
|
||||
if (!this.socket) {
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
this.socket =
|
||||
Socket.createConnection(this.port, this.host_or_ip, err => {
|
||||
this.socket = Socket.createConnection(
|
||||
this.port,
|
||||
this.host_or_ip,
|
||||
(err) => {
|
||||
if (err) {
|
||||
return reject(err)
|
||||
return reject(err);
|
||||
}
|
||||
return resolve()
|
||||
});
|
||||
return resolve();
|
||||
}
|
||||
);
|
||||
});
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'connect()' failed: ${err}`));
|
||||
@@ -57,7 +60,7 @@ export default class connection {
|
||||
buffer = null;
|
||||
};
|
||||
|
||||
this.socket.on('data', chunk => {
|
||||
this.socket.on('data', (chunk) => {
|
||||
buffer = buffer ? Buffer.concat([buffer, chunk]) : chunk;
|
||||
if (buffer.length > 4) {
|
||||
const size = buffer.readUInt32BE(0);
|
||||
@@ -71,19 +74,20 @@ export default class connection {
|
||||
|
||||
const response = new packet(this.password);
|
||||
response.buffer = new Uint8Array(packet_data);
|
||||
response.decrypt()
|
||||
response
|
||||
.decrypt()
|
||||
.then(() => {
|
||||
resolve(response)
|
||||
})
|
||||
.catch(e => {
|
||||
reject(e)
|
||||
resolve(response);
|
||||
})
|
||||
.catch((e) => {
|
||||
reject(e);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this.socket.on('error', e => {
|
||||
this.socket.on('error', (e) => {
|
||||
if (this.reject) {
|
||||
const reject = this.reject;
|
||||
|
||||
@@ -115,7 +119,7 @@ export default class connection {
|
||||
this.connected = false;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
console.log(e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,7 +135,7 @@ export default class connection {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.reject = reject;
|
||||
this.resolve = resolve;
|
||||
this.socket.write(Buffer.from(packet.buffer), null, err => {
|
||||
this.socket.write(Buffer.from(packet.buffer), null, (err) => {
|
||||
if (err) {
|
||||
this.cleanup_handlers();
|
||||
reject(err);
|
||||
|
||||
@@ -9,18 +9,18 @@ export default class connection_pool {
|
||||
this.password = password;
|
||||
if (pool_size > 1) {
|
||||
this.pool = new Pool({
|
||||
connect : {host : host_or_ip, port : port},
|
||||
connectTimeout : 5000,
|
||||
pool : {max : pool_size, min : 2}
|
||||
connect: { host: host_or_ip, port: port },
|
||||
connectTimeout: 5000,
|
||||
pool: { max: pool_size, min: 2 },
|
||||
});
|
||||
} else {
|
||||
throw new Error("'pool_size' must be > 1");
|
||||
}
|
||||
}
|
||||
|
||||
host_or_ip = "";
|
||||
host_or_ip = '';
|
||||
next_thread_id = 1;
|
||||
password = "";
|
||||
password = '';
|
||||
port = 20000;
|
||||
pool;
|
||||
shutdown = false;
|
||||
@@ -48,16 +48,19 @@ export default class connection_pool {
|
||||
};
|
||||
|
||||
try {
|
||||
const result = await new connection(this.host_or_ip, this.port,
|
||||
this.password, socket)
|
||||
.send(method_name, packet,
|
||||
optional_thread_id || socket.thread_id);
|
||||
const result = await new connection(
|
||||
this.host_or_ip,
|
||||
this.port,
|
||||
this.password,
|
||||
socket
|
||||
).send(method_name, packet, optional_thread_id || socket.thread_id);
|
||||
cleanup();
|
||||
return result;
|
||||
} catch (err) {
|
||||
cleanup();
|
||||
return Promise.reject(
|
||||
new Error(`'send(${method_name})' failed: ${err}`));
|
||||
new Error(`'send(${method_name})' failed: ${err}`)
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'acquire()' socket failed: ${err}`));
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import {randomBytes} from 'crypto';
|
||||
import {Int64BE, Uint64BE} from 'int64-buffer';
|
||||
import {sha256} from 'js-sha256';
|
||||
import {TextEncoder} from 'text-encoding';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { Int64BE, Uint64BE } from 'int64-buffer';
|
||||
import { sha256 } from 'js-sha256';
|
||||
import { TextEncoder } from 'text-encoding';
|
||||
|
||||
import {
|
||||
be_ui8_array_to_i16,
|
||||
@@ -20,7 +20,9 @@ import {
|
||||
import JSChaCha20 from '../utils/jschacha20';
|
||||
|
||||
export default class packet {
|
||||
constructor(token) { this.token = token; }
|
||||
constructor(token) {
|
||||
this.token = token;
|
||||
}
|
||||
|
||||
static HEADER = new TextEncoder().encode('repertory');
|
||||
|
||||
@@ -28,13 +30,14 @@ export default class packet {
|
||||
decode_offset = 0;
|
||||
token;
|
||||
|
||||
append_buffer = buffer => {
|
||||
append_buffer = (buffer) => {
|
||||
if (!(buffer instanceof Uint8Array)) {
|
||||
throw new Error('Buffer must be of type Uint8Array');
|
||||
}
|
||||
|
||||
this.buffer =
|
||||
this.buffer ? new Uint8Array([...this.buffer, ...buffer ]) : buffer;
|
||||
this.buffer = this.buffer
|
||||
? new Uint8Array([...this.buffer, ...buffer])
|
||||
: buffer;
|
||||
};
|
||||
|
||||
clear = () => {
|
||||
@@ -42,13 +45,15 @@ export default class packet {
|
||||
this.decode_offset = 0;
|
||||
};
|
||||
|
||||
decode_buffer = length => {
|
||||
decode_buffer = (length) => {
|
||||
if (!this.buffer) {
|
||||
throw new Error('Invalid buffer');
|
||||
}
|
||||
|
||||
const ret =
|
||||
this.buffer.slice(this.decode_offset, this.decode_offset + length);
|
||||
const ret = this.buffer.slice(
|
||||
this.decode_offset,
|
||||
this.decode_offset + length
|
||||
);
|
||||
this.decode_offset += length;
|
||||
return Buffer.from(ret);
|
||||
};
|
||||
@@ -68,9 +73,20 @@ export default class packet {
|
||||
const flags = this.decode_ui32();
|
||||
const directory = !!this.decode_ui8();
|
||||
return {
|
||||
mode, nlink, uid, gid, atime, mtime, ctime, birth_time, size, blocks,
|
||||
blksize, flags, directory,
|
||||
}
|
||||
mode,
|
||||
nlink,
|
||||
uid,
|
||||
gid,
|
||||
atime,
|
||||
mtime,
|
||||
ctime,
|
||||
birth_time,
|
||||
size,
|
||||
blocks,
|
||||
blksize,
|
||||
flags,
|
||||
directory,
|
||||
};
|
||||
};
|
||||
|
||||
decode_utf8 = () => {
|
||||
@@ -92,11 +108,13 @@ export default class packet {
|
||||
throw new Error('String not found in buffer');
|
||||
};
|
||||
|
||||
decode_i8 =
|
||||
() => { return ui8_array_to_i8(this.buffer, this.decode_offset++); };
|
||||
decode_i8 = () => {
|
||||
return ui8_array_to_i8(this.buffer, this.decode_offset++);
|
||||
};
|
||||
|
||||
decode_ui8 =
|
||||
() => { return ui8_array_to_ui8(this.buffer, this.decode_offset++); };
|
||||
decode_ui8 = () => {
|
||||
return ui8_array_to_ui8(this.buffer, this.decode_offset++);
|
||||
};
|
||||
|
||||
decode_i16 = () => {
|
||||
const ret = be_ui8_array_to_i16(this.buffer, this.decode_offset);
|
||||
@@ -124,7 +142,7 @@ export default class packet {
|
||||
|
||||
decode_i64 = () => {
|
||||
const ret = new Int64BE(
|
||||
this.buffer.slice(this.decode_offset, this.decode_offset + 8),
|
||||
this.buffer.slice(this.decode_offset, this.decode_offset + 8)
|
||||
);
|
||||
this.decode_offset += 8;
|
||||
return ret.toString(10);
|
||||
@@ -132,7 +150,7 @@ export default class packet {
|
||||
|
||||
decode_ui64 = () => {
|
||||
const ret = new Uint64BE(
|
||||
this.buffer.slice(this.decode_offset, this.decode_offset + 8),
|
||||
this.buffer.slice(this.decode_offset, this.decode_offset + 8)
|
||||
);
|
||||
this.decode_offset += 8;
|
||||
return ret.toString(10);
|
||||
@@ -146,9 +164,8 @@ export default class packet {
|
||||
const key = Uint8Array.from(hash.array());
|
||||
const nonce = this.buffer.slice(0, 12);
|
||||
|
||||
this.buffer = new JSChaCha20(key, nonce, 0)
|
||||
.decrypt(
|
||||
this.buffer.slice(12),
|
||||
this.buffer = new JSChaCha20(key, nonce, 0).decrypt(
|
||||
this.buffer.slice(12)
|
||||
);
|
||||
|
||||
this.decode_offset = packet.HEADER.length;
|
||||
@@ -164,65 +181,93 @@ export default class packet {
|
||||
}
|
||||
};
|
||||
|
||||
encode_buffer = buffer => { this.append_buffer(new Uint8Array(buffer)); };
|
||||
encode_buffer = (buffer) => {
|
||||
this.append_buffer(new Uint8Array(buffer));
|
||||
};
|
||||
|
||||
encode_i8 = num => { this.append_buffer(i8_to_ui8_array(num)); };
|
||||
encode_i8 = (num) => {
|
||||
this.append_buffer(i8_to_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_i8 = num => { this.push_buffer(i8_to_ui8_array(num)); };
|
||||
encode_top_i8 = (num) => {
|
||||
this.push_buffer(i8_to_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_u8 = num => { this.append_buffer(ui8_to_ui8_array(num)); };
|
||||
encode_u8 = (num) => {
|
||||
this.append_buffer(ui8_to_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_u8 = num => { this.push_buffer(ui8_to_ui8_array(num)); };
|
||||
encode_top_u8 = (num) => {
|
||||
this.push_buffer(ui8_to_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_i16 = num => { this.append_buffer(i16_to_be_ui8_array(num)); };
|
||||
encode_i16 = (num) => {
|
||||
this.append_buffer(i16_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_i16 = num => { this.push_buffer(i16_to_be_ui8_array(num)); };
|
||||
encode_top_i16 = (num) => {
|
||||
this.push_buffer(i16_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_ui16 = num => { this.append_buffer(ui16_to_be_ui8_array(num)); };
|
||||
encode_ui16 = (num) => {
|
||||
this.append_buffer(ui16_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_ui16 = num => { this.push_buffer(ui16_to_be_ui8_array(num)); };
|
||||
encode_top_ui16 = (num) => {
|
||||
this.push_buffer(ui16_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_i32 = num => { this.append_buffer(i32_to_be_ui8_array(num)); };
|
||||
encode_i32 = (num) => {
|
||||
this.append_buffer(i32_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_i32 = num => { this.push_buffer(i32_to_be_ui8_array(num)); };
|
||||
encode_top_i32 = (num) => {
|
||||
this.push_buffer(i32_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_ui32 = num => { this.append_buffer(ui32_to_be_ui8_array(num)); };
|
||||
encode_ui32 = (num) => {
|
||||
this.append_buffer(ui32_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_top_ui32 = num => { this.push_buffer(ui32_to_be_ui8_array(num)); };
|
||||
encode_top_ui32 = (num) => {
|
||||
this.push_buffer(ui32_to_be_ui8_array(num));
|
||||
};
|
||||
|
||||
encode_i64 = num => {
|
||||
encode_i64 = (num) => {
|
||||
this.append_buffer(new Uint8Array(new Int64BE(num).toArray()));
|
||||
};
|
||||
|
||||
encode_top_i64 =
|
||||
num => { this.push_buffer(new Uint8Array(new Int64BE(num).toArray())); };
|
||||
encode_top_i64 = (num) => {
|
||||
this.push_buffer(new Uint8Array(new Int64BE(num).toArray()));
|
||||
};
|
||||
|
||||
encode_ui64 = num => {
|
||||
encode_ui64 = (num) => {
|
||||
this.append_buffer(new Uint8Array(new Uint64BE(num).toArray()));
|
||||
};
|
||||
|
||||
encode_top_ui64 =
|
||||
num => { this.push_buffer(new Uint8Array(new Uint64BE(num).toArray())); };
|
||||
encode_top_ui64 = (num) => {
|
||||
this.push_buffer(new Uint8Array(new Uint64BE(num).toArray()));
|
||||
};
|
||||
|
||||
encode_utf8 = str => {
|
||||
encode_utf8 = (str) => {
|
||||
if (!(typeof str === 'string' || str instanceof String)) {
|
||||
throw new Error('Value must be of type string');
|
||||
}
|
||||
|
||||
const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]);
|
||||
const buffer = new Uint8Array([...new TextEncoder().encode(str), 0]);
|
||||
this.append_buffer(buffer);
|
||||
};
|
||||
|
||||
encode_top_utf8 = str => {
|
||||
encode_top_utf8 = (str) => {
|
||||
if (!(typeof str === 'string' || str instanceof String)) {
|
||||
throw new Error('Value must be of type string');
|
||||
}
|
||||
|
||||
const buffer = new Uint8Array([...new TextEncoder().encode(str), 0 ]);
|
||||
const buffer = new Uint8Array([...new TextEncoder().encode(str), 0]);
|
||||
this.push_buffer(buffer);
|
||||
};
|
||||
|
||||
encrypt = async nonce => {
|
||||
encrypt = async (nonce) => {
|
||||
try {
|
||||
this.push_buffer(packet.HEADER);
|
||||
const hash = sha256.create();
|
||||
@@ -242,12 +287,13 @@ export default class packet {
|
||||
}
|
||||
};
|
||||
|
||||
push_buffer = buffer => {
|
||||
push_buffer = (buffer) => {
|
||||
if (!(buffer instanceof Uint8Array)) {
|
||||
throw new Error('Buffer must be of type Uint8Array');
|
||||
}
|
||||
|
||||
this.buffer =
|
||||
this.buffer ? new Uint8Array([...buffer, ...this.buffer ]) : buffer;
|
||||
this.buffer = this.buffer
|
||||
? new Uint8Array([...buffer, ...this.buffer])
|
||||
: buffer;
|
||||
};
|
||||
}
|
||||
|
||||
src/ops/index.js (226 lines changed)
@@ -1,5 +1,5 @@
|
||||
import fs from 'fs';
|
||||
import {Uint64BE} from 'int64-buffer';
|
||||
import { Uint64BE } from 'int64-buffer';
|
||||
|
||||
import file from '../io/file';
|
||||
import packet from '../networking/packet';
|
||||
@@ -9,8 +9,10 @@ const _snapshot_directory = async (conn, remote_path) => {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteJSONCreateDirectorySnapshot', request);
|
||||
const response = await conn.send(
|
||||
'::RemoteJSONCreateDirectorySnapshot',
|
||||
request
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -28,15 +30,17 @@ const _snapshot_directory = async (conn, remote_path) => {
|
||||
};
|
||||
|
||||
try {
|
||||
const get_page = async page => {
|
||||
const get_page = async (page) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
request.encode_ui64(data.handle);
|
||||
request.encode_ui32(page);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteJSONReadDirectorySnapshot', request);
|
||||
const response = await conn.send(
|
||||
'::RemoteJSONReadDirectorySnapshot',
|
||||
request
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -67,22 +71,29 @@ const _snapshot_directory = async (conn, remote_path) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const close_file =
|
||||
async (conn, remote_path, handle, optional_thread_id) => {
|
||||
export const close_file = async (
|
||||
conn,
|
||||
remote_path,
|
||||
handle,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
request.encode_ui64(handle);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSERelease', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSERelease',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
return response.decode_i32();
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'close_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const create_directory = async (conn, remote_path) => {
|
||||
try {
|
||||
@@ -99,16 +110,22 @@ export const create_directory = async (conn, remote_path) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const create_or_open_file =
|
||||
async (conn, remote_path, optional_thread_id) => {
|
||||
export const create_or_open_file = async (
|
||||
conn,
|
||||
remote_path,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
request.encode_ui16((7 << 6) | (5 << 3));
|
||||
request.encode_ui32(2 | 4); // Read-Write, Create
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSECreate', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSECreate',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -120,7 +137,7 @@ export const create_or_open_file =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'create_or_open_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const delete_file = async (conn, remote_path) => {
|
||||
try {
|
||||
@@ -136,11 +153,17 @@ export const delete_file = async (conn, remote_path) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const download_file =
|
||||
async (conn, remote_path, local_path, progress_cb, overwrite, resume) => {
|
||||
export const download_file = async (
|
||||
conn,
|
||||
remote_path,
|
||||
local_path,
|
||||
progress_cb,
|
||||
overwrite,
|
||||
resume
|
||||
) => {
|
||||
try {
|
||||
const src = new file(conn, await open_file(conn, remote_path), remote_path);
|
||||
const cleanup = async fd => {
|
||||
const cleanup = async (fd) => {
|
||||
try {
|
||||
await src.close();
|
||||
} catch (err) {
|
||||
@@ -174,8 +197,11 @@ export const download_file =
|
||||
|
||||
if (dst_size > src_size) {
|
||||
await cleanup(dst_fd);
|
||||
return Promise.reject(new Error(
|
||||
`'download_file' failed: destination is larger than source`));
|
||||
return Promise.reject(
|
||||
new Error(
|
||||
`'download_file' failed: destination is larger than source`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
offset = dst_size;
|
||||
@@ -183,7 +209,8 @@ export const download_file =
|
||||
if (fs.existsSync(local_path)) {
|
||||
await cleanup(dst_fd);
|
||||
return Promise.reject(
|
||||
new Error(`'download_file' failed: file exists`));
|
||||
new Error(`'download_file' failed: file exists`)
|
||||
);
|
||||
}
|
||||
|
||||
dst_fd = fs.openSync(local_path, 'wx+');
|
||||
@@ -198,8 +225,12 @@ export const download_file =
|
||||
remain -= written;
|
||||
offset += written;
|
||||
if (progress_cb) {
|
||||
progress_cb(local_path, remote_path,
|
||||
((src_size - remain) / src_size) * 100.0, false);
|
||||
progress_cb(
|
||||
local_path,
|
||||
remote_path,
|
||||
((src_size - remain) / src_size) * 100.0,
|
||||
false
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -221,12 +252,14 @@ export const download_file =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'download_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const get_drive_information = async conn => {
|
||||
export const get_drive_information = async (conn) => {
|
||||
try {
|
||||
const response =
|
||||
await conn.send('::RemoteWinFSPGetVolumeInfo', new packet());
|
||||
const response = await conn.send(
|
||||
'::RemoteWinFSPGetVolumeInfo',
|
||||
new packet()
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -241,14 +274,19 @@ export const get_drive_information = async conn => {
|
||||
}
|
||||
|
||||
return Promise.reject(
|
||||
new Error(`'get_drive_information' failed: ${result}`));
|
||||
new Error(`'get_drive_information' failed: ${result}`)
|
||||
);
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'get_drive_information' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
|
||||
export const get_file_attributes =
|
||||
async (conn, handle, remote_path, optional_thread_id) => {
|
||||
export const get_file_attributes = async (
|
||||
conn,
|
||||
handle,
|
||||
remote_path,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
@@ -256,8 +294,11 @@ export const get_file_attributes =
|
||||
request.encode_ui32(0);
|
||||
request.encode_ui32(0);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSEFgetattr', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSEFgetattr',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -269,12 +310,16 @@ export const get_file_attributes =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'get_file_attributes' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const list_directory = async (conn, remote_path, page_reader_cb) => {
|
||||
const dir_snapshot = await _snapshot_directory(conn, remote_path);
|
||||
try {
|
||||
await page_reader_cb(dir_snapshot.remote_path, dir_snapshot.page_count, dir_snapshot.get_page);
|
||||
await page_reader_cb(
|
||||
dir_snapshot.remote_path,
|
||||
dir_snapshot.page_count,
|
||||
dir_snapshot.get_page
|
||||
);
|
||||
await dir_snapshot.release();
|
||||
} catch (err) {
|
||||
await dir_snapshot.release();
|
||||
@@ -288,8 +333,11 @@ export const open_file = async (conn, remote_path, optional_thread_id) => {
|
||||
request.encode_utf8(remote_path);
|
||||
request.encode_ui32(2); // Read-Write
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSEOpen', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSEOpen',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -302,8 +350,14 @@ export const open_file = async (conn, remote_path, optional_thread_id) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const read_file =
|
||||
async (conn, handle, remote_path, offset, length, optional_thread_id) => {
|
||||
export const read_file = async (
|
||||
conn,
|
||||
handle,
|
||||
remote_path,
|
||||
offset,
|
||||
length,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
@@ -311,8 +365,11 @@ export const read_file =
|
||||
request.encode_ui64(offset);
|
||||
request.encode_ui64(handle);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSERead', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSERead',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -323,7 +380,7 @@ export const read_file =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'read_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const remove_directory = async (conn, remote_path) => {
|
||||
try {
|
||||
@@ -341,29 +398,43 @@ export const remove_directory = async (conn, remote_path) => {
|
||||
|
||||
export const snapshot_directory = _snapshot_directory;
|
||||
|
||||
export const truncate_file =
|
||||
async (conn, handle, remote_path, length, optional_thread_id) => {
|
||||
export const truncate_file = async (
|
||||
conn,
|
||||
handle,
|
||||
remote_path,
|
||||
length,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
request.encode_ui64(length);
|
||||
request.encode_ui64(handle);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSEFtruncate', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSEFtruncate',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
return response.decode_i32();
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'truncate_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const upload_file =
|
||||
async (conn, local_path, remote_path, progress_cb, overwrite, resume) => {
|
||||
export const upload_file = async (
|
||||
conn,
|
||||
local_path,
|
||||
remote_path,
|
||||
progress_cb,
|
||||
overwrite,
|
||||
resume
|
||||
) => {
|
||||
try {
|
||||
const src_fd = fs.openSync(local_path, 'r');
|
||||
const cleanup = async f => {
|
||||
const cleanup = async (f) => {
|
||||
try {
|
||||
fs.closeSync(src_fd);
|
||||
} catch (err) {
|
||||
@@ -381,8 +452,11 @@ export const upload_file =
|
||||
const src_st = fs.fstatSync(src_fd);
|
||||
let dst;
|
||||
const create_dest = async () => {
|
||||
dst = new file(conn, await create_or_open_file(conn, remote_path),
|
||||
remote_path);
|
||||
dst = new file(
|
||||
conn,
|
||||
await create_or_open_file(conn, remote_path),
|
||||
remote_path
|
||||
);
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -404,18 +478,25 @@ export const upload_file =
|
||||
|
||||
if (dst_size > src_st.size) {
|
||||
await cleanup(dst);
|
||||
return Promise.reject(new Error(
|
||||
`'upload_file' failed: destination is larger than source`));
|
||||
return Promise.reject(
|
||||
new Error(
|
||||
`'upload_file' failed: destination is larger than source`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
offset = dst_size;
|
||||
} else {
|
||||
try {
|
||||
const f =
|
||||
new file(conn, await open_file(conn, remote_path), remote_path);
|
||||
const f = new file(
|
||||
conn,
|
||||
await open_file(conn, remote_path),
|
||||
remote_path
|
||||
);
|
||||
await cleanup(f);
|
||||
return Promise.reject(
|
||||
new Error('\'upload_file\' failed: file exists'));
|
||||
new Error("'upload_file' failed: file exists")
|
||||
);
|
||||
} catch (err) {
|
||||
await create_dest();
|
||||
}
|
||||
@@ -426,7 +507,8 @@ export const upload_file =
|
||||
while (remain > 0) {
|
||||
const to_write =
|
||||
remain >= default_buffer.length ? default_buffer.length : remain;
|
||||
const buffer = to_write === default_buffer.length
|
||||
const buffer =
|
||||
to_write === default_buffer.length
|
||||
? default_buffer
|
||||
: Buffer.alloc(to_write);
|
||||
fs.readSync(src_fd, buffer, 0, to_write, offset);
|
||||
@@ -435,9 +517,12 @@ export const upload_file =
|
||||
remain -= written;
|
||||
offset += written;
|
||||
if (progress_cb) {
|
||||
progress_cb(local_path, remote_path,
|
||||
progress_cb(
|
||||
local_path,
|
||||
remote_path,
|
||||
((src_st.size - remain) / src_st.size) * 100.0,
|
||||
false);
|
||||
false
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -459,10 +544,16 @@ export const upload_file =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'upload_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
export const write_file =
|
||||
async (conn, handle, remote_path, offset, buffer, optional_thread_id) => {
|
||||
export const write_file = async (
|
||||
conn,
|
||||
handle,
|
||||
remote_path,
|
||||
offset,
|
||||
buffer,
|
||||
optional_thread_id
|
||||
) => {
|
||||
try {
|
||||
const request = new packet();
|
||||
request.encode_utf8(remote_path);
|
||||
@@ -471,8 +562,11 @@ export const write_file =
|
||||
request.encode_ui64(offset);
|
||||
request.encode_ui64(handle);
|
||||
|
||||
const response =
|
||||
await conn.send('::RemoteFUSEWrite', request, optional_thread_id);
|
||||
const response = await conn.send(
|
||||
'::RemoteFUSEWrite',
|
||||
request,
|
||||
optional_thread_id
|
||||
);
|
||||
response.decode_ui32(); // Service flags
|
||||
|
||||
const result = response.decode_i32();
|
||||
@@ -483,4 +577,4 @@ export const write_file =
|
||||
} catch (err) {
|
||||
return Promise.reject(new Error(`'write_file' failed: ${err}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
export const is_big_endian_system =
|
||||
new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x12;
|
||||
new Uint8Array(new Uint32Array([0x12345678]).buffer)[0] === 0x12;
|
||||
|
||||
export const is_little_endian_system =
|
||||
new Uint8Array(new Uint32Array([ 0x12345678 ]).buffer)[0] === 0x78;
|
||||
new Uint8Array(new Uint32Array([0x12345678]).buffer)[0] === 0x78;
|
||||
|
||||
export const i8_to_ui8_array = num => {
|
||||
export const i8_to_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
@@ -21,7 +21,7 @@ export const ui8_array_to_i8 = (ar, offset) => {
|
||||
return buffer.readInt8(0);
|
||||
};
|
||||
|
||||
export const ui8_to_ui8_array = num => {
|
||||
export const ui8_to_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
@@ -38,7 +38,7 @@ export const ui8_array_to_ui8 = (ar, offset) => {
|
||||
return buffer.readUInt8(0);
|
||||
};
|
||||
|
||||
export const i16_to_be_ui8_array = num => {
|
||||
export const i16_to_be_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
@@ -56,7 +56,7 @@ export const be_ui8_array_to_i16 = (ar, offset) => {
|
||||
return buffer.readInt16BE(0);
|
||||
};
|
||||
|
||||
export const ui16_to_be_ui8_array = num => {
|
||||
export const ui16_to_be_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
@@ -74,7 +74,7 @@ export const be_ui8_array_to_ui16 = (ar, offset) => {
|
||||
return buffer.readUInt16BE(0);
|
||||
};
|
||||
|
||||
export const i32_to_be_ui8_array = num => {
|
||||
export const i32_to_be_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
@@ -92,7 +92,7 @@ export const be_ui8_array_to_i32 = (ar, offset) => {
|
||||
return buffer.readInt32BE(0);
|
||||
};
|
||||
|
||||
export const ui32_to_be_ui8_array = num => {
|
||||
export const ui32_to_be_ui8_array = (num) => {
|
||||
if (typeof num === 'string' || num instanceof String) {
|
||||
num = parseInt(num, 10);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const {v4: uuidv4} = require('uuid');
|
||||
import _package_json from '../../package.json'
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
import _package_json from '../../package.json';
|
||||
|
||||
export const instance_id = uuidv4();
|
||||
export const package_json = _package_json;
|
||||
export const get_version = () => process.env.REPERTORY_JS_FORCE_VERSION || _package_json.version;
|
||||
export const get_version = () =>
|
||||
process.env.REPERTORY_JS_FORCE_VERSION || _package_json.version;
|
||||
|
||||
@@ -63,7 +63,7 @@
|
||||
*
|
||||
* @constructor
|
||||
*/
|
||||
var JSChaCha20 = function(key, nonce, counter) {
|
||||
var JSChaCha20 = function (key, nonce, counter) {
|
||||
if (typeof counter === 'undefined') {
|
||||
counter = 0;
|
||||
}
|
||||
@@ -175,7 +175,7 @@ var JSChaCha20 = function(key, nonce, counter) {
|
||||
this._byteCounter = 0;
|
||||
};
|
||||
|
||||
JSChaCha20.prototype._chacha = function() {
|
||||
JSChaCha20.prototype._chacha = function () {
|
||||
var mix = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
var i = 0;
|
||||
var b = 0;
|
||||
@@ -221,7 +221,7 @@ JSChaCha20.prototype._chacha = function() {
|
||||
* @param {number} d
|
||||
* @private
|
||||
*/
|
||||
JSChaCha20.prototype._quarterround = function(output, a, b, c, d) {
|
||||
JSChaCha20.prototype._quarterround = function (output, a, b, c, d) {
|
||||
output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 16);
|
||||
output[b] = this._rotl(output[b] ^ (output[c] += output[d]), 12);
|
||||
output[d] = this._rotl(output[d] ^ (output[a] += output[b]), 8);
|
||||
@@ -242,7 +242,7 @@ JSChaCha20.prototype._quarterround = function(output, a, b, c, d) {
|
||||
* @return {number}
|
||||
* @private
|
||||
*/
|
||||
JSChaCha20.prototype._get32 = function(data, index) {
|
||||
JSChaCha20.prototype._get32 = function (data, index) {
|
||||
return (
|
||||
data[index++] ^
|
||||
(data[index++] << 8) ^
|
||||
@@ -259,7 +259,7 @@ JSChaCha20.prototype._get32 = function(data, index) {
|
||||
* @return {number}
|
||||
* @private
|
||||
*/
|
||||
JSChaCha20.prototype._rotl = function(data, shift) {
|
||||
JSChaCha20.prototype._rotl = function (data, shift) {
|
||||
return (data << shift) | (data >>> (32 - shift));
|
||||
};
|
||||
|
||||
@@ -269,7 +269,7 @@ JSChaCha20.prototype._rotl = function(data, shift) {
|
||||
* @param {Uint8Array} data
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
JSChaCha20.prototype.encrypt = function(data) {
|
||||
JSChaCha20.prototype.encrypt = function (data) {
|
||||
return this._update(data);
|
||||
};
|
||||
|
||||
@@ -279,7 +279,7 @@ JSChaCha20.prototype.encrypt = function(data) {
|
||||
* @param {Uint8Array} data
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
JSChaCha20.prototype.decrypt = function(data) {
|
||||
JSChaCha20.prototype.decrypt = function (data) {
|
||||
return this._update(data);
|
||||
};
|
||||
|
||||
@@ -290,7 +290,7 @@ JSChaCha20.prototype.decrypt = function(data) {
|
||||
* @return {Uint8Array}
|
||||
* @private
|
||||
*/
|
||||
JSChaCha20.prototype._update = function(data) {
|
||||
JSChaCha20.prototype._update = function (data) {
|
||||
if (!(data instanceof Uint8Array) || data.length === 0) {
|
||||
throw new Error('Data should be type of bytes (Uint8Array) and not empty!');
|
||||
}