first commit

This commit is contained in:
Stefan Hacker
2026-04-03 09:38:48 +02:00
commit 37ad745546
47450 changed files with 3120798 additions and 0 deletions
+20
View File
@@ -0,0 +1,20 @@
import type * as misc from 'memfs/lib/node/types/misc';
import type { Nfsv4Client } from './types';
import * as msg from '../messages';
/**
 * Lazily-loaded directory handle over an NFSv4 directory, implementing
 * memfs' `IDir` interface. Exposes both a promise API and callback-style
 * overloads for `close`/`read`, plus sync variants and async iteration.
 */
export declare class NfsFsDir implements misc.IDir {
    /** Path of the directory this handle refers to. */
    readonly path: string;
    private readonly nfs;
    private readonly operations;
    private entries;
    private position;
    private closed;
    constructor(path: string, nfs: Nfsv4Client, operations: msg.Nfsv4Request[]);
    private ensureLoaded;
    close(): Promise<void>;
    close(callback?: (err?: Error) => void): Promise<void>;
    closeSync(): void;
    read(): Promise<misc.IDirent | null>;
    read(callback?: (err: Error | null, dir?: misc.IDirent | null) => void): Promise<misc.IDirent | null>;
    readSync(): misc.IDirent | null;
    [Symbol.asyncIterator](): AsyncIterableIterator<misc.IDirent>;
}
+129
View File
@@ -0,0 +1,129 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NfsFsDir = void 0;
const NfsFsDirent_1 = require("./NfsFsDirent");
const builder_1 = require("../builder");
const Reader_1 = require("@jsonjoy.com/buffers/lib/Reader");
const XdrDecoder_1 = require("../../../xdr/XdrDecoder");
class NfsFsDir {
constructor(path, nfs, operations) {
this.path = path;
this.nfs = nfs;
this.operations = operations;
this.entries = [];
this.position = 0;
this.closed = false;
}
async ensureLoaded() {
if (this.entries.length > 0 || this.closed)
return;
const attrNums = [1];
const attrMask = [];
for (const attrNum of attrNums) {
const wordIndex = Math.floor(attrNum / 32);
const bitIndex = attrNum % 32;
while (attrMask.length <= wordIndex)
attrMask.push(0);
attrMask[wordIndex] |= 1 << bitIndex;
}
const operations = [...this.operations];
operations.push(builder_1.nfs.READDIR(attrMask));
const response = await this.nfs.compound(operations);
if (response.status !== 0)
throw new Error(`Failed to read directory: ${response.status}`);
const readdirRes = response.resarray[response.resarray.length - 1];
if (readdirRes.status !== 0 || !readdirRes.resok)
throw new Error(`Failed to read directory: ${readdirRes.status}`);
const entryList = readdirRes.resok.entries;
for (let i = 0; i < entryList.length; i++) {
const entry = entryList[i];
const name = entry.name;
const fattr = entry.attrs;
const reader = new Reader_1.Reader();
reader.reset(fattr.attrVals);
const xdr = new XdrDecoder_1.XdrDecoder(reader);
let fileType = 1;
const returnedMask = fattr.attrmask.mask;
for (let i = 0; i < returnedMask.length; i++) {
const word = returnedMask[i];
if (!word)
continue;
for (let bit = 0; bit < 32; bit++) {
if (!(word & (1 << bit)))
continue;
const attrNum = i * 32 + bit;
if (attrNum === 1) {
fileType = xdr.readUnsignedInt();
}
}
}
this.entries.push(new NfsFsDirent_1.NfsFsDirent(name, fileType));
}
}
async close(callback) {
this.closed = true;
this.entries = [];
this.position = 0;
if (callback) {
try {
callback();
}
catch (err) {
callback(err);
}
}
}
closeSync() {
this.closed = true;
this.entries = [];
this.position = 0;
}
async read(callback) {
try {
if (this.closed) {
const err = new Error('Directory is closed');
if (callback) {
callback(err, null);
return null;
}
throw err;
}
await this.ensureLoaded();
if (this.position >= this.entries.length) {
if (callback) {
callback(null, null);
}
return null;
}
const entry = this.entries[this.position++];
if (callback) {
callback(null, entry);
}
return entry;
}
catch (err) {
if (callback) {
callback(err, null);
return null;
}
throw err;
}
}
readSync() {
if (this.closed) {
throw new Error('Directory is closed');
}
if (this.position >= this.entries.length) {
return null;
}
return this.entries[this.position++];
}
async *[Symbol.asyncIterator]() {
await this.ensureLoaded();
for (const entry of this.entries) {
yield entry;
}
}
}
exports.NfsFsDir = NfsFsDir;
//# sourceMappingURL=NfsFsDir.js.map
@@ -0,0 +1 @@
{"version":3,"file":"NfsFsDir.js","sourceRoot":"","sources":["../../../../src/nfs/v4/client/NfsFsDir.ts"],"names":[],"mappings":";;;AAEA,+CAA0C;AAC1C,wCAA+B;AAG/B,4DAAuD;AACvD,wDAAmD;AAKnD,MAAa,QAAQ;IAKnB,YACkB,IAAY,EACX,GAAgB,EAChB,UAA8B;QAF/B,SAAI,GAAJ,IAAI,CAAQ;QACX,QAAG,GAAH,GAAG,CAAa;QAChB,eAAU,GAAV,UAAU,CAAoB;QAPzC,YAAO,GAAkB,EAAE,CAAC;QAC5B,aAAQ,GAAW,CAAC,CAAC;QACrB,WAAM,GAAY,KAAK,CAAC;IAM7B,CAAC;IAEI,KAAK,CAAC,YAAY;QACxB,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,CAAC,MAAM;YAAE,OAAO;QACnD,MAAM,QAAQ,GAAG,GAAuB,CAAC;QACzC,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;YAC/B,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,CAAC;YAC3C,MAAM,QAAQ,GAAG,OAAO,GAAG,EAAE,CAAC;YAC9B,OAAO,QAAQ,CAAC,MAAM,IAAI,SAAS;gBAAE,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YACtD,QAAQ,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,QAAQ,CAAC;QACvC,CAAC;QACD,MAAM,UAAU,GAAG,CAAC,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC;QACxC,UAAU,CAAC,IAAI,CAAC,aAAG,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;QACvC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QACrD,IAAI,QAAQ,CAAC,MAAM,MAAsB;YAAE,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;QAC3G,MAAM,UAAU,GAAG,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAA6B,CAAC;QAC/F,IAAI,UAAU,CAAC,MAAM,MAAsB,IAAI,CAAC,UAAU,CAAC,KAAK;YAC9D,MAAM,IAAI,KAAK,CAAC,6BAA6B,UAAU,CAAC,MAAM,EAAE,CAAC,CAAC;QACpE,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAC1C,MAAM,KAAK,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;YAC3B,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC;YACxB,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAC;YAC5B,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;YAC7B,MAAM,GAAG,GAAG,IAAI,uBAAU,CAAC,MAAM,CAAC,CAAC;YACnC,IAAI,QAAQ,IAAoB,CAAC;YACjC,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC;YACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC7C,MAAM,IAAI,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;gBAC7B,IAAI,CAAC,IAAI;oBAAE,SAAS;g
BACpB,KAAK,IAAI,GAAG,GAAG,CAAC,EAAE,GAAG,GAAG,EAAE,EAAE,GAAG,EAAE,EAAE,CAAC;oBAClC,IAAI,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC;wBAAE,SAAS;oBACnC,MAAM,OAAO,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;oBAC7B,IAAI,OAAO,MAA0B,EAAE,CAAC;wBACtC,QAAQ,GAAG,GAAG,CAAC,eAAe,EAAE,CAAC;oBACnC,CAAC;gBACH,CAAC;YACH,CAAC;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,yBAAW,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QACrD,CAAC;IACH,CAAC;IAIM,KAAK,CAAC,KAAK,CAAC,QAAgC;QACjD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;QACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QAClB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC;QAClB,IAAI,QAAQ,EAAE,CAAC;YACb,IAAI,CAAC;gBACH,QAAQ,EAAE,CAAC;YACb,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,QAAQ,CAAC,GAAY,CAAC,CAAC;YACzB,CAAC;QACH,CAAC;IACH,CAAC;IAEM,SAAS;QACd,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;QACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QAClB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC;IACpB,CAAC;IAIM,KAAK,CAAC,IAAI,CAAC,QAAiE;QACjF,IAAI,CAAC;YACH,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;gBAChB,MAAM,GAAG,GAAG,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;gBAC7C,IAAI,QAAQ,EAAE,CAAC;oBACb,QAAQ,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpB,OAAO,IAAI,CAAC;gBACd,CAAC;gBACD,MAAM,GAAG,CAAC;YACZ,CAAC;YACD,MAAM,IAAI,CAAC,YAAY,EAAE,CAAC;YAC1B,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;gBACzC,IAAI,QAAQ,EAAE,CAAC;oBACb,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;gBACvB,CAAC;gBACD,OAAO,IAAI,CAAC;YACd,CAAC;YACD,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YAC5C,IAAI,QAAQ,EAAE,CAAC;gBACb,QAAQ,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;YACxB,CAAC;YACD,OAAO,KAAK,CAAC;QACf,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,IAAI,QAAQ,EAAE,CAAC;gBACb,QAAQ,CAAC,GAAY,EAAE,IAAI,CAAC,CAAC;gBAC7B,OAAO,IAAI,CAAC;YACd,CAAC;YACD,MAAM,GAAG,CAAC;QACZ,CAAC;IACH,CAAC;IAEM,QAAQ;QACb,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;QACzC,CAAC;QACD,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC;YACzC,OAAO,IAAI,CAAC;QACd,CAAC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACvC,CAAC;IAEM,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC;QAClC,MAAM,IAAI,CAAC,YAAY,EAAE,
CAAC;QAC1B,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE,CAAC;YACjC,MAAM,KAAK,CAAC;QACd,CAAC;IACH,CAAC;CACF;AA3HD,4BA2HC"}
+14
View File
@@ -0,0 +1,14 @@
import type * as misc from 'memfs/lib/node/types/misc';
import { Nfsv4FType } from '../constants';
/**
 * Directory entry produced from an NFSv4 READDIR result, implementing
 * memfs' `IDirent`. Type predicates are driven by the NFSv4 file-type
 * attribute captured at construction.
 */
export declare class NfsFsDirent implements misc.IDirent {
    /** Entry name within its parent directory. */
    name: string;
    private type;
    constructor(name: string, type: Nfsv4FType);
    isDirectory(): boolean;
    isFile(): boolean;
    isBlockDevice(): boolean;
    isCharacterDevice(): boolean;
    isSymbolicLink(): boolean;
    isFIFO(): boolean;
    isSocket(): boolean;
}
+32
View File
@@ -0,0 +1,32 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NfsFsDirent = void 0;
class NfsFsDirent {
    /**
     * Minimal dirent backed by an NFSv4 file-type attribute.
     * @param {string} name Entry name within its parent directory.
     * @param {number} fileType NFSv4 file-type value (nfs_ftype4).
     */
    constructor(name, fileType) {
        this.name = name;
        this.type = fileType;
    }
    // nfs_ftype4 values checked below:
    // 1=REG, 2=DIR, 3=BLK, 4=CHR, 5=LNK, 6=SOCK, 7=FIFO.
    isFile() { return this.type === 1; }
    isDirectory() { return this.type === 2; }
    isBlockDevice() { return this.type === 3; }
    isCharacterDevice() { return this.type === 4; }
    isSymbolicLink() { return this.type === 5; }
    isSocket() { return this.type === 6; }
    isFIFO() { return this.type === 7; }
}
exports.NfsFsDirent = NfsFsDirent;
//# sourceMappingURL=NfsFsDirent.js.map
@@ -0,0 +1 @@
{"version":3,"file":"NfsFsDirent.js","sourceRoot":"","sources":["../../../../src/nfs/v4/client/NfsFsDirent.ts"],"names":[],"mappings":";;;AAMA,MAAa,WAAW;IACtB,YACS,IAAY,EACX,IAAgB;QADjB,SAAI,GAAJ,IAAI,CAAQ;QACX,SAAI,GAAJ,IAAI,CAAY;IACvB,CAAC;IAEJ,WAAW;QACT,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,aAAa;QACX,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC,IAAI,MAAuB,CAAC;IAC1C,CAAC;IAED,QAAQ;QACN,OAAO,IAAI,CAAC,IAAI,MAAuB,CAAC;IAC1C,CAAC;CACF;AAjCD,kCAiCC"}
@@ -0,0 +1,34 @@
/// <reference types="node" />
/// <reference types="node" />
import { EventEmitter } from 'events';
import * as structs from '../structs';
import type * as misc from 'memfs/lib/node/types/misc';
import type * as opts from 'memfs/lib/node/types/options';
import type { Nfsv4FsClient } from './Nfsv4FsClient';
/**
 * Open-file handle over an NFSv4 OPEN stateid, implementing memfs'
 * `IFileHandle`. READ/WRITE are issued directly through the NFSv4 compound
 * API with the handle's stateid; most other operations delegate to the
 * owning `Nfsv4FsClient` by path. Extends EventEmitter so callers can
 * observe 'close', mirroring Node's FileHandle.
 */
export declare class NfsFsFileHandle extends EventEmitter implements misc.IFileHandle {
    /** Path the handle was opened with; delegated operations reuse it. */
    readonly path: string;
    private readonly client;
    private readonly stateid;
    private readonly openOwner;
    /** Numeric pseudo-fd; also used as the async-hooks id. */
    readonly fd: number;
    private closed;
    constructor(fd: number, path: string, client: Nfsv4FsClient, stateid: structs.Nfsv4Stateid, openOwner: structs.Nfsv4OpenOwner);
    getAsyncId(): number;
    close(): Promise<void>;
    stat(options?: opts.IStatOptions): Promise<misc.IStats>;
    appendFile(data: misc.TData, options?: opts.IAppendFileOptions | string): Promise<void>;
    chmod(mode: misc.TMode): Promise<void>;
    chown(uid: number, gid: number): Promise<void>;
    datasync(): Promise<void>;
    read(buffer: Buffer | Uint8Array, offset: number, length: number, position?: number | null): Promise<misc.TFileHandleReadResult>;
    readFile(options?: opts.IReadFileOptions | string): Promise<misc.TDataOut>;
    truncate(len?: number): Promise<void>;
    utimes(atime: misc.TTime, mtime: misc.TTime): Promise<void>;
    write(buffer: Buffer | ArrayBufferView | DataView, offset?: number, length?: number, position?: number | null): Promise<misc.TFileHandleWriteResult>;
    writeFile(data: misc.TData, options?: opts.IWriteFileOptions): Promise<void>;
    readv(buffers: ArrayBufferView[], position?: number | null): Promise<misc.TFileHandleReadvResult>;
    writev(buffers: ArrayBufferView[], position?: number | null): Promise<misc.TFileHandleWritevResult>;
    readableWebStream(options?: opts.IReadableWebStreamOptions): ReadableStream;
    createReadStream(options?: opts.IFileHandleReadStreamOptions): misc.IReadStream;
    createWriteStream(options?: opts.IFileHandleWriteStreamOptions): misc.IWriteStream;
}
@@ -0,0 +1,268 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NfsFsFileHandle = void 0;
const events_1 = require("events");
const stream_1 = require("stream");
const builder_1 = require("../builder");
/**
 * Open-file handle over an NFSv4 OPEN stateid, implementing memfs'
 * IFileHandle. READ/WRITE go directly through the NFSv4 compound API using
 * the handle's stateid; path-based operations (stat, chmod, readFile, ...)
 * delegate to the owning client. Extends EventEmitter so 'close' can be
 * observed, mirroring Node's FileHandle.
 */
class NfsFsFileHandle extends events_1.EventEmitter {
    constructor(fd, path, client, stateid, openOwner) {
        super();
        this.path = path;
        this.client = client;
        this.stateid = stateid;
        this.openOwner = openOwner;
        this.closed = false;
        this.fd = fd;
    }
    // Async-hooks id; reuses the numeric fd like Node's FileHandle.
    getAsyncId() {
        return this.fd;
    }
    // Releases the NFSv4 open state (CLOSE) exactly once, then emits 'close'.
    // Subsequent calls are no-ops.
    async close() {
        if (this.closed)
            return;
        this.closed = true;
        await this.client.closeStateid(this.openOwner, this.stateid);
        this.emit('close');
    }
    // Delegates to the client by path (not by stateid).
    async stat(options) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.stat(this.path, options);
    }
    // Delegates to the client by path.
    async appendFile(data, options) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.appendFile(this.path, data, options);
    }
    // Delegates to the client by path.
    async chmod(mode) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.chmod(this.path, mode);
    }
    // Delegates to the client by path.
    async chown(uid, gid) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.chown(this.path, uid, gid);
    }
    // No-op beyond the closed check. NOTE(review): presumably acceptable
    // because writes are issued with stable_how=2 (see write()) — confirm.
    async datasync() {
        if (this.closed)
            throw new Error('File handle is closed');
    }
    // Reads up to `length` bytes into `buffer` starting at `offset`.
    // NOTE(review): a null/undefined `position` reads from offset 0, not from
    // a tracked file cursor — confirm this matches callers' expectations.
    async read(buffer, offset, length, position) {
        if (this.closed)
            throw new Error('File handle is closed');
        const readPos = position !== null && position !== undefined ? BigInt(position) : BigInt(0);
        const readOps = [builder_1.nfs.READ(readPos, length, this.stateid)];
        const response = await this.client.fs.compound(readOps);
        if (response.status !== 0) {
            throw new Error(`Failed to read file: ${response.status}`);
        }
        const readRes = response.resarray[0];
        if (readRes.status !== 0 || !readRes.resok) {
            throw new Error(`Failed to read file: ${readRes.status}`);
        }
        const data = readRes.resok.data;
        // The server may return fewer bytes than requested; copy what arrived.
        const bytesToCopy = Math.min(data.length, length);
        for (let i = 0; i < bytesToCopy; i++) {
            buffer[offset + i] = data[i];
        }
        return { bytesRead: bytesToCopy, buffer };
    }
    // Delegates to the client by path.
    async readFile(options) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.readFile(this.path, options);
    }
    // Delegates to the client by path.
    async truncate(len) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.truncate(this.path, len);
    }
    // Delegates to the client by path.
    async utimes(atime, mtime) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.utimes(this.path, atime, mtime);
    }
    // Writes `length` bytes of `buffer` (from `offset`) at `position`
    // (default 0). Returns the server-reported byte count.
    async write(buffer, offset, length, position) {
        if (this.closed)
            throw new Error('File handle is closed');
        const actualOffset = offset ?? 0;
        const actualLength = length ?? buffer.byteLength - actualOffset;
        const writePos = position !== null && position !== undefined ? BigInt(position) : BigInt(0);
        let data;
        if (buffer instanceof Uint8Array) {
            // slice() copies, so the caller's buffer can be reused immediately.
            data = Uint8Array.prototype.slice.call(buffer, actualOffset, actualOffset + actualLength);
        }
        else if (Buffer.isBuffer(buffer)) {
            // NOTE(review): unreachable — Node Buffers are Uint8Array subclasses,
            // so they take the branch above. Also note this branch (and the two
            // below) creates a VIEW, not a copy, unlike the first branch.
            data = new Uint8Array(buffer.buffer, buffer.byteOffset + actualOffset, actualLength);
        }
        else if (buffer instanceof DataView) {
            data = new Uint8Array(buffer.buffer, buffer.byteOffset + actualOffset, actualLength);
        }
        else {
            data = new Uint8Array(buffer.buffer, buffer.byteOffset + actualOffset, actualLength);
        }
        // 2 is the NFSv4 stable_how value — presumably FILE_SYNC4; confirm
        // against the protocol constants module.
        const writeOps = [builder_1.nfs.WRITE(this.stateid, writePos, 2, data)];
        const response = await this.client.fs.compound(writeOps);
        if (response.status !== 0) {
            throw new Error(`Failed to write file: ${response.status}`);
        }
        const writeRes = response.resarray[0];
        if (writeRes.status !== 0 || !writeRes.resok) {
            throw new Error(`Failed to write file: ${writeRes.status}`);
        }
        const resultBuffer = buffer instanceof Uint8Array || Buffer.isBuffer(buffer) ? buffer : new Uint8Array(buffer.buffer);
        return { bytesWritten: writeRes.resok.count, buffer: resultBuffer };
    }
    // Delegates to the client by path.
    async writeFile(data, options) {
        if (this.closed)
            throw new Error('File handle is closed');
        return this.client.writeFile(this.path, data, options);
    }
    // Fills each buffer in turn with sequential READs starting at `position`
    // (default 0); stops early on EOF or a short read.
    async readv(buffers, position) {
        if (this.closed)
            throw new Error('File handle is closed');
        let currentPosition = position !== null && position !== undefined ? BigInt(position) : BigInt(0);
        let totalBytesRead = 0;
        for (const buffer of buffers) {
            const readOps = [builder_1.nfs.READ(currentPosition, buffer.byteLength, this.stateid)];
            const response = await this.client.fs.compound(readOps);
            if (response.status !== 0) {
                throw new Error(`Failed to read file: ${response.status}`);
            }
            const readRes = response.resarray[0];
            if (readRes.status !== 0 || !readRes.resok) {
                throw new Error(`Failed to read file: ${readRes.status}`);
            }
            const data = readRes.resok.data;
            const bytesToCopy = Math.min(data.length, buffer.byteLength);
            // Copy through a Uint8Array view so any ArrayBufferView works.
            const uint8View = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);
            for (let i = 0; i < bytesToCopy; i++) {
                uint8View[i] = data[i];
            }
            totalBytesRead += bytesToCopy;
            currentPosition += BigInt(bytesToCopy);
            if (readRes.resok.eof || bytesToCopy < buffer.byteLength)
                break;
        }
        return { bytesRead: totalBytesRead, buffers };
    }
    // Writes each buffer with sequential WRITEs starting at `position`
    // (default 0), advancing by the server-reported count each time.
    async writev(buffers, position) {
        if (this.closed)
            throw new Error('File handle is closed');
        let currentPosition = position !== null && position !== undefined ? BigInt(position) : BigInt(0);
        let totalBytesWritten = 0;
        for (const buffer of buffers) {
            const data = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength);
            const writeOps = [builder_1.nfs.WRITE(this.stateid, currentPosition, 2, data)];
            const response = await this.client.fs.compound(writeOps);
            if (response.status !== 0) {
                throw new Error(`Failed to write file: ${response.status}`);
            }
            const writeRes = response.resarray[0];
            if (writeRes.status !== 0 || !writeRes.resok) {
                throw new Error(`Failed to write file: ${writeRes.status}`);
            }
            totalBytesWritten += writeRes.resok.count;
            currentPosition += BigInt(writeRes.resok.count);
        }
        return { bytesWritten: totalBytesWritten, buffers };
    }
    // Wraps createReadStream() in a WHATWG ReadableStream via Readable.toWeb.
    readableWebStream(options) {
        if (this.closed)
            throw new Error('File handle is closed');
        const stream = this.createReadStream(options);
        return stream_1.Readable.toWeb(stream);
    }
    // Pull-based read stream over this handle. The `reading` flag guards
    // against re-entrant _read calls while an async pull loop is in flight.
    createReadStream(options) {
        if (this.closed)
            throw new Error('File handle is closed');
        const start = options?.start ?? 0;
        const end = options?.end;
        const highWaterMark = options?.highWaterMark ?? 64 * 1024;
        let position = typeof start === 'number' ? start : 0;
        const endPosition = typeof end === 'number' ? end : Infinity;
        let reading = false;
        const self = this;
        const stream = new stream_1.Readable({
            highWaterMark,
            async read(size) {
                if (reading)
                    return;
                reading = true;
                try {
                    while (true) {
                        // End of requested range → EOF.
                        if (position >= endPosition) {
                            this.push(null);
                            break;
                        }
                        const bytesToRead = Math.min(size, endPosition - position);
                        if (bytesToRead <= 0) {
                            this.push(null);
                            break;
                        }
                        const buffer = Buffer.alloc(bytesToRead);
                        const result = await self.read(buffer, 0, bytesToRead, position);
                        if (result.bytesRead === 0) {
                            this.push(null);
                            break;
                        }
                        position += result.bytesRead;
                        const chunk = buffer.slice(0, result.bytesRead);
                        // push() returning false means back-pressure: stop until
                        // the next _read call.
                        if (!this.push(chunk))
                            break;
                        // Short read is treated as EOF.
                        if (result.bytesRead < bytesToRead) {
                            this.push(null);
                            break;
                        }
                    }
                }
                catch (err) {
                    this.destroy(err);
                }
                finally {
                    reading = false;
                }
            },
        });
        stream.path = this.path;
        return stream;
    }
    // Write stream over this handle; `position` advances by the bytes the
    // server acknowledged for each chunk/batch.
    createWriteStream(options) {
        if (this.closed)
            throw new Error('File handle is closed');
        const start = options?.start ?? 0;
        const highWaterMark = options?.highWaterMark ?? 64 * 1024;
        let position = typeof start === 'number' ? start : 0;
        const self = this;
        const stream = new stream_1.Writable({
            highWaterMark,
            async write(chunk, encoding, callback) {
                try {
                    const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
                    const result = await self.write(buffer, 0, buffer.length, position);
                    position += result.bytesWritten;
                    callback();
                }
                catch (err) {
                    callback(err);
                }
            },
            async writev(chunks, callback) {
                try {
                    const buffers = chunks.map(({ chunk }) => (Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)));
                    const result = await self.writev(buffers, position);
                    position += result.bytesWritten;
                    callback();
                }
                catch (err) {
                    callback(err);
                }
            },
        });
        stream.path = this.path;
        return stream;
    }
}
exports.NfsFsFileHandle = NfsFsFileHandle;
//# sourceMappingURL=NfsFsFileHandle.js.map
File diff suppressed because one or more lines are too long
+31
View File
@@ -0,0 +1,31 @@
import type * as misc from 'memfs/lib/node/types/misc';
import { Nfsv4FType } from '../constants';
/**
 * Node-style stats object built from NFSv4 file attributes, implementing
 * memfs' `IStats<number>`. All fields are plain data set at construction;
 * the type predicates are driven by the NFSv4 file-type attribute.
 */
export declare class NfsFsStats implements misc.IStats<number> {
    uid: number;
    gid: number;
    rdev: number;
    blksize: number;
    ino: number;
    size: number;
    blocks: number;
    atime: Date;
    mtime: Date;
    ctime: Date;
    birthtime: Date;
    atimeMs: number;
    mtimeMs: number;
    ctimeMs: number;
    birthtimeMs: number;
    dev: number;
    mode: number;
    nlink: number;
    private type;
    constructor(uid: number, gid: number, rdev: number, blksize: number, ino: number, size: number, blocks: number, atime: Date, mtime: Date, ctime: Date, birthtime: Date, atimeMs: number, mtimeMs: number, ctimeMs: number, birthtimeMs: number, dev: number, mode: number, nlink: number, type: Nfsv4FType);
    isDirectory(): boolean;
    isFile(): boolean;
    isBlockDevice(): boolean;
    isCharacterDevice(): boolean;
    isSymbolicLink(): boolean;
    isFIFO(): boolean;
    isSocket(): boolean;
}
+49
View File
@@ -0,0 +1,49 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.NfsFsStats = void 0;
class NfsFsStats {
constructor(uid, gid, rdev, blksize, ino, size, blocks, atime, mtime, ctime, birthtime, atimeMs, mtimeMs, ctimeMs, birthtimeMs, dev, mode, nlink, type) {
this.uid = uid;
this.gid = gid;
this.rdev = rdev;
this.blksize = blksize;
this.ino = ino;
this.size = size;
this.blocks = blocks;
this.atime = atime;
this.mtime = mtime;
this.ctime = ctime;
this.birthtime = birthtime;
this.atimeMs = atimeMs;
this.mtimeMs = mtimeMs;
this.ctimeMs = ctimeMs;
this.birthtimeMs = birthtimeMs;
this.dev = dev;
this.mode = mode;
this.nlink = nlink;
this.type = type;
}
isDirectory() {
return this.type === 2;
}
isFile() {
return this.type === 1;
}
isBlockDevice() {
return this.type === 3;
}
isCharacterDevice() {
return this.type === 4;
}
isSymbolicLink() {
return this.type === 5;
}
isFIFO() {
return this.type === 7;
}
isSocket() {
return this.type === 6;
}
}
exports.NfsFsStats = NfsFsStats;
//# sourceMappingURL=NfsFsStats.js.map
@@ -0,0 +1 @@
{"version":3,"file":"NfsFsStats.js","sourceRoot":"","sources":["../../../../src/nfs/v4/client/NfsFsStats.ts"],"names":[],"mappings":";;;AAMA,MAAa,UAAU;IACrB,YACS,GAAW,EACX,GAAW,EACX,IAAY,EACZ,OAAe,EACf,GAAW,EACX,IAAY,EACZ,MAAc,EACd,KAAW,EACX,KAAW,EACX,KAAW,EACX,SAAe,EACf,OAAe,EACf,OAAe,EACf,OAAe,EACf,WAAmB,EACnB,GAAW,EACX,IAAY,EACZ,KAAa,EACZ,IAAgB;QAlBjB,QAAG,GAAH,GAAG,CAAQ;QACX,QAAG,GAAH,GAAG,CAAQ;QACX,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,QAAG,GAAH,GAAG,CAAQ;QACX,SAAI,GAAJ,IAAI,CAAQ;QACZ,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAM;QACX,UAAK,GAAL,KAAK,CAAM;QACX,UAAK,GAAL,KAAK,CAAM;QACX,cAAS,GAAT,SAAS,CAAM;QACf,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QACf,gBAAW,GAAX,WAAW,CAAQ;QACnB,QAAG,GAAH,GAAG,CAAQ;QACX,SAAI,GAAJ,IAAI,CAAQ;QACZ,UAAK,GAAL,KAAK,CAAQ;QACZ,SAAI,GAAJ,IAAI,CAAY;IACvB,CAAC;IAEJ,WAAW;QACT,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,aAAa;QACX,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,iBAAiB;QACf,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,IAAI,MAAsB,CAAC;IACzC,CAAC;IAED,MAAM;QACJ,OAAO,IAAI,CAAC,IAAI,MAAuB,CAAC;IAC1C,CAAC;IAED,QAAQ;QACN,OAAO,IAAI,CAAC,IAAI,MAAuB,CAAC;IAC1C,CAAC;CACF;AAlDD,gCAkDC"}
@@ -0,0 +1,54 @@
/// <reference types="node" />
import type { NfsFsClient, Nfsv4Client } from './types';
import * as misc from 'memfs/lib/node/types/misc';
import * as opts from 'memfs/lib/node/types/options';
import * as structs from '../structs';
/**
 * Promises-style filesystem facade over an NFSv4 client, implementing the
 * memfs `NfsFsClient` surface. Each public operation is a readonly bound
 * function property (arrow functions assigned in the constructor), so they
 * can be destructured without losing `this`.
 */
export declare class Nfsv4FsClient implements NfsFsClient {
    /** Underlying NFSv4 client used to issue COMPOUND requests. */
    readonly fs: Nfsv4Client;
    constructor(fs: Nfsv4Client);
    private readonly openOwnerSeqids;
    private readonly defaultOpenOwnerId;
    private makeOpenOwnerKey;
    private nextOpenOwnerSeqid;
    private createDefaultOpenOwner;
    private attrNumsToBitmap;
    private parsePath;
    private navigateToParent;
    private navigateToPath;
    private encodeData;
    private decodeData;
    readonly closeStateid: (openOwner: structs.Nfsv4OpenOwner, stateid: structs.Nfsv4Stateid) => Promise<void>;
    readonly readFile: (id: misc.TFileHandle, options?: opts.IReadFileOptions | string) => Promise<misc.TDataOut>;
    readonly writeFile: (id: misc.TFileHandle, data: misc.TPromisesData, options?: opts.IWriteFileOptions) => Promise<void>;
    readonly stat: (path: misc.PathLike, options?: opts.IStatOptions) => Promise<misc.IStats>;
    readonly lstat: (path: misc.PathLike, options?: opts.IStatOptions) => Promise<misc.IStats>;
    readonly mkdir: (path: misc.PathLike, options?: misc.TMode | opts.IMkdirOptions) => Promise<string | undefined>;
    readonly readdir: (path: misc.PathLike, options?: opts.IReaddirOptions | string) => Promise<misc.TDataOut[] | misc.IDirent[]>;
    readonly appendFile: (path: misc.TFileHandle, data: misc.TData, options?: opts.IAppendFileOptions | string) => Promise<void>;
    readonly truncate: (path: misc.PathLike, len?: number) => Promise<void>;
    readonly unlink: (path: misc.PathLike) => Promise<void>;
    readonly rmdir: (path: misc.PathLike, options?: opts.IRmdirOptions) => Promise<void>;
    readonly rm: (path: misc.PathLike, options?: opts.IRmOptions) => Promise<void>;
    readonly access: (path: misc.PathLike, mode?: number) => Promise<void>;
    readonly rename: (oldPath: misc.PathLike, newPath: misc.PathLike) => Promise<void>;
    readonly copyFile: (src: misc.PathLike, dest: misc.PathLike, flags?: misc.TFlagsCopy) => Promise<void>;
    readonly realpath: (path: misc.PathLike, options?: opts.IRealpathOptions | string) => Promise<misc.TDataOut>;
    readonly link: (existingPath: misc.PathLike, newPath: misc.PathLike) => Promise<void>;
    readonly symlink: (target: misc.PathLike, path: misc.PathLike, type?: misc.symlink.Type) => Promise<void>;
    readonly utimes: (path: misc.PathLike, atime: misc.TTime, mtime: misc.TTime) => Promise<void>;
    readonly readlink: (path: misc.PathLike, options?: opts.IOptions) => Promise<misc.TDataOut>;
    readonly opendir: (path: misc.PathLike, options?: opts.IOpendirOptions) => Promise<misc.IDir>;
    readonly mkdtemp: (prefix: string, options?: opts.IOptions) => Promise<misc.TDataOut>;
    readonly chmod: (path: misc.PathLike, mode: misc.TMode) => Promise<void>;
    readonly chown: (path: misc.PathLike, uid: number, gid: number) => Promise<void>;
    readonly lchmod: (path: misc.PathLike, mode: misc.TMode) => Promise<void>;
    readonly lchown: (path: misc.PathLike, uid: number, gid: number) => Promise<void>;
    readonly lutimes: (path: misc.PathLike, atime: misc.TTime, mtime: misc.TTime) => Promise<void>;
    readonly open: (path: misc.PathLike, flags?: misc.TFlags, mode?: misc.TMode) => Promise<misc.IFileHandle>;
    readonly statfs: (path: misc.PathLike, options?: opts.IStatOptions) => Promise<misc.IStatFs>;
    readonly watch: (filename: misc.PathLike, options?: opts.IWatchOptions) => AsyncIterableIterator<{
        eventType: string;
        filename: string | Buffer;
    }>;
    readonly glob: (pattern: string, options?: opts.IGlobOptions) => Promise<string[]>;
}
+812
View File
@@ -0,0 +1,812 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Nfsv4FsClient = void 0;
const tslib_1 = require("tslib");
const builder_1 = require("../builder");
const structs = tslib_1.__importStar(require("../structs"));
const Writer_1 = require("@jsonjoy.com/buffers/lib/Writer");
const Reader_1 = require("@jsonjoy.com/buffers/lib/Reader");
const XdrEncoder_1 = require("../../../xdr/XdrEncoder");
const XdrDecoder_1 = require("../../../xdr/XdrDecoder");
const NfsFsStats_1 = require("./NfsFsStats");
const NfsFsDir_1 = require("./NfsFsDir");
const NfsFsDirent_1 = require("./NfsFsDirent");
const NfsFsFileHandle_1 = require("./NfsFsFileHandle");
class Nfsv4FsClient {
constructor(fs) {
this.fs = fs;
this.openOwnerSeqids = new Map();
this.defaultOpenOwnerId = new Uint8Array([1, 2, 3, 4]);
this.closeStateid = async (openOwner, stateid) => {
const key = this.makeOpenOwnerKey(openOwner);
const previousSeqid = this.openOwnerSeqids.get(key);
const seqid = this.nextOpenOwnerSeqid(openOwner);
const response = await this.fs.compound([builder_1.nfs.CLOSE(seqid, stateid)]);
if (response.status !== 0) {
if (previousSeqid !== undefined) {
this.openOwnerSeqids.set(key, previousSeqid);
}
else {
this.openOwnerSeqids.delete(key);
}
throw new Error(`Failed to close file: ${response.status}`);
}
};
this.readFile = async (id, options) => {
const encoding = typeof options === 'string' ? options : options?.encoding;
const path = typeof id === 'string' ? id : id.toString();
const parts = this.parsePath(path);
const operations = this.navigateToParent(parts);
const filename = parts[parts.length - 1];
const openOwner = this.createDefaultOpenOwner();
const claim = builder_1.nfs.OpenClaimNull(filename);
const openSeqid = this.nextOpenOwnerSeqid(openOwner);
operations.push(builder_1.nfs.OPEN(openSeqid, 1, 0, openOwner, builder_1.nfs.OpenHowNoCreate(), claim));
const openResponse = await this.fs.compound(operations);
if (openResponse.status !== 0) {
throw new Error(`Failed to open file: ${openResponse.status}`);
}
const openRes = openResponse.resarray[openResponse.resarray.length - 1];
if (openRes.status !== 0 || !openRes.resok) {
throw new Error(`Failed to open file: ${openRes.status}`);
}
const stateid = openRes.resok.stateid;
const chunks = [];
let offset = BigInt(0);
const chunkSize = 65536;
try {
while (true) {
const readResponse = await this.fs.compound([builder_1.nfs.READ(offset, chunkSize, stateid)]);
if (readResponse.status !== 0) {
throw new Error(`Failed to read file: ${readResponse.status}`);
}
const readRes = readResponse.resarray[0];
if (readRes.status !== 0 || !readRes.resok) {
throw new Error(`Failed to read file: ${readRes.status}`);
}
if (readRes.resok.data.length > 0) {
chunks.push(readRes.resok.data);
offset += BigInt(readRes.resok.data.length);
}
if (readRes.resok.eof)
break;
}
}
finally {
await this.closeStateid(openOwner, stateid);
}
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
const result = new Uint8Array(totalLength);
let position = 0;
for (const chunk of chunks) {
result.set(chunk, position);
position += chunk.length;
}
return this.decodeData(result, encoding);
};
this.writeFile = async (id, data, options) => {
const path = typeof id === 'string' ? id : id.toString();
const parts = this.parsePath(path);
const operations = this.navigateToParent(parts);
const filename = parts[parts.length - 1];
const openOwner = this.createDefaultOpenOwner();
const claim = builder_1.nfs.OpenClaimNull(filename);
const openSeqid = this.nextOpenOwnerSeqid(openOwner);
operations.push(builder_1.nfs.OPEN(openSeqid, 2, 0, openOwner, builder_1.nfs.OpenHowCreateUnchecked(), claim));
const writer = new Writer_1.Writer(16);
const xdr = new XdrEncoder_1.XdrEncoder(writer);
xdr.writeUnsignedHyper(BigInt(0));
const attrVals = writer.flush();
const truncateAttrs = builder_1.nfs.Fattr([4], attrVals);
const stateid = builder_1.nfs.Stateid(0, new Uint8Array(12));
operations.push(builder_1.nfs.SETATTR(stateid, truncateAttrs));
const openResponse = await this.fs.compound(operations);
if (openResponse.status !== 0) {
throw new Error(`Failed to open file: ${openResponse.status}`);
}
const openRes = openResponse.resarray[openResponse.resarray.length - 2];
if (openRes.status !== 0 || !openRes.resok) {
throw new Error(`Failed to open file: ${openRes.status}`);
}
const openStateid = openRes.resok.stateid;
const buffer = this.encodeData(data);
const chunkSize = 65536;
try {
let offset = BigInt(0);
for (let i = 0; i < buffer.length; i += chunkSize) {
const chunk = buffer.slice(i, Math.min(i + chunkSize, buffer.length));
const writeResponse = await this.fs.compound([
builder_1.nfs.WRITE(openStateid, offset, 2, chunk),
]);
if (writeResponse.status !== 0) {
throw new Error(`Failed to write file: ${writeResponse.status}`);
}
const writeRes = writeResponse.resarray[0];
if (writeRes.status !== 0 || !writeRes.resok) {
throw new Error(`Failed to write file: ${writeRes.status}`);
}
offset += BigInt(writeRes.resok.count);
}
}
finally {
await this.closeStateid(openOwner, openStateid);
}
};
this.stat = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const attrNums = [
1,
4,
20,
33,
35,
45,
47,
53,
52,
];
const attrMask = this.attrNumsToBitmap(attrNums);
operations.push(builder_1.nfs.GETATTR(attrMask));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to stat file: ${response.status}`);
}
const getattrRes = response.resarray[response.resarray.length - 1];
if (getattrRes.status !== 0 || !getattrRes.resok) {
throw new Error(`Failed to get attributes: ${getattrRes.status}`);
}
const fattr = getattrRes.resok.objAttributes;
const reader = new Reader_1.Reader();
reader.reset(fattr.attrVals);
const xdr = new XdrDecoder_1.XdrDecoder(reader);
let fileType = 1;
let size = 0;
let fileid = 0;
let mode = 0;
let nlink = 1;
let spaceUsed = 0;
let atime = new Date(0);
let mtime = new Date(0);
let ctime = new Date(0);
const returnedMask = fattr.attrmask.mask;
for (let i = 0; i < returnedMask.length; i++) {
const word = returnedMask[i];
if (!word)
continue;
for (let bit = 0; bit < 32; bit++) {
if (!(word & (1 << bit)))
continue;
const attrNum = i * 32 + bit;
switch (attrNum) {
case 1:
fileType = xdr.readUnsignedInt();
break;
case 4:
size = Number(xdr.readUnsignedHyper());
break;
case 20:
fileid = Number(xdr.readUnsignedHyper());
break;
case 33:
mode = xdr.readUnsignedInt();
break;
case 35:
nlink = xdr.readUnsignedInt();
break;
case 45:
spaceUsed = Number(xdr.readUnsignedHyper());
break;
case 47: {
const seconds = Number(xdr.readHyper());
const nseconds = xdr.readUnsignedInt();
atime = new Date(seconds * 1000 + nseconds / 1000000);
break;
}
case 53: {
const seconds = Number(xdr.readHyper());
const nseconds = xdr.readUnsignedInt();
mtime = new Date(seconds * 1000 + nseconds / 1000000);
break;
}
case 52: {
const seconds = Number(xdr.readHyper());
const nseconds = xdr.readUnsignedInt();
ctime = new Date(seconds * 1000 + nseconds / 1000000);
break;
}
}
}
}
const blocks = Math.ceil(spaceUsed / 512);
return new NfsFsStats_1.NfsFsStats(0, 0, 0, 4096, fileid, size, blocks, atime, mtime, ctime, mtime, atime.getTime(), mtime.getTime(), ctime.getTime(), mtime.getTime(), 0, mode, nlink, fileType);
};
this.lstat = async (path, options) => {
return this.stat(path, options);
};
this.mkdir = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
if (parts.length === 0) {
throw new Error('Cannot create root directory');
}
const operations = this.navigateToParent(parts);
const dirname = parts[parts.length - 1];
const createType = builder_1.nfs.CreateTypeDir();
const emptyAttrs = builder_1.nfs.Fattr([], new Uint8Array(0));
operations.push(builder_1.nfs.CREATE(createType, dirname, emptyAttrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to create directory: ${response.status}`);
}
const createRes = response.resarray[response.resarray.length - 1];
if (createRes.status !== 0) {
throw new Error(`Failed to create directory: ${createRes.status}`);
}
return undefined;
};
this.readdir = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const withFileTypes = typeof options === 'object' && options?.withFileTypes;
const encoding = typeof options === 'string' ? options : options?.encoding;
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const attrNums = withFileTypes ? [1] : [];
const attrMask = this.attrNumsToBitmap(attrNums);
operations.push(builder_1.nfs.READDIR(attrMask));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to read directory: ${response.status}`);
}
const readdirRes = response.resarray[response.resarray.length - 1];
if (readdirRes.status !== 0 || !readdirRes.resok) {
throw new Error(`Failed to read directory: ${readdirRes.status}`);
}
const entries = [];
const dirents = [];
const entryList = readdirRes.resok.entries;
for (let i = 0; i < entryList.length; i++) {
const entry = entryList[i];
const name = entry.name;
if (withFileTypes) {
const fattr = entry.attrs;
const reader = new Reader_1.Reader();
reader.reset(fattr.attrVals);
const xdr = new XdrDecoder_1.XdrDecoder(reader);
let fileType = 1;
const returnedMask = fattr.attrmask.mask;
for (let i = 0; i < returnedMask.length; i++) {
const word = returnedMask[i];
if (!word)
continue;
for (let bit = 0; bit < 32; bit++) {
if (!(word & (1 << bit)))
continue;
const attrNum = i * 32 + bit;
if (attrNum === 1) {
fileType = xdr.readUnsignedInt();
}
}
}
dirents.push(new NfsFsDirent_1.NfsFsDirent(name, fileType));
}
else {
entries.push(name);
}
}
if (withFileTypes) {
return dirents;
}
if (encoding && encoding !== 'utf8') {
return entries.map((name) => Buffer.from(name, 'utf8'));
}
return entries;
};
this.appendFile = async (path, data, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToParent(parts);
const filename = parts[parts.length - 1];
const openOwner = this.createDefaultOpenOwner();
const claim = builder_1.nfs.OpenClaimNull(filename);
const openSeqid = this.nextOpenOwnerSeqid(openOwner);
operations.push(builder_1.nfs.OPEN(openSeqid, 2, 0, openOwner, builder_1.nfs.OpenHowNoCreate(), claim));
const attrNums = [4];
const attrMask = this.attrNumsToBitmap(attrNums);
operations.push(builder_1.nfs.GETATTR(attrMask));
const openResponse = await this.fs.compound(operations);
if (openResponse.status !== 0) {
throw new Error(`Failed to open file: ${openResponse.status}`);
}
const openRes = openResponse.resarray[openResponse.resarray.length - 2];
if (openRes.status !== 0 || !openRes.resok) {
throw new Error(`Failed to open file: ${openRes.status}`);
}
const getattrRes = openResponse.resarray[openResponse.resarray.length - 1];
if (getattrRes.status !== 0 || !getattrRes.resok) {
throw new Error(`Failed to get attributes: ${getattrRes.status}`);
}
const fattr = getattrRes.resok.objAttributes;
const reader = new Reader_1.Reader();
reader.reset(fattr.attrVals);
const xdr = new XdrDecoder_1.XdrDecoder(reader);
const currentSize = Number(xdr.readUnsignedHyper());
const openStateid = openRes.resok.stateid;
const buffer = this.encodeData(data);
const chunkSize = 65536;
try {
let offset = BigInt(currentSize);
for (let i = 0; i < buffer.length; i += chunkSize) {
const chunk = buffer.slice(i, Math.min(i + chunkSize, buffer.length));
const writeResponse = await this.fs.compound([
builder_1.nfs.WRITE(openStateid, offset, 2, chunk),
]);
if (writeResponse.status !== 0) {
throw new Error(`Failed to write file: ${writeResponse.status}`);
}
const writeRes = writeResponse.resarray[0];
if (writeRes.status !== 0 || !writeRes.resok) {
throw new Error(`Failed to write file: ${writeRes.status}`);
}
offset += BigInt(writeRes.resok.count);
}
}
finally {
await this.closeStateid(openOwner, openStateid);
}
};
this.truncate = async (path, len = 0) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const writer = new Writer_1.Writer(16);
const xdr = new XdrEncoder_1.XdrEncoder(writer);
xdr.writeUnsignedHyper(BigInt(len));
const attrVals = writer.flush();
const sizeAttrs = builder_1.nfs.Fattr([4], attrVals);
const stateid = builder_1.nfs.Stateid(0, new Uint8Array(12));
operations.push(builder_1.nfs.SETATTR(stateid, sizeAttrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to truncate file: ${response.status}`);
}
const setattrRes = response.resarray[response.resarray.length - 1];
if (setattrRes.status !== 0) {
throw new Error(`Failed to truncate file: ${setattrRes.status}`);
}
};
this.unlink = async (path) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
if (parts.length === 0) {
throw new Error('Cannot unlink root directory');
}
const operations = this.navigateToParent(parts);
const filename = parts[parts.length - 1];
operations.push(builder_1.nfs.REMOVE(filename));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to unlink file: ${response.status}`);
}
const removeRes = response.resarray[response.resarray.length - 1];
if (removeRes.status !== 0) {
throw new Error(`Failed to unlink file: ${removeRes.status}`);
}
};
this.rmdir = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
if (parts.length === 0) {
throw new Error('Cannot remove root directory');
}
const operations = this.navigateToParent(parts);
const dirname = parts[parts.length - 1];
operations.push(builder_1.nfs.REMOVE(dirname));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to remove directory: ${response.status}`);
}
const removeRes = response.resarray[response.resarray.length - 1];
if (removeRes.status !== 0) {
throw new Error(`Failed to remove directory: ${removeRes.status}`);
}
};
this.rm = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
if (parts.length === 0) {
throw new Error('Cannot remove root directory');
}
const force = options?.force ?? false;
const recursive = options?.recursive ?? false;
if (recursive) {
try {
const stats = await this.stat(path);
if (stats.isDirectory()) {
const entries = await this.readdir(path);
for (const entry of entries) {
const entryPath = pathStr + '/' + entry;
await this.rm(entryPath, options);
}
}
}
catch (err) {
if (!force)
throw err;
return;
}
}
try {
const operations = this.navigateToParent(parts);
const name = parts[parts.length - 1];
operations.push(builder_1.nfs.REMOVE(name));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
if (!force)
throw new Error(`Failed to remove: ${response.status}`);
return;
}
const removeRes = response.resarray[response.resarray.length - 1];
if (removeRes.status !== 0) {
if (!force)
throw new Error(`Failed to remove: ${removeRes.status}`);
}
}
catch (err) {
if (!force)
throw err;
}
};
this.access = async (path, mode = 0) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
let accessMask = 0;
if (mode === 0) {
accessMask = 1;
}
else {
if (mode & 4)
accessMask |= 1;
if (mode & 2)
accessMask |= 4;
if (mode & 1)
accessMask |= 32;
}
operations.push(builder_1.nfs.ACCESS(accessMask));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Access denied: ${response.status}`);
}
const accessRes = response.resarray[response.resarray.length - 1];
if (accessRes.status !== 0) {
throw new Error(`Access denied: ${accessRes.status}`);
}
};
this.rename = async (oldPath, newPath) => {
const oldPathStr = typeof oldPath === 'string' ? oldPath : oldPath.toString();
const newPathStr = typeof newPath === 'string' ? newPath : newPath.toString();
const oldParts = this.parsePath(oldPathStr);
const newParts = this.parsePath(newPathStr);
if (oldParts.length === 0 || newParts.length === 0) {
throw new Error('Cannot rename root directory');
}
const operations = [];
operations.push(builder_1.nfs.PUTROOTFH());
for (const part of oldParts.slice(0, -1)) {
operations.push(builder_1.nfs.LOOKUP(part));
}
operations.push(builder_1.nfs.SAVEFH());
operations.push(builder_1.nfs.PUTROOTFH());
for (const part of newParts.slice(0, -1)) {
operations.push(builder_1.nfs.LOOKUP(part));
}
const oldname = oldParts[oldParts.length - 1];
const newname = newParts[newParts.length - 1];
operations.push(builder_1.nfs.RENAME(oldname, newname));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to rename: ${response.status}`);
}
const renameRes = response.resarray[response.resarray.length - 1];
if (renameRes.status !== 0) {
throw new Error(`Failed to rename: ${renameRes.status}`);
}
};
this.copyFile = async (src, dest, flags) => {
const data = await this.readFile(src);
await this.writeFile(dest, data);
};
this.realpath = async (path, options) => {
const encoding = typeof options === 'string' ? options : options?.encoding;
const pathStr = typeof path === 'string' ? path : path.toString();
const normalized = '/' + this.parsePath(pathStr).join('/');
if (!encoding || encoding === 'utf8') {
return normalized;
}
return Buffer.from(normalized, 'utf8');
};
this.link = async (existingPath, newPath) => {
const existingPathStr = typeof existingPath === 'string' ? existingPath : existingPath.toString();
const newPathStr = typeof newPath === 'string' ? newPath : newPath.toString();
const existingParts = this.parsePath(existingPathStr);
const newParts = this.parsePath(newPathStr);
if (newParts.length === 0) {
throw new Error('Cannot create link at root');
}
const operations = this.navigateToPath(existingParts);
operations.push(builder_1.nfs.SAVEFH());
operations.push(builder_1.nfs.PUTROOTFH());
for (const part of newParts.slice(0, -1)) {
operations.push(builder_1.nfs.LOOKUP(part));
}
const newname = newParts[newParts.length - 1];
operations.push(builder_1.nfs.LINK(newname));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to create link: ${response.status}`);
}
const linkRes = response.resarray[response.resarray.length - 1];
if (linkRes.status !== 0) {
throw new Error(`Failed to create link: ${linkRes.status}`);
}
};
this.symlink = async (target, path, type) => {
const targetStr = typeof target === 'string' ? target : target.toString();
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
if (parts.length === 0) {
throw new Error('Cannot create symlink at root');
}
const operations = this.navigateToParent(parts);
const linkname = parts[parts.length - 1];
const createType = new structs.Nfsv4CreateType(5, new structs.Nfsv4CreateTypeLink(targetStr));
const emptyAttrs = builder_1.nfs.Fattr([], new Uint8Array(0));
operations.push(builder_1.nfs.CREATE(createType, linkname, emptyAttrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to create symlink: ${response.status}`);
}
const createRes = response.resarray[response.resarray.length - 1];
if (createRes.status !== 0) {
throw new Error(`Failed to create symlink: ${createRes.status}`);
}
};
this.utimes = async (path, atime, mtime) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const atimeMs = typeof atime === 'number' ? atime : atime instanceof Date ? atime.getTime() : Date.now();
const mtimeMs = typeof mtime === 'number' ? mtime : mtime instanceof Date ? mtime.getTime() : Date.now();
const writer = new Writer_1.Writer(64);
const xdr = new XdrEncoder_1.XdrEncoder(writer);
xdr.writeUnsignedInt(1);
xdr.writeHyper(BigInt(Math.floor(atimeMs / 1000)));
xdr.writeUnsignedInt((atimeMs % 1000) * 1000000);
xdr.writeUnsignedInt(1);
xdr.writeHyper(BigInt(Math.floor(mtimeMs / 1000)));
xdr.writeUnsignedInt((mtimeMs % 1000) * 1000000);
const attrVals = writer.flush();
const timeAttrs = builder_1.nfs.Fattr([48, 54], attrVals);
const stateid = builder_1.nfs.Stateid(0, new Uint8Array(12));
operations.push(builder_1.nfs.SETATTR(stateid, timeAttrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to set times: ${response.status}`);
}
const setattrRes = response.resarray[response.resarray.length - 1];
if (setattrRes.status !== 0) {
throw new Error(`Failed to set times: ${setattrRes.status}`);
}
};
this.readlink = async (path, options) => {
const encoding = typeof options === 'string' ? options : options?.encoding;
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
operations.push(builder_1.nfs.READLINK());
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to read link: ${response.status}`);
}
const readlinkRes = response.resarray[response.resarray.length - 1];
if (readlinkRes.status !== 0 || !readlinkRes.resok) {
throw new Error(`Failed to read link: ${readlinkRes.status}`);
}
if (!encoding || encoding === 'utf8') {
return readlinkRes.resok.link;
}
return Buffer.from(readlinkRes.resok.link, 'utf8');
};
this.opendir = async (path, options) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
return new NfsFsDir_1.NfsFsDir(pathStr, this.fs, operations);
};
this.mkdtemp = async (prefix, options) => {
const encoding = typeof options === 'string' ? options : options?.encoding;
const randomSuffix = Math.random().toString(36).substring(2, 8);
const dirName = prefix + randomSuffix;
await this.mkdir(dirName);
if (!encoding || encoding === 'utf8')
return dirName;
return Buffer.from(dirName, 'utf8');
};
this.chmod = async (path, mode) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const modeValue = typeof mode === 'number' ? mode : parseInt(mode.toString(), 8);
const writer = new Writer_1.Writer(8);
const xdr = new XdrEncoder_1.XdrEncoder(writer);
xdr.writeUnsignedInt(modeValue);
const attrVals = writer.flush();
const attrs = builder_1.nfs.Fattr([33], attrVals);
const stateid = builder_1.nfs.Stateid(0, new Uint8Array(12));
operations.push(builder_1.nfs.SETATTR(stateid, attrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to chmod: ${response.status}`);
}
const setattrRes = response.resarray[response.resarray.length - 1];
if (setattrRes.status !== 0) {
throw new Error(`Failed to chmod: ${setattrRes.status}`);
}
};
this.chown = async (path, uid, gid) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToPath(parts);
const writer = new Writer_1.Writer(64);
const xdr = new XdrEncoder_1.XdrEncoder(writer);
xdr.writeStr(uid.toString());
xdr.writeStr(gid.toString());
const attrVals = writer.flush();
const attrs = builder_1.nfs.Fattr([36, 37], attrVals);
const stateid = builder_1.nfs.Stateid(0, new Uint8Array(12));
operations.push(builder_1.nfs.SETATTR(stateid, attrs));
const response = await this.fs.compound(operations);
if (response.status !== 0) {
throw new Error(`Failed to chown: ${response.status}`);
}
const setattrRes = response.resarray[response.resarray.length - 1];
if (setattrRes.status !== 0) {
throw new Error(`Failed to chown: ${setattrRes.status}`);
}
};
this.lchmod = async (path, mode) => {
return this.chmod(path, mode);
};
this.lchown = async (path, uid, gid) => {
return this.chown(path, uid, gid);
};
this.lutimes = async (path, atime, mtime) => {
return this.utimes(path, atime, mtime);
};
this.open = async (path, flags, mode) => {
const pathStr = typeof path === 'string' ? path : path.toString();
const parts = this.parsePath(pathStr);
const operations = this.navigateToParent(parts);
const filename = parts[parts.length - 1];
const openOwner = this.createDefaultOpenOwner();
const claim = builder_1.nfs.OpenClaimNull(filename);
let access = 1;
const openSeqid = this.nextOpenOwnerSeqid(openOwner);
if (typeof flags === 'string') {
if (flags.includes('r') && flags.includes('+')) {
access = 3;
}
else if (flags.includes('w') || flags.includes('a')) {
access = 2;
if (flags.includes('+')) {
access = 3;
}
}
}
else if (typeof flags === 'number') {
const O_RDONLY = 0;
const O_WRONLY = 1;
const O_RDWR = 2;
const O_ACCMODE = 3;
const accessMode = flags & O_ACCMODE;
switch (accessMode) {
case O_RDONLY:
access = 1;
break;
case O_WRONLY:
access = 2;
break;
case O_RDWR:
access = 3;
break;
}
}
operations.push(builder_1.nfs.OPEN(openSeqid, access, 0, openOwner, builder_1.nfs.OpenHowNoCreate(), claim));
const openResponse = await this.fs.compound(operations);
if (openResponse.status !== 0) {
throw new Error(`Failed to open file: ${openResponse.status}`);
}
const openRes = openResponse.resarray[openResponse.resarray.length - 1];
if (openRes.status !== 0 || !openRes.resok) {
throw new Error(`Failed to open file: ${openRes.status}`);
}
const stateid = openRes.resok.stateid;
const fd = Math.floor(Math.random() * 1000000);
return new NfsFsFileHandle_1.NfsFsFileHandle(fd, pathStr, this, stateid, openOwner);
};
this.statfs = (path, options) => {
throw new Error('Not implemented.');
};
this.watch = (filename, options) => {
throw new Error('Not implemented.');
};
this.glob = (pattern, options) => {
throw new Error('Not implemented.');
};
}
makeOpenOwnerKey(owner) {
return `${owner.clientid}:${Buffer.from(owner.owner).toString('hex')}`;
}
    // Return the next OPEN seqid for the given open-owner. The first OPEN
    // for an owner uses seqid 0; subsequent ones increment modulo 2^32,
    // wrapping from 0xffffffff to 1 so 0 is never reused after the initial
    // OPEN. NOTE(review): confirm the initial value against the server's
    // expected open-owner seqid semantics.
    nextOpenOwnerSeqid(owner) {
        const key = this.makeOpenOwnerKey(owner);
        const last = this.openOwnerSeqids.get(key);
        const next = last === undefined ? 0 : last === 0xffffffff ? 1 : (last + 1) >>> 0;
        this.openOwnerSeqids.set(key, next);
        return next;
    }
createDefaultOpenOwner() {
return builder_1.nfs.OpenOwner(BigInt(1), new Uint8Array(this.defaultOpenOwnerId));
}
attrNumsToBitmap(attrNums) {
const bitmap = [];
for (const attrNum of attrNums) {
const wordIndex = Math.floor(attrNum / 32);
const bitIndex = attrNum % 32;
while (bitmap.length <= wordIndex) {
bitmap.push(0);
}
bitmap[wordIndex] |= 1 << bitIndex;
}
return bitmap;
}
parsePath(path) {
const normalized = path.replace(/^\/+/, '').replace(/\/+$/, '');
if (!normalized)
return [];
return normalized.split('/').filter((part) => part.length > 0);
}
navigateToParent(parts) {
const operations = [builder_1.nfs.PUTROOTFH()];
for (const part of parts.slice(0, -1)) {
operations.push(builder_1.nfs.LOOKUP(part));
}
return operations;
}
navigateToPath(parts) {
const operations = [builder_1.nfs.PUTROOTFH()];
for (const part of parts) {
operations.push(builder_1.nfs.LOOKUP(part));
}
return operations;
}
encodeData(data) {
if (data instanceof Uint8Array)
return data;
if (data instanceof ArrayBuffer)
return new Uint8Array(data);
if (typeof data === 'string')
return new TextEncoder().encode(data);
if (Buffer.isBuffer(data))
return new Uint8Array(data);
throw new Error('Unsupported data type');
}
decodeData(data, encoding) {
if (!encoding || encoding === 'buffer')
return Buffer.from(data);
return new TextDecoder(encoding).decode(data);
}
}
exports.Nfsv4FsClient = Nfsv4FsClient;
//# sourceMappingURL=Nfsv4FsClient.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,41 @@
/// <reference types="node" />
import * as stream from 'node:stream';
import { Nfsv4CompoundRequest, Nfsv4CompoundResponse, Nfsv4Request } from '../messages';
import type { Nfsv4Client } from './types';
/** Configuration options for the NFSv4 TCP client. */
export interface Nfsv4TcpClientOpts {
    /** Server host; defaults to 127.0.0.1. */
    host?: string;
    /** Server TCP port; defaults to 2049. */
    port?: number;
    /** Per-request timeout in milliseconds; defaults to 30000. */
    timeout?: number;
    /** When true, protocol traffic is logged via `logger`. */
    debug?: boolean;
    /** Log sink; defaults to `console`. */
    logger?: Pick<typeof console, 'log' | 'error'>;
}
/**
 * NFSv4 client over TCP (or any duplex stream): frames calls with RPC
 * record marking, matches replies to requests by XID, and decodes
 * COMPOUND responses.
 */
export declare class Nfsv4TcpClient implements Nfsv4Client {
    /** Wrap an already-connected duplex stream (bypasses connect()). */
    static fromDuplex(duplex: stream.Duplex, opts?: Nfsv4TcpClientOpts): Nfsv4TcpClient;
    readonly host: string;
    readonly port: number;
    /** Per-request timeout in milliseconds. */
    readonly timeout: number;
    /** When true, protocol traffic is logged via `logger`. */
    debug: boolean;
    logger: Pick<typeof console, 'log' | 'error'>;
    private socket;
    private connected;
    private connecting;
    private xid;
    private seqid;
    private pendingRequests;
    private readonly rmDecoder;
    private readonly rpcDecoder;
    private readonly nfsDecoder;
    private readonly nfsEncoder;
    constructor(opts?: Nfsv4TcpClientOpts);
    private nextXid;
    /** Establish the TCP connection; no-op when already connected. */
    connect(): Promise<void>;
    protected setSocket(socket: stream.Duplex): void;
    private onData;
    private onRpcMessage;
    private onClose;
    /** Issue a COMPOUND call, pre-built or assembled from an operation list. */
    compound(request: Nfsv4CompoundRequest): Promise<Nfsv4CompoundResponse>;
    compound(operations: Nfsv4Request[], tag?: string, minorversion?: number): Promise<Nfsv4CompoundResponse>;
    /** Send the RPC NULL procedure (connectivity ping). */
    null(): Promise<void>;
    /** End the socket; in-flight requests fail via the close handler. */
    close(): void;
    isConnected(): boolean;
}
+216
View File
@@ -0,0 +1,216 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Nfsv4TcpClient = void 0;
const tslib_1 = require("tslib");
const net = tslib_1.__importStar(require("node:net"));
const Nfsv4Decoder_1 = require("../Nfsv4Decoder");
const Nfsv4FullEncoder_1 = require("../Nfsv4FullEncoder");
const rm_1 = require("../../../rm");
const rpc_1 = require("../../../rpc");
const constants_1 = require("../constants");
const messages_1 = require("../messages");
class Nfsv4TcpClient {
static fromDuplex(duplex, opts = {}) {
const client = new Nfsv4TcpClient(opts);
client.setSocket(duplex);
return client;
}
constructor(opts = {}) {
this.socket = null;
this.connected = false;
this.connecting = false;
this.xid = 0;
this.seqid = 0;
this.pendingRequests = new Map();
this.host = opts.host || '127.0.0.1';
this.port = opts.port || 2049;
this.timeout = opts.timeout || 30000;
this.debug = !!opts.debug;
this.logger = opts.logger || console;
this.rmDecoder = new rm_1.RmRecordDecoder();
this.rpcDecoder = new rpc_1.RpcMessageDecoder();
this.nfsDecoder = new Nfsv4Decoder_1.Nfsv4Decoder();
this.nfsEncoder = new Nfsv4FullEncoder_1.Nfsv4FullEncoder();
}
nextXid() {
this.xid = (this.xid + 1) >>> 0;
if (this.xid === 0)
this.xid = 1;
return this.xid;
}
async connect() {
if (this.connected)
return;
if (this.connecting)
throw new Error('Connection already in progress');
return new Promise((resolve, reject) => {
this.connecting = true;
const onError = (err) => {
this.connecting = false;
this.connected = false;
if (this.debug)
this.logger.error('Socket error:', err);
reject(err);
};
const socket = net.connect({ host: this.host, port: this.port }, () => {
if (this.debug)
this.logger.log(`Connected to NFSv4 server at ${this.host}:${this.port}`);
socket.removeListener('error', onError);
resolve();
this.setSocket(socket);
});
socket.once('error', onError);
});
}
setSocket(socket) {
socket.on('data', this.onData.bind(this));
socket.on('close', this.onClose.bind(this));
socket.on('error', (err) => {
this.connecting = false;
this.connected = false;
if (this.debug)
this.logger.error('Socket error:', err);
});
this.connected = true;
this.connecting = false;
this.socket = socket;
}
onData(data) {
const { rmDecoder, rpcDecoder } = this;
rmDecoder.push(data);
let record = rmDecoder.readRecord();
while (record) {
if (record.size()) {
const rpcMessage = rpcDecoder.decodeMessage(record);
if (rpcMessage)
this.onRpcMessage(rpcMessage);
else if (this.debug)
this.logger.error('Failed to decode RPC message');
}
record = rmDecoder.readRecord();
}
}
onRpcMessage(msg) {
if (msg instanceof rpc_1.RpcAcceptedReplyMessage) {
const pending = this.pendingRequests.get(msg.xid);
if (!pending) {
if (this.debug)
this.logger.error(`No pending request for XID ${msg.xid}`);
return;
}
this.pendingRequests.delete(msg.xid);
if (pending.timeout)
clearTimeout(pending.timeout);
if (msg.stat !== 0) {
pending.reject(new Error(`RPC accepted reply error: stat=${msg.stat}`));
return;
}
if (!msg.results) {
if (pending.resolve.length === 0) {
pending.resolve();
return;
}
pending.reject(new Error('No results in accepted reply'));
return;
}
const response = this.nfsDecoder.decodeCompoundResponse(msg.results);
if (!response) {
pending.reject(new Error('Failed to decode COMPOUND response'));
return;
}
pending.resolve(response);
}
else if (msg instanceof rpc_1.RpcRejectedReplyMessage) {
const pending = this.pendingRequests.get(msg.xid);
if (!pending) {
if (this.debug)
this.logger.error(`No pending request for XID ${msg.xid}`);
return;
}
this.pendingRequests.delete(msg.xid);
if (pending.timeout)
clearTimeout(pending.timeout);
pending.reject(new Error(`RPC rejected reply: stat=${msg.stat}`));
}
else {
if (this.debug)
this.logger.error('Unexpected RPC message type:', msg);
}
}
onClose() {
this.connected = false;
this.connecting = false;
if (this.debug)
this.logger.log('Connection closed');
const error = new Error('Connection closed');
this.pendingRequests.forEach((pending, xid) => {
if (pending.timeout)
clearTimeout(pending.timeout);
pending.reject(error);
});
this.pendingRequests.clear();
}
async compound(requestOrOps, tag = '', minorversion = 0) {
if (!this.connected)
throw new Error('Not connected');
const request = requestOrOps instanceof messages_1.Nfsv4CompoundRequest
? requestOrOps
: new messages_1.Nfsv4CompoundRequest(tag, minorversion, requestOrOps);
const xid = this.nextXid();
const cred = new rpc_1.RpcOpaqueAuth(0, constants_1.EMPTY_READER);
const verf = new rpc_1.RpcOpaqueAuth(0, constants_1.EMPTY_READER);
const encoded = this.nfsEncoder.encodeCall(xid, 1, cred, verf, request);
return new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
this.pendingRequests.delete(xid);
reject(new Error(`Request timeout (XID ${xid})`));
}, this.timeout);
this.pendingRequests.set(xid, { resolve, reject, timeout });
this.socket.write(encoded);
if (this.debug) {
this.logger.log(`Sent COMPOUND request (XID ${xid}): ${request.argarray.length} operations`);
}
});
}
async null() {
if (!this.connected)
throw new Error('Not connected');
const xid = this.nextXid();
const cred = new rpc_1.RpcOpaqueAuth(0, constants_1.EMPTY_READER);
const verf = new rpc_1.RpcOpaqueAuth(0, constants_1.EMPTY_READER);
const writer = this.nfsEncoder.writer;
const rmEncoder = this.nfsEncoder.rmEncoder;
const rpcEncoder = this.nfsEncoder.rpcEncoder;
const state = rmEncoder.startRecord();
rpcEncoder.writeCall(xid, 100003, 4, 0, cred, verf);
rmEncoder.endRecord(state);
const encoded = writer.flush();
return new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
this.pendingRequests.delete(xid);
reject(new Error(`NULL request timeout (XID ${xid})`));
}, this.timeout);
this.pendingRequests.set(xid, {
resolve: () => resolve(),
reject,
timeout,
});
this.socket.write(encoded);
if (this.debug)
this.logger.log(`Sent NULL request (XID ${xid})`);
});
}
close() {
if (this.socket) {
this.socket.end();
this.socket = null;
}
this.connected = false;
this.connecting = false;
}
isConnected() {
return this.connected;
}
}
exports.Nfsv4TcpClient = Nfsv4TcpClient;
//# sourceMappingURL=Nfsv4TcpClient.js.map
File diff suppressed because one or more lines are too long
+9
View File
@@ -0,0 +1,9 @@
import type { FsPromisesApi } from 'memfs/lib/node/types';
import * as msg from '../messages';
/** Minimal NFSv4 transport: issue COMPOUND and NULL remote procedure calls. */
export interface Nfsv4Client {
    /** Send a pre-built COMPOUND request. */
    compound(request: msg.Nfsv4CompoundRequest): Promise<msg.Nfsv4CompoundResponse>;
    /** Build and send a COMPOUND from a list of operations. */
    compound(operations: msg.Nfsv4Request[], tag?: string, minorversion?: number): Promise<msg.Nfsv4CompoundResponse>;
    /** Send the RPC NULL procedure (connectivity check). */
    null(): Promise<void>;
}
/** The subset of Node's `fs.promises` API implemented by the NFS client. */
export interface NfsFsClient extends Pick<FsPromisesApi, 'readFile' | 'writeFile' | 'readdir' | 'mkdir' | 'access' | 'appendFile' | 'copyFile' | 'link' | 'realpath' | 'rename' | 'rmdir' | 'truncate' | 'unlink' | 'utimes' | 'symlink' | 'stat' | 'readlink' | 'opendir' | 'open' | 'chmod' | 'rm' | 'chown' | 'lchmod' | 'lchown' | 'lutimes' | 'lstat' | 'mkdtemp' | 'statfs' | 'watch' | 'glob'> {
}
+3
View File
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../../src/nfs/v4/client/types.ts"],"names":[],"mappings":""}