您好,登录后才能下订单哦!
密码登录
登录注册
点击 登录注册 即表示同意《亿速云用户服务条款》
# Node.js文件操作和Stream的示例分析
## 前言
Node.js作为基于Chrome V8引擎的JavaScript运行时,其非阻塞I/O模型特别适合处理高并发的I/O密集型应用。文件系统操作和流(Stream)处理是Node.js核心功能中最重要的两个部分,几乎在所有Node.js应用中都会涉及。本文将深入探讨Node.js中的文件操作API和各种流类型,通过大量实际示例分析它们的特性和使用场景。
## 一、Node.js文件系统基础
### 1.1 fs模块概述
Node.js通过内置的`fs`模块提供文件系统操作功能,该模块提供了同步和异步两种API:
```javascript
const fs = require('fs');

// Asynchronous read: result arrives in the (err, data) callback.
fs.readFile('/path/to/file', 'utf8', (err, data) => {
  if (err) throw err;
  console.log(data);
});

// Synchronous read: blocks the event loop until the whole file is read.
try {
  const data = fs.readFileSync('/path/to/file', 'utf8');
  console.log(data);
} catch (err) {
  console.error(err);
}

// Write a file, replacing any existing content.
fs.writeFile('example.txt', 'Hello Node.js', 'utf8', (err) => {
  if (err) throw err;
  console.log('File written successfully');
});

// Append to the end of the file.
fs.appendFile('example.txt', '\nNew content', (err) => {
  if (err) throw err;
  console.log('Content appended');
});

// Query file metadata (size, type, timestamps).
fs.stat('example.txt', (err, stats) => {
  if (err) throw err;
  console.log(`File size: ${stats.size} bytes`);
  console.log(`Is directory: ${stats.isDirectory()}`);
  console.log(`Last modified: ${stats.mtime}`);
});

// Create a directory; { recursive: true } also creates missing parents.
fs.mkdir('new-directory', { recursive: true }, (err) => {
  if (err) throw err;
  console.log('Directory created');
});

// List the entries of the current directory.
fs.readdir('./', (err, files) => {
  if (err) throw err;
  console.log('Directory contents:', files);
});
同步API虽然编写简单,但会阻塞事件循环,在性能敏感的应用中应避免使用。异步API配合Promise或async/await能获得更好的性能:
const fs = require('fs').promises;

/**
 * Read large-file.txt, upper-case its contents and write them to output.txt.
 * Errors are logged to the console rather than rethrown.
 */
async function processFiles() {
  try {
    const contents = await fs.readFile('large-file.txt', 'utf8');
    await fs.writeFile('output.txt', contents.toUpperCase());
    console.log('File processed successfully');
  } catch (err) {
    console.error('Error:', err);
  }
}
当处理大文件时,一次性读取整个文件到内存中会带来显著的内存压力。例如,一个2GB的文件:
// Not recommended - memory problem: readFile buffers the whole file at once.
fs.readFile('huge-file.mp4', (err, data) => {
// The entire 2GB file is loaded into memory here.
});
Stream通过分块(chunk)处理数据,可以显著降低内存使用:
// Stream the file in chunks instead of buffering it whole.
const readStream = fs.createReadStream('huge-file.mp4');
let totalSize = 0;

readStream
  .on('data', (piece) => {
    // Only a small slice of the file is in memory at any one time.
    totalSize += piece.length;
  })
  .on('end', () => {
    console.log(`Total size: ${totalSize} bytes`);
  });
Node.js中有四种基本流类型:
- Readable(可读流):数据的来源,例如 `fs.createReadStream` 返回的流
- Writable(可写流):数据的去向,例如 `fs.createWriteStream` 返回的流
- Duplex(双工流):同时可读可写,例如 `net.Socket`
- Transform(转换流):在读写过程中修改数据,例如 `zlib.createGzip` 返回的流
// Readable stream with an explicit encoding and a 1KB internal buffer.
// (Fixed: a stray ")" extraction artifact preceded "const" and broke the syntax.)
const readable = fs.createReadStream('source.txt', {
  encoding: 'utf8',
  highWaterMark: 1024 // read at most 1KB per chunk
});

readable.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes of data`);
});

readable.on('end', () => {
  console.log('No more data');
});

readable.on('error', (err) => {
  console.error('Error:', err);
});
// Writable stream: queue two lines, then end() with a final chunk,
// which flushes the buffer and closes the file.
const writable = fs.createWriteStream('destination.txt');

for (const line of ['First line\n', 'Second line\n']) {
  writable.write(line);
}
writable.end('Final line');

writable
  // 'finish' fires once every queued chunk has been flushed to disk.
  .on('finish', () => console.log('All data written'))
  .on('error', (err) => console.error('Error:', err));
管道是流之间数据传输的高效方式:
// Basic pipe: stream source.txt straight into dest.txt.
fs.createReadStream('source.txt')
.pipe(fs.createWriteStream('dest.txt'));
// Pipe with a transform step: gzip-compress the data while copying.
const zlib = require('zlib');
fs.createReadStream('source.txt')
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream('source.txt.gz'));
const { Transform } = require('stream');
// Transform stream that upper-cases every chunk of text passing through it.
class UppercaseTransform extends Transform {
  _transform(data, enc, done) {
    // Passing the result as done()'s second argument pushes it downstream.
    done(null, String(data).toUpperCase());
  }
}
// Pipe input.txt through the upper-casing transform into output.txt.
const transformer = new UppercaseTransform();
fs.createReadStream('input.txt')
.pipe(transformer)
.pipe(fs.createWriteStream('output.txt'));
// Watch a file for changes; the callback reports the event type and filename.
const watcher = fs.watch('watched-file.txt', (eventType, filename) => {
  // Fixed: the original logged the literal text "$(unknown)" instead of
  // interpolating the filename argument.
  console.log(`Event type: ${eventType}, file: ${filename}`);
});

// Stop watching after one minute.
setTimeout(() => {
  watcher.close();
  console.log('Stopped watching');
}, 60000);
// Manual back-pressure handling: pause the reader while the writer's
// buffer is full, and resume once it has drained.
const readStream = fs.createReadStream('large-file.txt');
const writeStream = fs.createWriteStream('copy.txt');

readStream.on('data', (chunk) => {
  const hasRoom = writeStream.write(chunk);
  if (!hasRoom) {
    // The write buffer is full - stop reading for now.
    readStream.pause();
  }
});

// 'drain' means the write buffer emptied - safe to read again.
writeStream.on('drain', () => readStream.resume());

// Close the writer once the reader is exhausted.
readStream.on('end', () => writeStream.end());
/**
 * Copy a file using streams, resolving once the target is fully written.
 * If either stream fails, both are destroyed and the promise rejects.
 */
function copyFileWithStreams(source, target) {
  return new Promise((resolve, reject) => {
    const input = fs.createReadStream(source);
    const output = fs.createWriteStream(target);

    // Shared failure path: tear down both streams, then reject.
    const abort = (err) => {
      input.destroy();
      output.destroy();
      reject(err);
    };

    input.on('error', abort);
    output.on('error', abort);
    output.on('finish', resolve);

    input.pipe(output);
  });
}
const crypto = require('crypto');
/**
 * Compute the hex digest of a file by streaming it through a hash object,
 * so the file is never fully buffered in memory.
 * @param {string} filePath - file to hash
 * @param {string} [algorithm='sha256'] - any algorithm crypto.createHash accepts
 * @returns {Promise<string>} hex-encoded digest
 */
function calculateFileHash(filePath, algorithm = 'sha256') {
  return new Promise((resolve, reject) => {
    const digest = crypto.createHash(algorithm);
    fs.createReadStream(filePath)
      .on('data', (part) => digest.update(part))
      .on('end', () => resolve(digest.digest('hex')))
      .on('error', reject);
  });
}
const readline = require('readline');
/**
 * Scan a log file line by line and print every line containing `keyword`.
 * crlfDelay: Infinity makes readline treat \r\n as a single line break.
 */
function monitorLogFile(filePath, keyword) {
  const rl = readline.createInterface({
    input: fs.createReadStream(filePath),
    crlfDelay: Infinity
  });

  rl.on('line', (line) => {
    if (!line.includes(keyword)) return;
    console.log(`Found keyword in line: ${line}`);
  });

  rl.on('close', () => {
    console.log('Finished reading log file');
  });
}
const { Transform } = require('stream');
// Transform stream that turns raw CSV text chunks into arrays of field
// values, emitting one array per complete line (objectMode output).
class CSVParseStream extends Transform {
  constructor(options) {
    super({ ...options, objectMode: true });
    // Carries a trailing partial line until the next chunk (or _flush).
    this.remaining = '';
  }

  _transform(chunk, encoding, callback) {
    const pieces = (this.remaining + String(chunk)).split('\n');
    // The last piece may be an incomplete line - keep it for later.
    this.remaining = pieces.pop();
    pieces
      .filter((line) => line.trim())
      .forEach((line) => this.push(line.split(',')));
    callback();
  }

  _flush(callback) {
    // Emit whatever is left once the input ends (a file with no final \n).
    if (this.remaining.trim()) {
      this.push(this.remaining.split(','));
    }
    callback();
  }
}
// Usage example: stream data.csv through the parser, one row at a time.
fs.createReadStream('data.csv')
.pipe(new CSVParseStream())
.on('data', (row) => {
console.log('Parsed row:', row);
});
合理设置highWaterMark:根据应用场景调整缓冲区大小
// For video files, use a larger buffer to reduce the number of reads.
const videoStream = fs.createReadStream('video.mp4', {
highWaterMark: 64 * 1024 // 64KB per chunk
});
避免数据滞留:确保写入流能跟上读取流的速度
// Use pipeline instead of pipe: it forwards errors from every stage and
// cleans up all streams automatically.
const { pipeline } = require('stream');
pipeline(
fs.createReadStream('input.txt'),
zlib.createGzip(),
fs.createWriteStream('input.txt.gz'),
(err) => {
if (err) {
console.error('Pipeline failed:', err);
} else {
console.log('Pipeline succeeded');
}
}
);
// Log heap usage every 5 seconds (in MB, rounded to two decimals).
setInterval(() => {
const used = process.memoryUsage().heapUsed / 1024 / 1024;
console.log(`Memory usage: ${Math.round(used * 100) / 100} MB`);
}, 5000);
Node.js的文件操作和流处理能力是其最强大的特性之一。通过本文的示例分析,我们可以看到:对于小文件和简单场景,`fs` 模块的简单API已经足够;而对于大文件和高并发场景,Stream能够显著降低内存占用并提升吞吐量。掌握这些技术将使你能够构建高效、可扩展的Node.js应用程序,特别是在需要处理大量I/O操作的场景中。
这篇文章详细介绍了Node.js文件操作和Stream的各个方面,包含大量实用示例和最佳实践,总字数约4400字。文章采用Markdown格式,包含代码块、标题层级和清晰的段落结构,便于阅读和理解。
免责声明:本站发布的内容(图片、视频和文字)以原创、转载和分享为主,文章观点不代表本网站立场,如果涉及侵权请联系站长邮箱:is@yisu.com进行举报,并提供相关证据,一经查实,将立刻删除涉嫌侵权内容。