- 使用分块上传替代一次性上传
- 调用 /upload/start → /upload/chunk → /upload/merge 接口
- 通过 IPC 事件实时推送上传进度到前端
- 修复 merge 时未使用目标路径的问题
226 lines
6.0 KiB
JavaScript
const fs = require('fs');
|
|
const path = require('path');
|
|
const logger = require('../../utils/logger');
|
|
const paths = require('../../utils/paths');
|
|
|
|
class FileService {
|
|
constructor() {
|
|
this.uploadDir = paths.getUploadPath();
|
|
this.tempDir = paths.getTempPath();
|
|
this._ensureDirs();
|
|
}
|
|
|
|
_ensureDirs() {
|
|
if (!fs.existsSync(this.uploadDir)) {
|
|
fs.mkdirSync(this.uploadDir, { recursive: true });
|
|
}
|
|
if (!fs.existsSync(this.tempDir)) {
|
|
fs.mkdirSync(this.tempDir, { recursive: true });
|
|
}
|
|
}
|
|
|
|
getFileList() {
|
|
try {
|
|
const files = fs.readdirSync(this.uploadDir);
|
|
return files
|
|
.filter(f => {
|
|
const filePath = path.join(this.uploadDir, f);
|
|
return !fs.statSync(filePath).isDirectory();
|
|
})
|
|
.map(f => {
|
|
const filePath = path.join(this.uploadDir, f);
|
|
const stat = fs.statSync(filePath);
|
|
return {
|
|
name: f,
|
|
size: stat.size,
|
|
modified: stat.mtime,
|
|
type: path.extname(f)
|
|
};
|
|
});
|
|
} catch (error) {
|
|
logger.error('Failed to get file list', { error: error.message });
|
|
return [];
|
|
}
|
|
}
|
|
|
|
getFilePath(filename) {
|
|
if (!filename) return null;
|
|
const filePath = path.normalize(filename);
|
|
if (!fs.existsSync(filePath)) {
|
|
return null;
|
|
}
|
|
return filePath;
|
|
}
|
|
|
|
getFileStream(filename, range) {
|
|
const filePath = this.getFilePath(filename);
|
|
if (!filePath) return null;
|
|
|
|
const stat = fs.statSync(filePath);
|
|
const fileSize = stat.size;
|
|
|
|
if (range) {
|
|
const parts = range.replace(/bytes=/, '').split('-');
|
|
const start = parseInt(parts[0], 10);
|
|
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
|
|
const chunkSize = end - start + 1;
|
|
|
|
return {
|
|
stream: fs.createReadStream(filePath, { start, end }),
|
|
contentRange: `bytes ${start}-${end}/${fileSize}`,
|
|
contentLength: chunkSize,
|
|
fileSize
|
|
};
|
|
}
|
|
|
|
return {
|
|
stream: fs.createReadStream(filePath),
|
|
contentLength: fileSize,
|
|
fileSize
|
|
};
|
|
}
|
|
|
|
saveChunk(fileId, chunkIndex, data) {
|
|
try {
|
|
const chunkPath = path.join(this.tempDir, `${fileId}.${chunkIndex}`);
|
|
fs.writeFileSync(chunkPath, data);
|
|
return true;
|
|
} catch (error) {
|
|
logger.error('Failed to save chunk', { error: error.message });
|
|
return false;
|
|
}
|
|
}
|
|
|
|
mergeChunks(fileId, totalChunks, filename, targetPath) {
|
|
try {
|
|
let targetDir = targetPath || 'C:\\';
|
|
if (!fs.existsSync(targetDir)) {
|
|
fs.mkdirSync(targetDir, { recursive: true });
|
|
}
|
|
const filePath = path.join(targetDir, filename);
|
|
const dir = path.dirname(filePath);
|
|
|
|
if (!fs.existsSync(dir)) {
|
|
fs.mkdirSync(dir, { recursive: true });
|
|
}
|
|
|
|
const fd = fs.openSync(filePath, 'w');
|
|
|
|
for (let i = 0; i < totalChunks; i++) {
|
|
const chunkPath = path.join(this.tempDir, `${fileId}.${i}`);
|
|
if (!fs.existsSync(chunkPath)) {
|
|
fs.closeSync(fd);
|
|
return false;
|
|
}
|
|
const chunkData = fs.readFileSync(chunkPath);
|
|
fs.writeSync(fd, chunkData, 0, chunkData.length, null);
|
|
fs.unlinkSync(chunkPath);
|
|
}
|
|
|
|
fs.closeSync(fd);
|
|
return true;
|
|
} catch (error) {
|
|
logger.error('Failed to merge chunks', { error: error.message });
|
|
return false;
|
|
}
|
|
}
|
|
|
|
deleteFile(filename) {
|
|
const filePath = this.getFilePath(filename);
|
|
if (filePath) {
|
|
fs.unlinkSync(filePath);
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
cleanupChunks(fileId) {
|
|
try {
|
|
const files = fs.readdirSync(this.tempDir);
|
|
files.forEach(f => {
|
|
if (f.startsWith(fileId + '.')) {
|
|
fs.unlinkSync(path.join(this.tempDir, f));
|
|
}
|
|
});
|
|
} catch (error) {
|
|
logger.error('Failed to cleanup chunks', { error: error.message });
|
|
}
|
|
}
|
|
|
|
getDrives() {
|
|
const drives = [];
|
|
const letters = 'CDEFGHIJKLMNOPQRSTUVWXYZ'.split('');
|
|
for (const letter of letters) {
|
|
const drivePath = `${letter}:\\`;
|
|
try {
|
|
fs.accessSync(drivePath);
|
|
drives.push({ name: `${letter}:`, isDirectory: true, size: 0 });
|
|
} catch {}
|
|
}
|
|
return drives;
|
|
}
|
|
|
|
browseDirectory(relativePath = '', allowSystem = false) {
|
|
let targetDir;
|
|
let currentPath;
|
|
|
|
if (allowSystem) {
|
|
currentPath = path.normalize(relativePath || '').replace(/^(\.\.(\/|\\|$))+/, '');
|
|
if (!currentPath) {
|
|
currentPath = '';
|
|
}
|
|
targetDir = currentPath || 'C:\\';
|
|
} else {
|
|
const safePath = path.normalize(relativePath || '').replace(/^(\.\.(\/|\\|$))+/, '');
|
|
targetDir = path.join(this.uploadDir, safePath);
|
|
currentPath = safePath;
|
|
|
|
if (!targetDir.startsWith(this.uploadDir)) {
|
|
return { error: 'Access denied', items: [], currentPath: '' };
|
|
}
|
|
}
|
|
|
|
const items = [];
|
|
|
|
try {
|
|
const files = fs.readdirSync(targetDir);
|
|
|
|
for (const name of files) {
|
|
try {
|
|
const itemPath = path.join(targetDir, name);
|
|
const stat = fs.statSync(itemPath);
|
|
const isDirectory = stat.isDirectory();
|
|
items.push({
|
|
name,
|
|
isDirectory,
|
|
size: isDirectory ? 0 : stat.size,
|
|
modified: stat.mtime,
|
|
type: isDirectory ? 'directory' : path.extname(name)
|
|
});
|
|
} catch (err) {
|
|
logger.debug('Skipped inaccessible file', { name, error: err.message });
|
|
}
|
|
}
|
|
} catch (err) {
|
|
logger.warn('Failed to read directory', { targetDir, error: err.message });
|
|
return { items: [], currentPath: currentPath, parentPath: path.dirname(currentPath) || null };
|
|
}
|
|
|
|
items.sort((a, b) => {
|
|
if (a.isDirectory && !b.isDirectory) return -1;
|
|
if (!a.isDirectory && b.isDirectory) return 1;
|
|
return a.name.localeCompare(b.name);
|
|
});
|
|
|
|
const parentPath = currentPath ? path.dirname(currentPath) : null;
|
|
|
|
return {
|
|
items,
|
|
currentPath: currentPath,
|
|
parentPath: parentPath === currentPath ? null : parentPath
|
|
};
|
|
}
|
|
}
|
|
|
|
// CommonJS export of the class itself (not a singleton); consumers
// construct their own instance with `new FileService()`.
module.exports = FileService;