185 lines
5.0 KiB
JavaScript
185 lines
5.0 KiB
JavaScript
|
|
const fs = require('fs');
|
||
|
|
const path = require('path');
|
||
|
|
const logger = require('../../utils/logger');
|
||
|
|
const paths = require('../../utils/paths');
|
||
|
|
|
||
|
|
class FileService {
|
||
|
|
constructor() {
|
||
|
|
this.uploadDir = paths.getUploadPath();
|
||
|
|
this.tempDir = paths.getTempPath();
|
||
|
|
this._ensureDirs();
|
||
|
|
}
|
||
|
|
|
||
|
|
_ensureDirs() {
|
||
|
|
if (!fs.existsSync(this.uploadDir)) {
|
||
|
|
fs.mkdirSync(this.uploadDir, { recursive: true });
|
||
|
|
}
|
||
|
|
if (!fs.existsSync(this.tempDir)) {
|
||
|
|
fs.mkdirSync(this.tempDir, { recursive: true });
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
getFileList() {
|
||
|
|
try {
|
||
|
|
const files = fs.readdirSync(this.uploadDir);
|
||
|
|
return files
|
||
|
|
.filter(f => {
|
||
|
|
const filePath = path.join(this.uploadDir, f);
|
||
|
|
return !fs.statSync(filePath).isDirectory();
|
||
|
|
})
|
||
|
|
.map(f => {
|
||
|
|
const filePath = path.join(this.uploadDir, f);
|
||
|
|
const stat = fs.statSync(filePath);
|
||
|
|
return {
|
||
|
|
name: f,
|
||
|
|
size: stat.size,
|
||
|
|
modified: stat.mtime,
|
||
|
|
type: path.extname(f)
|
||
|
|
};
|
||
|
|
});
|
||
|
|
} catch (error) {
|
||
|
|
logger.error('Failed to get file list', { error: error.message });
|
||
|
|
return [];
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
getFilePath(filename) {
|
||
|
|
const filePath = path.join(this.uploadDir, path.basename(filename));
|
||
|
|
if (!fs.existsSync(filePath)) {
|
||
|
|
return null;
|
||
|
|
}
|
||
|
|
return filePath;
|
||
|
|
}
|
||
|
|
|
||
|
|
getFileStream(filename, range) {
|
||
|
|
const filePath = this.getFilePath(filename);
|
||
|
|
if (!filePath) return null;
|
||
|
|
|
||
|
|
const stat = fs.statSync(filePath);
|
||
|
|
const fileSize = stat.size;
|
||
|
|
|
||
|
|
if (range) {
|
||
|
|
const parts = range.replace(/bytes=/, '').split('-');
|
||
|
|
const start = parseInt(parts[0], 10);
|
||
|
|
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
|
||
|
|
const chunkSize = end - start + 1;
|
||
|
|
|
||
|
|
return {
|
||
|
|
stream: fs.createReadStream(filePath, { start, end }),
|
||
|
|
contentRange: `bytes ${start}-${end}/${fileSize}`,
|
||
|
|
contentLength: chunkSize,
|
||
|
|
fileSize
|
||
|
|
};
|
||
|
|
}
|
||
|
|
|
||
|
|
return {
|
||
|
|
stream: fs.createReadStream(filePath),
|
||
|
|
contentLength: fileSize,
|
||
|
|
fileSize
|
||
|
|
};
|
||
|
|
}
|
||
|
|
|
||
|
|
saveChunk(fileId, chunkIndex, data) {
|
||
|
|
try {
|
||
|
|
const chunkPath = path.join(this.tempDir, `${fileId}.${chunkIndex}`);
|
||
|
|
fs.writeFileSync(chunkPath, data);
|
||
|
|
return true;
|
||
|
|
} catch (error) {
|
||
|
|
logger.error('Failed to save chunk', { error: error.message });
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
mergeChunks(fileId, totalChunks, filename) {
|
||
|
|
try {
|
||
|
|
const filePath = path.join(this.uploadDir, path.basename(filename));
|
||
|
|
const fd = fs.openSync(filePath, 'w');
|
||
|
|
|
||
|
|
for (let i = 0; i < totalChunks; i++) {
|
||
|
|
const chunkPath = path.join(this.tempDir, `${fileId}.${i}`);
|
||
|
|
if (!fs.existsSync(chunkPath)) {
|
||
|
|
fs.closeSync(fd);
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
const chunkData = fs.readFileSync(chunkPath);
|
||
|
|
fs.writeSync(fd, chunkData, 0, chunkData.length, null);
|
||
|
|
fs.unlinkSync(chunkPath);
|
||
|
|
}
|
||
|
|
|
||
|
|
fs.closeSync(fd);
|
||
|
|
return true;
|
||
|
|
} catch (error) {
|
||
|
|
logger.error('Failed to merge chunks', { error: error.message });
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
deleteFile(filename) {
|
||
|
|
const filePath = this.getFilePath(filename);
|
||
|
|
if (filePath) {
|
||
|
|
fs.unlinkSync(filePath);
|
||
|
|
return true;
|
||
|
|
}
|
||
|
|
return false;
|
||
|
|
}
|
||
|
|
|
||
|
|
cleanupChunks(fileId) {
|
||
|
|
try {
|
||
|
|
const files = fs.readdirSync(this.tempDir);
|
||
|
|
files.forEach(f => {
|
||
|
|
if (f.startsWith(fileId + '.')) {
|
||
|
|
fs.unlinkSync(path.join(this.tempDir, f));
|
||
|
|
}
|
||
|
|
});
|
||
|
|
} catch (error) {
|
||
|
|
logger.error('Failed to cleanup chunks', { error: error.message });
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
browseDirectory(relativePath = '') {
|
||
|
|
try {
|
||
|
|
const safePath = path.normalize(relativePath || '').replace(/^(\.\.(\/|\\|$))+/, '');
|
||
|
|
const targetDir = path.join(this.uploadDir, safePath);
|
||
|
|
|
||
|
|
if (!targetDir.startsWith(this.uploadDir)) {
|
||
|
|
return { error: 'Access denied', items: [], currentPath: '' };
|
||
|
|
}
|
||
|
|
|
||
|
|
if (!fs.existsSync(targetDir)) {
|
||
|
|
return { error: 'Directory not found', items: [], currentPath: safePath };
|
||
|
|
}
|
||
|
|
|
||
|
|
const items = fs.readdirSync(targetDir).map(name => {
|
||
|
|
const itemPath = path.join(targetDir, name);
|
||
|
|
const stat = fs.statSync(itemPath);
|
||
|
|
const isDirectory = stat.isDirectory();
|
||
|
|
|
||
|
|
return {
|
||
|
|
name,
|
||
|
|
isDirectory,
|
||
|
|
size: isDirectory ? 0 : stat.size,
|
||
|
|
modified: stat.mtime,
|
||
|
|
type: isDirectory ? 'directory' : path.extname(name)
|
||
|
|
};
|
||
|
|
});
|
||
|
|
|
||
|
|
items.sort((a, b) => {
|
||
|
|
if (a.isDirectory && !b.isDirectory) return -1;
|
||
|
|
if (!a.isDirectory && b.isDirectory) return 1;
|
||
|
|
return a.name.localeCompare(b.name);
|
||
|
|
});
|
||
|
|
|
||
|
|
return {
|
||
|
|
items,
|
||
|
|
currentPath: safePath,
|
||
|
|
parentPath: safePath ? path.dirname(safePath) : null
|
||
|
|
};
|
||
|
|
} catch (error) {
|
||
|
|
logger.error('Failed to browse directory', { error: error.message });
|
||
|
|
return { error: error.message, items: [], currentPath: relativePath };
|
||
|
|
}
|
||
|
|
}
|
||
|
|
}
|
||
|
|
|
||
|
|
// Export the class itself (not an instance); callers do `new FileService()`.
module.exports = FileService;
|