It's a good interview question. Processing large files in React is challenging because of performance bottlenecks, memory usage, and user-experience concerns; the techniques below tackle each in turn.
Use Streaming for File Uploads
With axios, an onUploadProgress callback gives the user feedback while a large file is in flight:

import axios from 'axios';

const uploadFile = async (file) => {
  const formData = new FormData();
  formData.append('file', file);

  const config = {
    // Report progress so the UI can show a percentage instead of appearing frozen.
    onUploadProgress: (progressEvent) => {
      const percentCompleted = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      );
      console.log(`Upload Progress: ${percentCompleted}%`);
    },
  };

  await axios.post('/upload', formData, config);
};
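Note that the snippet above still sends the file in a single request; it only reports progress. A minimal sketch of an actually chunked ("streamed") upload, assuming a hypothetical /upload-chunk endpoint that reassembles the slices server-side:

// Sketch: split the file into 5 MB slices and upload them sequentially.
// The '/upload-chunk' endpoint and its fields are assumptions, not a real API.
const uploadInChunks = async (file, chunkSize = 5 * 1024 * 1024) => {
  const totalChunks = Math.ceil(file.size / chunkSize);
  for (let index = 0; index < totalChunks; index++) {
    const chunk = file.slice(index * chunkSize, (index + 1) * chunkSize);
    const formData = new FormData();
    formData.append('chunk', chunk);
    formData.append('index', String(index));
    formData.append('total', String(totalChunks));
    formData.append('filename', file.name);
    await axios.post('/upload-chunk', formData);
  }
};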
Optimize File Reading
Reading with file.slice() keeps memory bounded, because only one chunk is held in memory at a time:

const readFileInChunks = (file, chunkSize = 1024 * 1024) => {
  let offset = 0;

  const readChunk = () => {
    const reader = new FileReader();
    const chunk = file.slice(offset, offset + chunkSize);

    reader.onload = (e) => {
      // e.target.result is an ArrayBuffer holding just this slice.
      console.log('Chunk:', e.target.result);
      offset += chunkSize;
      if (offset < file.size) {
        readChunk(); // Read the next slice only after this one finishes.
      }
    };
    reader.onerror = () => console.error('Failed to read chunk at offset', offset);
    reader.readAsArrayBuffer(chunk);
  };

  readChunk();
};
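On modern browsers the same idea can be written more directly with the Streams API, since File inherits a stream() method from Blob:

// Alternative using the Streams API: chunks arrive as Uint8Arrays.
const readFileAsStream = async (file) => {
  const reader = file.stream().getReader();
  let bytesRead = 0;
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    bytesRead += value.byteLength;
    console.log(`Read ${bytesRead} of ${file.size} bytes`);
  }
};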
Use Web Workers for Heavy Processing
Parsing or transforming a large file on the main thread blocks rendering; a Web Worker moves that work off the UI thread:

// worker.js
self.onmessage = (e) => {
  const file = e.data;
  // Do the heavy parsing/processing here, off the main thread.
  self.postMessage({ result: 'Processed file' });
};

// React component (main thread)
const worker = new Worker('worker.js');
worker.postMessage(largeFile); // largeFile is a File object, e.g. from an <input>
worker.onmessage = (e) => {
  console.log('Processed result:', e.data.result);
};
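Inside a component, create the worker in useEffect and terminate it on unmount so re-renders don't leak workers. A sketch of such a hook (useFileWorker is a name invented here, wrapping the same worker.js):

import { useEffect, useRef } from 'react';

const useFileWorker = (onResult) => {
  const workerRef = useRef(null);

  useEffect(() => {
    const worker = new Worker('worker.js');
    worker.onmessage = (e) => onResult(e.data.result);
    workerRef.current = worker;
    return () => worker.terminate(); // Clean up on unmount.
  }, [onResult]);

  // Caller invokes this with a File to start processing.
  return (file) => workerRef.current?.postMessage(file);
};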
Implement Lazy Loading for File Rendering
import LazyLoad from 'react-lazyload';

const LargeFileComponent = ({ file }) => (
  <LazyLoad height={200} offset={100}>
    <img src={file.url} alt="Large file" />
  </LazyLoad>
);
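If you would rather skip the dependency, modern browsers also support native lazy loading for images:

// Native alternative: the browser defers the fetch until the image nears the viewport.
const NativeLazyImage = ({ file }) => (
  <img src={file.url} alt="Large file" loading="lazy" />
);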
Compress Files Before Upload
import Compressor from 'compressorjs';

const compressFile = (file) => {
  new Compressor(file, {
    quality: 0.6, // 0–1; lower means a smaller file at the cost of fidelity
    success(result) {
      console.log('Compressed file:', result);
    },
    error(err) {
      console.error('Compression failed:', err.message);
    },
  });
};
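compressorjs is callback-based; a small Promise wrapper (the compress helper here is invented for this sketch) makes it easy to await compression before handing the result to the uploader from the first section:

const compress = (file, quality = 0.6) =>
  new Promise((resolve, reject) => {
    new Compressor(file, { quality, success: resolve, error: reject });
  });

// Usage: shrink the file, then reuse uploadFile defined earlier.
const handleFile = async (file) => {
  const compressed = await compress(file);
  await uploadFile(compressed);
};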
Use Virtualization for Large Lists
import { FixedSizeList as List } from 'react-window';

const Row = ({ index, style }) => (
  <div style={style}>Row {index}</div>
);

const LargeList = () => (
  <List height={400} itemCount={1000} itemSize={35} width={300}>
    {Row}
  </List>
);
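Virtualization pairs naturally with the file techniques above: once a large text file is parsed into lines, only the visible rows are mounted. A sketch (it uses file.text(), which does load the whole string into memory, so for truly huge files combine it with the chunked reader instead):

import { useEffect, useState } from 'react';
import { FixedSizeList as List } from 'react-window';

const FileLineViewer = ({ file }) => {
  const [lines, setLines] = useState([]);

  useEffect(() => {
    // Parse once; react-window then renders only the rows in view.
    file.text().then((text) => setLines(text.split('\n')));
  }, [file]);

  return (
    <List height={400} itemCount={lines.length} itemSize={24} width={300}>
      {({ index, style }) => <div style={style}>{lines[index]}</div>}
    </List>
  );
};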