Large Dataset Example
Handle 10,000+ rows efficiently with virtual scrolling
This example demonstrates how ImportCSV handles large CSV files with thousands of rows using virtual scrolling and progressive validation.
Live Example
import { CSVImporter } from '@importcsv/react';
import { useState } from 'react';

export default function LargeDatasetExample() {
  const [importStats, setImportStats] = useState(null);
  const [isOpen, setIsOpen] = useState(false);

  const columns = [
    {
      id: 'transaction_id',
      label: 'Transaction ID',
      validators: [
        { type: 'required' },
        { type: 'unique', message: 'Transaction ID must be unique' }
      ]
    },
    {
      id: 'date',
      label: 'Date',
      type: 'date',
      validators: [{ type: 'required' }],
      transformations: [
        { type: 'normalize_date', format: 'YYYY-MM-DD' }
      ]
    },
    {
      id: 'customer_email',
      label: 'Customer Email',
      type: 'email',
      validators: [{ type: 'required' }],
      transformations: [
        { type: 'trim' },
        { type: 'lowercase' }
      ]
    },
    {
      id: 'amount',
      label: 'Amount',
      type: 'number',
      validators: [
        { type: 'required' },
        { type: 'min', value: 0, message: 'Amount must be positive' }
      ]
    },
    {
      id: 'status',
      label: 'Status',
      type: 'select',
      options: ['pending', 'completed', 'failed', 'refunded'],
      validators: [{ type: 'required' }],
      transformations: [
        { type: 'lowercase' }
      ]
    },
    {
      id: 'notes',
      label: 'Notes',
      transformations: [
        { type: 'trim' },
        { type: 'default', value: 'N/A' }
      ]
    }
  ];

  const handleComplete = (data) => {
    setImportStats({
      totalRows: data.num_rows,
      totalColumns: data.num_columns,
      sampleData: data.rows.slice(0, 5), // Show first 5 rows
      timestamp: new Date().toISOString()
    });
    setIsOpen(false);
    console.log(`Successfully imported ${data.num_rows} rows`);
  };

  return (
    <div>
      <button
        onClick={() => setIsOpen(true)}
        className="px-4 py-2 bg-blue-500 text-white rounded"
      >
        Import Large Dataset
      </button>

      <CSVImporter
        modalIsOpen={isOpen}
        modalOnCloseTriggered={() => setIsOpen(false)}
        columns={columns}
        onComplete={handleComplete}
        primaryColor="#3B82F6"
        darkMode={false}
      />

      {importStats && (
        <div className="mt-4 p-4 border rounded">
          <h3 className="font-bold mb-2">Import Statistics</h3>
          <p>✅ Successfully imported {importStats.totalRows} rows</p>
          <p>📊 Total columns: {importStats.totalColumns}</p>
          <p>🕐 Imported at: {new Date(importStats.timestamp).toLocaleTimeString()}</p>

          <h4 className="font-bold mt-4 mb-2">Sample Data (First 5 Rows)</h4>
          <pre className="text-xs bg-gray-100 p-2 rounded overflow-auto">
            {JSON.stringify(importStats.sampleData, null, 2)}
          </pre>
        </div>
      )}
    </div>
  );
}
Performance Features in Action
1. Virtual Scrolling
When you load a file with thousands of rows, notice:
- Smooth scrolling without lag
- Instant initial render
- Memory usage stays roughly constant as the row count grows
2. Progressive Validation
Watch the validation process:
- First 50 rows validate instantly
- "Validating..." status appears for remaining rows
- UI remains responsive during validation (see the long-task sketch after this list)
- Errors appear progressively
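To see for yourself that validation is not blocking the main thread, you can log long tasks while the importer is open. This is a minimal observation sketch using the browser's Long Tasks API (currently reported by Chromium-based browsers only); it is independent of ImportCSV:

// Warn whenever a main-thread task blocks for more than 50ms
const longTaskObserver = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.warn(`Long task blocked the main thread for ${Math.round(entry.duration)}ms`);
  }
});
longTaskObserver.observe({ entryTypes: ['longtask'] });

// Call longTaskObserver.disconnect() once the import completes

If validation ran in one synchronous pass, you would see multi-second entries here; with progressive validation the logged tasks should stay short.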
Sample Large Dataset
Generate a test CSV with 10,000 rows:
// Generate sample CSV data
function generateLargeCSV(rows = 10000) {
  const statuses = ['pending', 'completed', 'failed', 'refunded'];
  const headers = 'Transaction ID,Date,Customer Email,Amount,Status,Notes\n';

  let csv = headers;
  for (let i = 1; i <= rows; i++) {
    const row = [
      `TXN${String(i).padStart(6, '0')}`,
      `2024-${String((i % 12) + 1).padStart(2, '0')}-${String((i % 28) + 1).padStart(2, '0')}`,
      `customer${i}@example.com`,
      (Math.random() * 1000).toFixed(2),
      statuses[i % statuses.length],
      i % 10 === 0 ? `Special order #${i}` : ''
    ].join(',');
    csv += row + '\n';
  }

  return csv;
}

// Create and download the CSV
function downloadLargeCSV() {
  const csv = generateLargeCSV(10000);
  const blob = new Blob([csv], { type: 'text/csv' });
  const url = URL.createObjectURL(blob);

  const a = document.createElement('a');
  a.href = url;
  a.download = 'large-dataset-10k-rows.csv';
  a.click();
  URL.revokeObjectURL(url); // Release the object URL after the download starts
}
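If you prefer to generate the fixture outside the browser, the same generator works in Node.js. A minimal sketch, assuming Node 18+ and that generateLargeCSV from above has been copied into (or imported by) the script:

// generate-fixture.mjs (run with: node generate-fixture.mjs)
import { writeFileSync } from 'node:fs';

// generateLargeCSV is assumed to be defined above or imported from a shared module
const csv = generateLargeCSV(10000);
writeFileSync('large-dataset-10k-rows.csv', csv, 'utf8');
console.log(`Wrote ${csv.length} characters to large-dataset-10k-rows.csv`);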
Performance Monitoring
Monitor performance while importing:
import { useState } from 'react';
import { CSVImporter } from '@importcsv/react';

function PerformanceMonitor({ onImport }) {
  const [metrics, setMetrics] = useState({
    startTime: null,
    endTime: null,
    memoryUsage: null
  });

  const startMonitoring = () => {
    setMetrics({
      startTime: performance.now(),
      memoryUsage: performance.memory?.usedJSHeapSize || 0 // Chrome-only API
    });
  };

  const endMonitoring = () => {
    setMetrics(prev => ({
      ...prev,
      endTime: performance.now(),
      finalMemory: performance.memory?.usedJSHeapSize || 0
    }));
  };

  const importTime = metrics.endTime - metrics.startTime;
  const memoryDelta = metrics.finalMemory - metrics.memoryUsage;

  return (
    <div>
      <CSVImporter
        onComplete={(data) => {
          endMonitoring();
          onImport(data);
        }}
        modalOnOpenTriggered={startMonitoring}
        // ... other props
      />

      {metrics.endTime && (
        <div className="performance-stats">
          <p>⏱️ Import time: {(importTime / 1000).toFixed(2)}s</p>
          <p>💾 Memory used: {(memoryDelta / 1024 / 1024).toFixed(2)}MB</p>
        </div>
      )}
    </div>
  );
}
Tips for Large Files
1. Optimize Validators
// ❌ Complex regex - slower
{ type: 'regex', pattern: '^(?:[a-z0-9!#$%&\'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&\'*+/=?^_`{|}~-]+)*|"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)*[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])$' }
// ✅ Use built-in type - faster
{ type: 'email' }
2. Minimize Transformations
// Only apply necessary transformations
columns: [
  {
    id: 'email',
    transformations: [
      { type: 'trim' },     // Essential
      { type: 'lowercase' } // Essential
      // Don't add unnecessary transformations
    ]
  }
]
3. Handle Results Efficiently
const handleComplete = async (data) => {
  // Process in batches for very large datasets
  const BATCH_SIZE = 1000;

  for (let i = 0; i < data.rows.length; i += BATCH_SIZE) {
    const batch = data.rows.slice(i, i + BATCH_SIZE);
    await processBatch(batch);

    // Update progress (clamped to 100% on the final batch)
    setProgress(Math.min(100, ((i + BATCH_SIZE) / data.rows.length) * 100));
  }
};
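processBatch and setProgress above are placeholders for your own code. Here is a minimal sketch of processBatch, assuming a hypothetical /api/transactions/bulk endpoint that accepts a JSON array of rows; substitute your own backend call:

async function processBatch(batch) {
  // The endpoint below is illustrative; point this at your own API
  const response = await fetch('/api/transactions/bulk', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ rows: batch })
  });

  if (!response.ok) {
    throw new Error(`Batch upload failed with status ${response.status}`);
  }
}

Keeping batches to around 1,000 rows bounds each request payload and gives the progress indicator a chance to update between batches.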
Expected Performance
Based on real-world testing:
| Rows | Load Time | Validation Time | Smooth Scrolling |
|---|---|---|---|
| 100 | Instant | < 0.1s | ✅ Yes |
| 1,000 | Instant | < 0.5s | ✅ Yes |
| 5,000 | Instant | ~2s | ✅ Yes |
| 10,000 | Instant | ~4s | ✅ Yes |
Browser Requirements
For optimal performance with large datasets:
- Chrome 90+ (recommended)
- 4GB+ RAM
- Modern CPU (2015+)
Troubleshooting
File takes too long to validate
- Simplify validators (use built-in types)
- Reduce number of required fields
- Consider server-side validation for 10,000+ rows (see the sketch after this list)
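One way to move the heavy checks server-side is to keep only lightweight validators (required, type) in the browser and let your backend handle expensive rules such as uniqueness against existing records. A minimal sketch, assuming a hypothetical /api/imports/validate endpoint that returns per-row errors:

const handleComplete = async (data) => {
  // Lightweight checks ran in the browser; defer the expensive ones to the server
  const response = await fetch('/api/imports/validate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ rows: data.rows })
  });

  const { errors } = await response.json();
  if (errors.length > 0) {
    console.warn(`Server-side validation flagged ${errors.length} rows`);
  }
};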
Browser becomes unresponsive
- Check file size (should be < 50MB)
- Close other tabs to free memory
- Try Chrome for best performance
Memory issues
- Monitor with Chrome DevTools
- Process data in batches after import
- Consider pagination for display