Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions .husky/pre-commit
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Run TypeScript type checking
npx tsc --noEmit

Expand Down
86 changes: 59 additions & 27 deletions src/components/CSVImportDialog.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,11 @@ import {
} from '@mui/material';
import { Upload } from '@mui/icons-material';
import {
parseCSVToPVs,
createTagMapping,
parseCSVToPVsAsync,
createValidationSummary,
validateCSVTags,
ParsedCSVRow,
ParseProgress,
} from '../utils/csvParser';

interface CSVImportDialogProps {
Expand All @@ -50,6 +51,8 @@ export function CSVImportDialog({
const [importing, setImporting] = useState(false);
const [fileSelected, setFileSelected] = useState(false);
const [importError, setImportError] = useState<string | null>(null);
const [parsingProgress, setParsingProgress] = useState<ParseProgress | null>(null);
const [parsing, setParsing] = useState(false);

const handleClose = () => {
setCSVData([]);
Expand All @@ -67,49 +70,54 @@ export function CSVImportDialog({
if (!file) return;

try {
setParsing(true);
setParsingProgress({ processedRows: 0, totalRows: 0, status: 'parsing' });
setParseErrors([]);
setCSVData([]);
setValidationSummary('');
setFileSelected(false);

const content = await file.text();
const result = parseCSVToPVs(content);

// Use async parser for better performance with large files
const result = await parseCSVToPVsAsync(content, (progress) => {
setParsingProgress(progress);
});

if (result.errors.length > 0) {
setParseErrors(result.errors);
setCSVData([]);
setValidationSummary('');
setFileSelected(false);
setParsing(false);
setParsingProgress(null);
return;
}

setCSVData(result.data);
setParseErrors([]);
setFileSelected(true);
setParsing(false);
setParsingProgress(null);

// Validate tags
// Validate tags with progress feedback
if (result.data.length > 0) {
// Collect all rejected groups and values across all rows
const allRejectedGroups = new Set<string>();
const allRejectedValues: Record<string, Set<string>> = {};

result.data.forEach((row) => {
const mapping = createTagMapping(row.groups, availableTagGroups);

mapping.rejectedGroups.forEach((group) => allRejectedGroups.add(group));

Object.entries(mapping.rejectedValues).forEach(([group, values]) => {
if (!allRejectedValues[group]) {
allRejectedValues[group] = new Set();
}
values.forEach((value) => allRejectedValues[group].add(value));
});
setParsing(true);
setParsingProgress({
processedRows: 0,
totalRows: result.data.length,
status: 'validating',
});

// Convert sets to arrays
const rejectedGroups = Array.from(allRejectedGroups);
const rejectedValues: Record<string, string[]> = {};
Object.entries(allRejectedValues).forEach(([group, valueSet]) => {
rejectedValues[group] = Array.from(valueSet);
});
const validationResults = await validateCSVTags(result.data, availableTagGroups);

const summary = createValidationSummary(rejectedGroups, rejectedValues);
const summary = createValidationSummary(
validationResults.rejectedGroups,
validationResults.rejectedValues
);
setValidationSummary(summary);
setParsing(false);
setParsingProgress(null);
}
} catch (error) {
setParseErrors([
Expand All @@ -118,6 +126,8 @@ export function CSVImportDialog({
setCSVData([]);
setValidationSummary('');
setFileSelected(false);
setParsing(false);
setParsingProgress(null);
}

// Reset file input
Expand Down Expand Up @@ -158,13 +168,35 @@ export function CSVImportDialog({
variant="contained"
component="span"
startIcon={<Upload />}
disabled={importing}
disabled={importing || parsing}
>
Select CSV File
</Button>
</label>
</Box>

{/* Parsing Progress */}
{parsing && parsingProgress && (
<Alert severity="info">
<Box sx={{ display: 'flex', alignItems: 'center', gap: 2 }}>
<CircularProgress size={20} />
<Box sx={{ flex: 1 }}>
<Typography variant="body2" sx={{ fontWeight: 'bold' }}>
{parsingProgress.status === 'parsing'
? 'Parsing CSV file...'
: 'Validating tags...'}
</Typography>
<Typography variant="caption" color="text.secondary">
Processed {parsingProgress.processedRows} of {parsingProgress.totalRows} rows
</Typography>
</Box>
<Typography variant="caption" color="text.secondary">
{Math.round((parsingProgress.processedRows / parsingProgress.totalRows) * 100)}%
</Typography>
</Box>
</Alert>
)}

{/* CSV Format Instructions */}
<Alert severity="info">
<Typography variant="body2" sx={{ mb: 1 }}>
Expand Down
191 changes: 136 additions & 55 deletions src/utils/csvParser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@ export interface ParsedCSVResult {
errors: string[];
}

/**
 * Progress snapshot reported by the async CSV import helpers
 * (parsing and tag validation) so the dialog can render a progress UI.
 */
export interface ParseProgress {
  /** Rows handled so far in the current phase */
  processedRows: number;
  /** Total rows in the current phase; the dialog initializes this to 0 before the file is split into lines */
  totalRows: number;
  /** Current phase of the import pipeline */
  status: 'parsing' | 'validating' | 'complete';
}

/**
* Parse a single CSV line, handling quoted fields
* Simple CSV parser that handles basic quoting
Expand Down Expand Up @@ -57,19 +63,17 @@ function parseCSVLine(line: string): string[] {
}

/**
* Parse CSV file content into PV data structure
*
* CSV Format (matches Python parse_csv_to_dict):
* - Required columns: "Setpoint" or "Readback" (at least one)
* - Optional columns: "Device", "Description"
* - Any additional columns are treated as tag groups
* - Tag values can be comma-separated (e.g., "tag1, tag2")
* - Filters out 'nan' and 'none' values
* Async CSV parser with progress feedback for large files
* Processes the file in chunks to prevent UI blocking and provides progress updates
*
* @param csvContent - Raw CSV file content as string
* @param onProgress - Optional callback for progress updates (processedRows, totalRows, status)
* @returns Parsed PV data with tag groups and any errors
*/
export function parseCSVToPVs(csvContent: string): ParsedCSVResult {
export async function parseCSVToPVsAsync(
csvContent: string,
onProgress?: (progress: ParseProgress) => void
): Promise<ParsedCSVResult> {
const errors: string[] = [];
const data: ParsedCSVRow[] = [];

Expand Down Expand Up @@ -100,58 +104,81 @@ export function parseCSVToPVs(csvContent: string): ParsedCSVResult {
const standardColumns = ['Setpoint', 'Readback', 'Device', 'Description'];
const groupColumns = cleanedHeaders.filter((col) => !standardColumns.includes(col));

// Parse data rows (starting from row 2 in 1-indexed terms, row 1 in 0-indexed)
for (let i = 1; i < lines.length; i += 1) {
const line = lines[i].trim();
if (line) {
const rowValues = parseCSVLine(line);
// Process data rows in chunks to prevent blocking
const totalRows = lines.length - 1; // Exclude header
const chunkSize = 100; // Process 100 rows at a time

// Create a row dictionary
const rowDict: Record<string, string> = {};
cleanedHeaders.forEach((header, index) => {
rowDict[header] = index < rowValues.length ? rowValues[index].trim() : '';
});
for (let start = 1; start < lines.length; start += chunkSize) {
const chunkEnd = Math.min(start + chunkSize, lines.length);
const chunkLines = lines.slice(start, chunkEnd);

const setpoint = rowDict.Setpoint || '';
const readback = rowDict.Readback || '';

// Only process row if at least one of setpoint or readback is present
if (setpoint || readback) {
const device = rowDict.Device || '';
const description = rowDict.Description || '';

// Parse tag groups
const groups: Record<string, string[]> = {};

groupColumns.forEach((groupName) => {
const cellValue = rowDict[groupName] || '';
const trimmedValue = cellValue.trim();

if (
trimmedValue &&
trimmedValue.toLowerCase() !== 'nan' &&
trimmedValue.toLowerCase() !== 'none'
) {
// Split comma-separated values and filter
const tagValues = trimmedValue
.split(',')
.map((val) => val.trim())
.filter((val) => val);
groups[groupName] = tagValues;
} else {
groups[groupName] = [];
}
});
// Process chunk
for (let i = 0; i < chunkLines.length; i += 1) {
const line = chunkLines[i].trim();
if (line) {
const rowValues = parseCSVLine(line);

data.push({
Setpoint: setpoint,
Readback: readback,
Device: device,
Description: description,
groups,
// Create a row dictionary
const rowDict: Record<string, string> = {};
cleanedHeaders.forEach((header, index) => {
rowDict[header] = index < rowValues.length ? rowValues[index].trim() : '';
});

const setpoint = rowDict.Setpoint || '';
const readback = rowDict.Readback || '';

// Only process row if at least one of setpoint or readback is present
if (setpoint || readback) {
const device = rowDict.Device || '';
const description = rowDict.Description || '';

// Parse tag groups
const groups: Record<string, string[]> = {};

groupColumns.forEach((groupName) => {
const cellValue = rowDict[groupName] || '';
const trimmedValue = cellValue.trim();

if (
trimmedValue &&
trimmedValue.toLowerCase() !== 'nan' &&
trimmedValue.toLowerCase() !== 'none'
) {
// Split comma-separated values and filter
const tagValues = trimmedValue
.split(',')
.map((val) => val.trim())
.filter((val) => val);
groups[groupName] = tagValues;
} else {
groups[groupName] = [];
}
});

data.push({
Setpoint: setpoint,
Readback: readback,
Device: device,
Description: description,
groups,
});
}
}
}

// Report progress and yield control to prevent blocking
const processedRows = Math.min(start + chunkLines.length - 1, totalRows);
if (onProgress) {
onProgress({
processedRows,
totalRows,
status: 'parsing',
});
}

// Yield control to allow UI updates
// eslint-disable-next-line no-await-in-loop, no-promise-executor-return
await new Promise((resolve) => setTimeout(resolve, 0));
}

return {
Expand Down Expand Up @@ -252,3 +279,57 @@ export function createValidationSummary(

return summaryParts.length > 0 ? summaryParts.join(' • ') : 'All groups and values are valid';
}

/**
 * Aggregated result of bulk tag validation for CSV import, produced by
 * validateCSVTags after checking every parsed CSV row's tag groups against
 * the tag groups available from the backend.
 */
export interface BulkTagValidationResult {
  /** CSV group names that do not exist in the backend at all */
  rejectedGroups: string[];
  /** CSV tag values, keyed by group name, that the backend does not define for that group */
  rejectedValues: Record<string, string[]>;
}

export function validateCSVTags(
csvData: ParsedCSVRow[],
availableTagGroups: Array<{
id: string;
name: string;
tags: Array<{ id: string; name: string }>;
}>
): BulkTagValidationResult {
// Collect all rejected groups and values across all rows
const allRejectedGroups = new Set<string>();
const allRejectedValues: Record<string, Set<string>> = {};

// Process each row
csvData.forEach((row) => {
const mapping = createTagMapping(row.groups, availableTagGroups);

// Collect rejected groups (groups that don't exist in backend)
mapping.rejectedGroups.forEach((group) => allRejectedGroups.add(group));

// Collect rejected values (values that don't exist for their group in backend)
Object.entries(mapping.rejectedValues).forEach(([group, values]) => {
if (!allRejectedValues[group]) {
allRejectedValues[group] = new Set();
}
values.forEach((value) => allRejectedValues[group].add(value));
});
});

// Convert sets to arrays
const rejectedGroups = Array.from(allRejectedGroups);
const rejectedValues: Record<string, string[]> = {};
Object.entries(allRejectedValues).forEach(([group, valueSet]) => {
rejectedValues[group] = Array.from(valueSet);
});

return {
rejectedGroups,
rejectedValues,
};
}