Commit 4845396
Changed files (3)
lib/components/dialoges/import_preview_dialoge.dart
lib/model/export_import/csv_converter.dart
lib/model/export_import/csv_record_parsing_actor.dart
lib/components/dialoges/import_preview_dialoge.dart
@@ -47,9 +47,6 @@ class _ImportPreviewDialogeState extends State<ImportPreviewDialoge> {
/// Whether to limit shown rows to [_kRowLimit] for faster rendering.
bool _limitRows = true;
- /// Whether the CSV file has a title row that should be ignored.
- bool _csvHasTitle = true;
-
@override
void initState() {
super.initState();
@@ -57,6 +54,8 @@ class _ImportPreviewDialogeState extends State<ImportPreviewDialoge> {
SchedulerBinding.instance.addPostFrameCallback((_) => _updateBanner());
}
+ // FIXME: multiple columns update type
+
void _updateBanner() {
if (_showingError) {
_showingError = false;
@@ -138,10 +137,10 @@ class _ImportPreviewDialogeState extends State<ImportPreviewDialoge> {
),
),
],
- value: _actor.columnParsers[_actor.columnNames[colIdx]],
+ value: _actor.columnParsers[colIdx],
onChanged: (parser) {
setState(() {
- _actor.changeColumnParser(_actor.columnNames[colIdx], parser);
+ _actor.changeColumnParser(colIdx, parser);
});
_updateBanner();
},
@@ -153,7 +152,7 @@ class _ImportPreviewDialogeState extends State<ImportPreviewDialoge> {
_buildCell(
rowIdx,
_actor.dataLines[rowIdx][colIdx],
- _actor.columnParsers[_actor.columnNames[colIdx]],
+ _actor.columnParsers[colIdx],
),
if (_limitRows && _kRowLimit < _actor.dataLines.length)
Align(
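
Note: the dropdown above now looks up and updates parsers by column index (colIdx) rather than by column title. A side effect of position-based indexing is that duplicate headline titles no longer collapse into a single map entry; a minimal standalone Dart sketch (titles and parser placeholders are illustrative, not taken from the app):

void main() {
  final titles = ['timestamp', 'sys', 'dia', 'sys'];

  // Keyed by title: the duplicate 'sys' column collapses into one entry.
  final byName = {for (final t in titles) t: 'parser for $t'};
  print(byName.length); // 3

  // Keyed by position: every physical column keeps its own parser slot.
  final byIndex = List<String?>.generate(titles.length, (i) => 'parser #$i');
  print(byIndex.length); // 4
}
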
lib/model/export_import/csv_converter.dart
@@ -52,15 +52,19 @@ class CsvConverter {
// Get and validate columns from csv title.
final List<String> titles = lines.removeAt(0).cast();
- final Map<String, ExportColumn> columns = getColumns(titles);
+ final List<ExportColumn?> columns = [];
+ final assumedColumns = getColumns(titles);
+ for (final csvName in titles) {
+ columns.add(assumedColumns[csvName]);
+ }
// TODO: consider returning `RecordParsingResult.err(RecordParsingErrorUnknownColumn(columnTitle))` for unknownColumns
- if (columns.values.none((e) => e.restoreAbleType == RowDataFieldType.timestamp)) {
+ if (columns.none((e) => e?.restoreAbleType == RowDataFieldType.timestamp)) {
return RecordParsingResult.err(RecordParsingErrorTimeNotRestoreable());
}
// Convert data to records.
- return parseRecords(lines, titles, columns);
+ return parseRecords(lines, columns);
}
/// Parses lines from csv files according to settings.
@@ -90,30 +94,29 @@ class CsvConverter {
return columns;
}
- /// Parse csv data in [dataLines] using [parsers] according to [orderedColumns].
+ /// Parse csv data in [dataLines] using [parsers].
///
/// [dataLines] contains all lines of the csv file without the headline and
- /// [orderedColumns] must have the same length as every line in [dataLines]
+ /// [parsers] must have the same length as every line in [dataLines]
/// for parsing to succeed.
///
/// [assumeHeadline] controls whether the line number should be offset by one
/// in case of error.
RecordParsingResult parseRecords(
List<List<String>> dataLines,
- List<String> orderedColumns,
- Map<String,ExportColumn> parsers, [
+ List<ExportColumn?> parsers, [
bool assumeHeadline = true,
]) {
final List<BloodPressureRecord> records = [];
int currentLineNumber = assumeHeadline ? 1 : 0;
for (final currentLine in dataLines) {
- if (currentLine.length < orderedColumns.length) {
+ if (currentLine.length < parsers.length) {
return RecordParsingResult.err(RecordParsingErrorExpectedMoreFields(currentLineNumber));
}
final List<(RowDataFieldType, dynamic)> recordPieces = [];
- for (int fieldIndex = 0; fieldIndex < orderedColumns.length; fieldIndex++) {
- final parser = parsers[orderedColumns[fieldIndex]];
+ for (int fieldIndex = 0; fieldIndex < parsers.length; fieldIndex++) {
+ final parser = parsers[fieldIndex];
final piece = parser?.decode(currentLine[fieldIndex]);
// Validate that the column parsed the expected type.
// Null can be the result of empty fields.
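
Note: convert() now resolves each headline title through the map returned by getColumns and stores the results positionally, so a title that getColumns does not recognize becomes a null entry that parseRecords later skips via parser?.decode. A minimal sketch of that ordering step, with String standing in for ExportColumn and made-up titles:

List<String?> orderParsers(List<String> titles, Map<String, String> assumed) =>
    [for (final csvName in titles) assumed[csvName]];

void main() {
  final parsers = orderParsers(
    ['timestamp', 'sys', 'unknown title'],
    {'timestamp': 'time parser', 'sys': 'systolic parser'},
  );
  print(parsers); // [time parser, systolic parser, null]
}
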
lib/model/export_import/csv_record_parsing_actor.dart
@@ -13,7 +13,11 @@ class CsvRecordParsingActor {
_firstLine = lines.removeAt(0);
_bodyLines = lines;
_columnNames = _firstLine ?? [];
- _columnParsers = _converter.getColumns(_columnNames);
+
+ final assumedColumns = _converter.getColumns(_columnNames);
+ for (final csvName in _columnNames) {
+ _columnParsers.add(assumedColumns[csvName]);
+ }
}
final CsvConverter _converter;
@@ -36,28 +40,26 @@ class CsvRecordParsingActor {
/// All columns defined in the csv headline.
List<String> get columnNames => _columnNames;
- late Map<String, ExportColumn> _columnParsers;
+ late final List<ExportColumn?> _columnParsers = [];
/// The current interpretation of columns in the csv data.
///
+ /// These parsers are ordered the same way as [columnNames].
+ ///
/// There is no guarantee that every column in [columnNames] has a parser.
- Map<String, ExportColumn> get columnParsers => _columnParsers;
+ UnmodifiableListView<ExportColumn?> get columnParsers
+ => UnmodifiableListView(_columnParsers);
/// Whether the CSV file has a title row (first line) that contains no data.
bool hasHeadline = true;
/// Override a column's parser with a custom one.
- void changeColumnParser(String columnName, ExportColumn? parser) {
- assert(_columnNames.contains(columnName));
- if (parser == null) {
- _columnParsers.remove(columnName);
- return;
- }
- _columnParsers[columnName] = parser;
+ void changeColumnParser(int columnIdx, ExportColumn? parser) {
+ assert(_columnParsers.length > columnIdx);
+ _columnParsers[columnIdx] = parser;
}
/// Try to parse the data with the current configuration.
- RecordParsingResult attemptParse() {
- return _converter.parseRecords(dataLines, columnNames, columnParsers, false);
- }
+ RecordParsingResult attemptParse() =>
+ _converter.parseRecords(dataLines, columnParsers, false);
}
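
Note: columnParsers is now exposed as an UnmodifiableListView, so callers can read the ordered parsers but have to mutate through changeColumnParser, which keeps the bounds assert in one place. A standalone sketch of the same pattern (ParserStore and its string values are hypothetical stand-ins, not app code):

import 'dart:collection';

class ParserStore {
  final List<String?> _parsers = ['time', 'sys', null];

  // Read-only view: external code cannot mutate the backing list directly.
  UnmodifiableListView<String?> get parsers => UnmodifiableListView(_parsers);

  // All mutation goes through this method, mirroring changeColumnParser.
  void change(int idx, String? parser) {
    assert(_parsers.length > idx);
    _parsers[idx] = parser;
  }
}

void main() {
  final store = ParserStore()..change(2, 'dia');
  print(store.parsers); // [time, sys, dia]
  // store.parsers[0] = 'x'; // would throw UnsupportedError at runtime
}
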