diff --git a/src/daq/store/csv.py b/src/daq/store/csv.py
index c4b9c93..cc08f52 100644
--- a/src/daq/store/csv.py
+++ b/src/daq/store/csv.py
@@ -98,21 +98,17 @@ def store_loop(self):
                 files_to_delete.append(file_path)
                 continue
             writer = csv.writer(file.file)
-            rows_to_write = []
-
-            # Write rows in batches
-            rows_to_write.clear()
-            for _ in range(DAQ_JOB_STORE_CSV_WRITE_BATCH_SIZE):
-                try:
-                    rows_to_write.append(file.write_queue.pop())
-                except IndexError:
-                    break
-            if len(rows_to_write) > 0:
-                writer.writerows(rows_to_write)
+
+            row_size = len(file.write_queue)
+            if row_size > 0:
+                # Consume the queued rows (in insertion order) so they are
+                # not re-written on the next loop iteration.
+                rows = [file.write_queue.popleft() for _ in range(row_size)]
+                writer.writerows(rows)
 
             # Flush if the flush time is up
             if self._flush(file):
-                self._logger.debug(f"Flushed '{file.file.name}'")
+                self._logger.debug(f"Flushed '{file.file.name}' ({row_size} rows)")
 
         for file_path in files_to_delete:
             del self._open_csv_files[file_path]
diff --git a/src/tests/test_csv.py b/src/tests/test_csv.py
index 6c5843b..3a0c8fd 100644
--- a/src/tests/test_csv.py
+++ b/src/tests/test_csv.py
@@ -110,7 +110,7 @@ def test_store_loop_writerows(self, mock_csv_writer):
         self.store.store_loop()
 
         mock_writer_instance.writerows.assert_called_with(
-            [["row2_col1", "row2_col2"], ["row1_col1", "row1_col2"]]
+            [["row1_col1", "row1_col2"], ["row2_col1", "row2_col2"]]
         )
 
         self.assertTrue(file.file.flush.called)