From c38d0b3037095c2a3364e9d9f40d1da402193604 Mon Sep 17 00:00:00 2001
From: Gavin Wiggins <6828967+wigging@users.noreply.github.com>
Date: Thu, 9 Jan 2025 13:50:33 -0500
Subject: [PATCH 1/5] Remove duplicate make target and command

---
 Makefile | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/Makefile b/Makefile
index b9d622ee..eb7c0f6c 100644
--- a/Makefile
+++ b/Makefile
@@ -96,7 +96,3 @@ tests:
 .PHONY: tests-notebooks
 tests-notebooks:
 	pytest --nbmake "notebooks/" --nbmake-timeout=600 --ignore=notebooks/dask_from_CLI.ipynb
-
-.PHONY: tests-all
-tests-all:
-	pytest

From a9b30bdc3ca5e335e361f1aeedb3fb93a4568b65 Mon Sep 17 00:00:00 2001
From: Gavin Wiggins <6828967+wigging@users.noreply.github.com>
Date: Thu, 9 Jan 2025 14:01:44 -0500
Subject: [PATCH 2/5] Clean up checks workflow

---
 .github/workflows/checks.yml     | 31 ++++++++++++++++++++++++++
 .github/workflows/run-checks.yml | 38 --------------------------------
 2 files changed, 31 insertions(+), 38 deletions(-)
 create mode 100644 .github/workflows/checks.yml
 delete mode 100644 .github/workflows/run-checks.yml

diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
new file mode 100644
index 00000000..51e6adf7
--- /dev/null
+++ b/.github/workflows/checks.yml
@@ -0,0 +1,31 @@
+name: Linter, formatter, and docs checks
+
+on: pull_request
+
+jobs:
+
+  checks:
+    runs-on: ubuntu-22.04
+    if: "!contains(github.event.head_commit.message, 'CI Bot')"
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python 3.12
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+          cache: "pip"
+
+      - name: Install package and dependencies
+        run: |
+          pip install --upgrade pip
+          pip install ruff
+          pip install .[docs]
+
+      - name: Run linter and formatter checks using ruff
+        # run: make checks
+        run: ruff format --check --diff src
+
+      - name: Run HTML builder for Sphinx documentation
+        run: make docs
diff --git a/.github/workflows/run-checks.yml b/.github/workflows/run-checks.yml
deleted file mode 100644
index 9f7d2410..00000000
--- a/.github/workflows/run-checks.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-name: Linter, formatter, and docs checks
-on: [pull_request]
-
-permissions:
-  contents: read
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    if: "!contains(github.event.head_commit.message, 'CI Bot')"
-
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 1
-
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-          cache: "pip"
-
-      - name: Install package and dependencies
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install ruff
-          python -m pip install .[docs]
-
-      - name: Run linter and formatter checks using ruff
-        run: make checks
-
-      - name: Run HTML builder for Sphinx documentation
-        run: make docs
-
-      - name: Clean up
-        run: |
-          make clean
-          find /home/runner/runners/ -type f -name "*.log" -exec sh -c 'echo {}; >"{}"' \; || true

From 489d440dfb211ba148bbddbb7147b862f0ec1e7e Mon Sep 17 00:00:00 2001
From: Gavin Wiggins <6828967+wigging@users.noreply.github.com>
Date: Thu, 9 Jan 2025 14:26:10 -0500
Subject: [PATCH 3/5] Fix format errors

---
 src/flowcept/commons/daos/mq_dao/mq_dao_kafka.py       |  4 +---
 src/flowcept/commons/daos/mq_dao/mq_dao_redis.py       |  4 +---
 src/flowcept/commons/settings_factory.py               |  2 +-
 .../adapters/tensorboard/tensorboard_interceptor.py    |  5 ++---
 .../flowceptor/adapters/zambeze/zambeze_interceptor.py |  6 ++----
 src/flowcept/flowceptor/consumers/document_inserter.py | 10 +++++-----
 6 files changed, 12 insertions(+), 19 deletions(-)

diff --git a/src/flowcept/commons/daos/mq_dao/mq_dao_kafka.py b/src/flowcept/commons/daos/mq_dao/mq_dao_kafka.py
index c64b1db6..b2e180db 100644
--- a/src/flowcept/commons/daos/mq_dao/mq_dao_kafka.py
+++ b/src/flowcept/commons/daos/mq_dao/mq_dao_kafka.py
@@ -72,9 +72,7 @@ def send_message(self, message: dict, channel=MQ_CHANNEL, serializer=msgpack.dum
     def _bulk_publish(self, buffer, channel=MQ_CHANNEL, serializer=msgpack.dumps):
         for message in buffer:
             try:
-                self.logger.debug(
-                    f"Going to send Message:" f"\n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
-                )
+                self.logger.debug(f"Going to send Message:\n\t[BEGIN_MSG]{message}\n[END_MSG]\t")
                 self._producer.produce(channel, key=channel, value=serializer(message))
             except Exception as e:
                 self.logger.exception(e)
diff --git a/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py b/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py
index 0f0fb798..56c1d578 100644
--- a/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py
+++ b/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py
@@ -50,9 +50,7 @@ def _bulk_publish(self, buffer, channel=MQ_CHANNEL, serializer=msgpack.dumps):
         pipe = self._producer.pipeline()
         for message in buffer:
             try:
-                self.logger.debug(
-                    f"Going to send Message:" f"\n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
-                )
+                self.logger.debug(f"Going to send Message:\n\t[BEGIN_MSG]{message}\n[END_MSG]\t")
                 pipe.publish(MQ_CHANNEL, serializer(message))
             except Exception as e:
                 self.logger.exception(e)
diff --git a/src/flowcept/commons/settings_factory.py b/src/flowcept/commons/settings_factory.py
index 9791d5b8..6801db9c 100644
--- a/src/flowcept/commons/settings_factory.py
+++ b/src/flowcept/commons/settings_factory.py
@@ -41,7 +41,7 @@ def get_settings(adapter_key: str) -> BaseSettings:
     settings_dict = settings[Vocabulary.Settings.ADAPTERS][adapter_key]
     if not settings_dict:
         raise Exception(
-            f"You must specify the adapter <<{adapter_key}>> in" f" the settings YAML file."
+            f"You must specify the adapter <<{adapter_key}>> in the settings YAML file."
         )
     settings_dict["key"] = adapter_key
     kind = settings_dict[Vocabulary.Settings.KIND]
diff --git a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py
index ec6e5fc3..84980fb8 100644
--- a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py
+++ b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py
@@ -110,11 +110,10 @@ def observe(self):
         event_handler = InterceptionEventHandler(self, self.__class__.callback)
         while not os.path.isdir(self.settings.file_path):
             self.logger.debug(
-                f"I can't watch the file {self.settings.file_path}," f" as it does not exist."
+                f"I can't watch the file {self.settings.file_path}, as it does not exist."
             )
             self.logger.debug(
-                f"\tI will sleep for {self.settings.watch_interval_sec} sec."
-                f" to see if it appears."
+                f"\tI will sleep for {self.settings.watch_interval_sec} s to see if it appears."
             )
             sleep(self.settings.watch_interval_sec)

diff --git a/src/flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py b/src/flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py
index 05efc3ca..1062e390 100644
--- a/src/flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py
+++ b/src/flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py
@@ -54,7 +54,7 @@ def stop(self) -> bool:
             self._channel.stop_consuming()
         except Exception as e:
             self.logger.warning(
-                f"This exception is expected to occur after " f"channel.basic_cancel: {e}"
+                f"This exception is expected to occur after channel.basic_cancel: {e}"
             )
         sleep(2)
         self._observer_thread.join()
@@ -88,9 +88,7 @@ def observe(self):
         )

     def _intercept(self, body_obj):
-        self.logger.debug(
-            f"Zambeze interceptor needs to intercept this:" f"\n\t{json.dumps(body_obj)}"
-        )
+        self.logger.debug(f"Zambeze interceptor needs to intercept this:\n\t{json.dumps(body_obj)}")
         task_msg = self.prepare_task_msg(body_obj)
         self.intercept(task_msg.to_dict())

diff --git a/src/flowcept/flowceptor/consumers/document_inserter.py b/src/flowcept/flowceptor/consumers/document_inserter.py
index 325135ad..69d6bc0b 100644
--- a/src/flowcept/flowceptor/consumers/document_inserter.py
+++ b/src/flowcept/flowceptor/consumers/document_inserter.py
@@ -109,13 +109,13 @@ def _set_buffer_size(self):
     def flush_function(buffer, doc_daos, logger):
         """Flush it."""
         logger.info(
-            f"Current Doc buffer size: {len(buffer)}, " f"Gonna flush {len(buffer)} msgs to DocDBs!"
+            f"Current Doc buffer size: {len(buffer)}, Gonna flush {len(buffer)} msgs to DocDBs!"
         )
         for dao in doc_daos:
             dao.insert_and_update_many_tasks(buffer, TaskObject.task_id_field())
             logger.debug(
-                f"DocDao={id(dao)},DocDaoClass={dao.__class__.__name__};"
-                f" Flushed {len(buffer)} msgs to this DocDB!"
+                f"DocDao={id(dao)},DocDaoClass={dao.__class__.__name__};\
+                Flushed {len(buffer)} msgs to this DocDB!"
             )  # TODO: add name

     def _handle_task_message(self, message: Dict):
@@ -145,14 +145,14 @@ def _handle_task_message(self, message: Dict):
             remove_empty_fields_from_dict(message)

         self.logger.debug(
-            f"Received following Task msg in DocInserter:" f"\n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
+            f"Received following Task msg in DocInserter:\n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
         )
         self.buffer.append(message)

     def _handle_workflow_message(self, message: Dict):
         message.pop("type")
         self.logger.debug(
-            f"Received following Workflow msg in DocInserter: \n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
+            f"Received following Workflow msg in DocInserter:\n\t[BEGIN_MSG]{message}\n[END_MSG]\t"
         )
         if REMOVE_EMPTY_FIELDS:
             remove_empty_fields_from_dict(message)

From 478db55aff7b2180f8a5a6c7ef5636ce1061c114 Mon Sep 17 00:00:00 2001
From: Gavin Wiggins <6828967+wigging@users.noreply.github.com>
Date: Wed, 8 Jan 2025 11:12:58 -0500
Subject: [PATCH 4/5] Delay log file creation

---
 .github/workflows/checks.yml            | 4 +---
 src/flowcept/commons/flowcept_logger.py | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 51e6adf7..acc034e5 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -7,7 +7,6 @@ jobs:
   checks:
     runs-on: ubuntu-22.04
     if: "!contains(github.event.head_commit.message, 'CI Bot')"
-
     steps:
       - uses: actions/checkout@v4

@@ -24,8 +23,7 @@ jobs:
           pip install .[docs]

       - name: Run linter and formatter checks using ruff
-        # run: make checks
-        run: ruff format --check --diff src
+        run: make checks

       - name: Run HTML builder for Sphinx documentation
         run: make docs
diff --git a/src/flowcept/commons/flowcept_logger.py b/src/flowcept/commons/flowcept_logger.py
index dbf0567f..ecbc97f0 100644
--- a/src/flowcept/commons/flowcept_logger.py
+++ b/src/flowcept/commons/flowcept_logger.py
@@ -37,7 +37,7 @@ def _build_logger(cls):
         logger.addHandler(stream_handler)

         if file_level <= logging.CRITICAL:
-            file_handler = logging.FileHandler(LOG_FILE_PATH, mode="a+")
+            file_handler = logging.FileHandler(LOG_FILE_PATH, delay=True, mode="a+")
             file_handler.setLevel(file_level)
             file_format = logging.Formatter(f"[%(asctime)s]{_BASE_FORMAT}")
             file_handler.setFormatter(file_format)

From a946e0b9650c232faba4165ff0dc50c617377401 Mon Sep 17 00:00:00 2001
From: Gavin Wiggins <6828967+wigging@users.noreply.github.com>
Date: Thu, 9 Jan 2025 15:20:49 -0500
Subject: [PATCH 5/5] Re-add clean up step

---
 .github/workflows/checks.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index acc034e5..6d294de1 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -27,3 +27,8 @@ jobs:

       - name: Run HTML builder for Sphinx documentation
         run: make docs
+
+      - name: Clean up
+        run: |
+          make clean
+          find /home/runner/runners/ -type f -name "*.log" -exec sh -c 'echo {}; >"{}"' \; || true
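Note on the behavior PATCH 4/5 relies on: Python's standard-library logging.FileHandler accepts a delay argument, and with delay=True the handler does not create or open the log file until the first record is actually emitted to it, so merely building the logger no longer leaves an empty log file behind. The sketch below only demonstrates that standard-library behavior; the file path and logger name are made up for the example and are not taken from flowcept.

import logging
import os
import tempfile

# Hypothetical path and logger name, used only to demonstrate delay=True.
log_path = os.path.join(tempfile.gettempdir(), "delay_demo.log")
if os.path.exists(log_path):
    os.remove(log_path)

# With delay=True the file is not created or opened at construction time.
file_handler = logging.FileHandler(log_path, delay=True, mode="a+")
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s %(message)s"))

logger = logging.getLogger("delay_demo")
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)

print("log file exists before first emit:", os.path.exists(log_path))  # False

logger.debug("first message")  # the file is created and opened only on this first emit

print("log file exists after first emit:", os.path.exists(log_path))  # True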