diff --git a/.github/workflows/run-checks.yml b/.github/workflows/checks.yml similarity index 66% rename from .github/workflows/run-checks.yml rename to .github/workflows/checks.yml index 9f7d2410..6d294de1 100644 --- a/.github/workflows/run-checks.yml +++ b/.github/workflows/checks.yml @@ -1,30 +1,26 @@ name: Linter, formatter, and docs checks -on: [pull_request] -permissions: - contents: read +on: pull_request jobs: - build: - runs-on: ubuntu-latest - if: "!contains(github.event.head_commit.message, 'CI Bot')" + checks: + runs-on: ubuntu-22.04 + if: "!contains(github.event.head_commit.message, 'CI Bot')" steps: - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5 with: - python-version: "3.10" + python-version: "3.12" cache: "pip" - name: Install package and dependencies run: | - python -m pip install --upgrade pip - python -m pip install ruff - python -m pip install .[docs] + pip install --upgrade pip + pip install ruff + pip install .[docs] - name: Run linter and formatter checks using ruff run: make checks diff --git a/Makefile b/Makefile index 3025537b..e6f81697 100644 --- a/Makefile +++ b/Makefile @@ -96,7 +96,3 @@ tests: .PHONY: tests-notebooks tests-notebooks: pytest --nbmake "notebooks/" --nbmake-timeout=600 --ignore=notebooks/dask_from_CLI.ipynb - -.PHONY: tests-all -tests-all: - pytest diff --git a/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py b/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py index 82321ea5..d31d791c 100644 --- a/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py +++ b/src/flowcept/commons/daos/mq_dao/mq_dao_redis.py @@ -50,9 +50,6 @@ def _bulk_publish(self, buffer, channel=MQ_CHANNEL, serializer=msgpack.dumps): pipe = self._producer.pipeline() for message in buffer: try: - # self.logger.debug( - # f"Going to send Message:" f"\n\t[BEGIN_MSG]{message}\n[END_MSG]\t" - # ) pipe.publish(MQ_CHANNEL, serializer(message)) except Exception as e: 
self.logger.exception(e) diff --git a/src/flowcept/commons/flowcept_logger.py b/src/flowcept/commons/flowcept_logger.py index dbf0567f..ecbc97f0 100644 --- a/src/flowcept/commons/flowcept_logger.py +++ b/src/flowcept/commons/flowcept_logger.py @@ -37,7 +37,7 @@ def _build_logger(cls): logger.addHandler(stream_handler) if file_level <= logging.CRITICAL: - file_handler = logging.FileHandler(LOG_FILE_PATH, mode="a+") + file_handler = logging.FileHandler(LOG_FILE_PATH, delay=True, mode="a+") file_handler.setLevel(file_level) file_format = logging.Formatter(f"[%(asctime)s]{_BASE_FORMAT}") file_handler.setFormatter(file_format) diff --git a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py index 70a28d9a..84980fb8 100644 --- a/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py +++ b/src/flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py @@ -113,7 +113,7 @@ def observe(self): f"I can't watch the file {self.settings.file_path}, as it does not exist." ) self.logger.debug( - f"\tI will sleep for {self.settings.watch_interval_sec} sec. to see if it appears." + f"\tI will sleep for {self.settings.watch_interval_sec} s to see if it appears." ) sleep(self.settings.watch_interval_sec) diff --git a/src/flowcept/flowceptor/consumers/document_inserter.py b/src/flowcept/flowceptor/consumers/document_inserter.py index e2cf721d..15118c66 100644 --- a/src/flowcept/flowceptor/consumers/document_inserter.py +++ b/src/flowcept/flowceptor/consumers/document_inserter.py @@ -98,8 +98,8 @@ def flush_function(buffer, doc_daos, logger): for dao in doc_daos: dao.insert_and_update_many_tasks(buffer, TaskObject.task_id_field()) logger.debug( - f"DocDao={id(dao)},DocDaoClass={dao.__class__.__name__};" - f" Flushed {len(buffer)} msgs to this DocDB!" + f"DocDao={id(dao)},DocDaoClass={dao.__class__.__name__}; " + f"Flushed {len(buffer)} msgs to this DocDB!" 
) # TODO: add name def _handle_task_message(self, message: Dict): @@ -136,7 +136,7 @@ def _handle_task_message(self, message: Dict): def _handle_workflow_message(self, message: Dict): message.pop("type") self.logger.debug( - f"Received following Workflow msg in DocInserter: \n\t[BEGIN_MSG]{message}\n[END_MSG]\t" + f"Received following Workflow msg in DocInserter:\n\t[BEGIN_MSG]{message}\n[END_MSG]\t" ) if REMOVE_EMPTY_FIELDS: remove_empty_fields_from_dict(message)