diff --git a/apps/log_search/apps.py b/apps/log_search/apps.py
index a7a7dd769..8fe2d9c01 100644
--- a/apps/log_search/apps.py
+++ b/apps/log_search/apps.py
@@ -55,7 +55,7 @@ def sync_package_version(self):
     from apps.log_search.models import GlobalConfig
 
     try:
-        with open(os.path.join(settings.PROJECT_ROOT, "VERSION")) as fd:
+        with open(os.path.join(settings.PROJECT_ROOT, "VERSION"), encoding="utf-8") as fd:
             version = fd.read().strip()
     except Exception:  # pylint: disable=broad-except
         version = ""
diff --git a/apps/log_search/tasks/async_export.py b/apps/log_search/tasks/async_export.py
index df6c8f740..97abda267 100644
--- a/apps/log_search/tasks/async_export.py
+++ b/apps/log_search/tasks/async_export.py
@@ -183,7 +183,7 @@ def export_package(self):
             os.makedirs(ASYNC_DIR)
 
         result = self.search_handler.pre_get_result(sorted_fields=self.sorted_fields, size=MAX_RESULT_WINDOW)
-        with open(self.file_path, "a+") as f:
+        with open(self.file_path, "a+", encoding="utf-8") as f:
             result_list = self.search_handler._deal_query_result(result_dict=result).get("origin_log_list")
             for item in result_list:
                 f.write("%s\n" % json.dumps(item))
diff --git a/config/env.py b/config/env.py
index 14f271b88..e955a36f9 100644
--- a/config/env.py
+++ b/config/env.py
@@ -73,7 +73,7 @@ def load_env():
     project_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
     env = os.path.join(project_path, f"{ENVIRONMENT}.env.yml" if not env else f"{env}.{ENVIRONMENT}.env.yml")
     assert os.path.exists(env), f"{env} not exists"
-    with open(env) as f:
+    with open(env, encoding="utf-8") as f:
         content = yaml.load(f, Loader=yaml.FullLoader)
     assert content, f"{env} must have content"
     return content
diff --git a/iam/contrib/iam_migration/utils/do_migrate.py b/iam/contrib/iam_migration/utils/do_migrate.py
index a88727c3c..39b9a70d2 100644
--- a/iam/contrib/iam_migration/utils/do_migrate.py
+++ b/iam/contrib/iam_migration/utils/do_migrate.py
@@ -36,7 +36,7 @@ def load_data(filename):
     """
     data = {}
     try:
-        with open(filename) as data_file:
+        with open(filename, encoding="utf-8") as data_file:
             data = json.load(data_file)
         print("parser json data file success!")
     except Exception as error:
diff --git a/scripts/check_commit_message.py b/scripts/check_commit_message.py
index e405c00b4..9ddf83e07 100644
--- a/scripts/check_commit_message.py
+++ b/scripts/check_commit_message.py
@@ -33,7 +33,7 @@ def get_commit_message():
         print("Warning: The path of file `COMMIT_EDITMSG` not given, skipped!")
         return 0
     commit_message_filepath = args[1]
-    with open(commit_message_filepath, "r") as fd:
+    with open(commit_message_filepath, "r", encoding="utf-8") as fd:
         content = fd.read()
     return content.strip().lower()
 
diff --git a/scripts/check_migrate/check_migrate.py b/scripts/check_migrate/check_migrate.py
index 7a4183b54..d6d214735 100755
--- a/scripts/check_migrate/check_migrate.py
+++ b/scripts/check_migrate/check_migrate.py
@@ -39,7 +39,7 @@ def read_csv():
 def save(content):
     if content:
         json_path = os.path.join(BASE_DIR, "field_library.json")
-        with open(json_path, "w") as fp:
+        with open(json_path, "w", encoding="utf-8") as fp:
             json.dump(content, fp)
 
 
@@ -48,7 +48,7 @@ def get_field_library():
     if not os.path.exists(json_path):
         field_library = read_csv()
         save(field_library)
-    with open(json_path, "r") as fp:
+    with open(json_path, "r", encoding="utf-8") as fp:
         content = json.load(fp)
     return content
 
@@ -61,7 +61,7 @@ def get_str_md5(content):
 
 def handle_rename_model(file_path, library):
     rename_content = {}
-    with open(file_path, "r") as fp:
+    with open(file_path, "r", encoding="utf-8") as fp:
         ret = fp.readlines()
     rename = False
     index = 0
@@ -100,7 +100,7 @@ def handle_rename_model(file_path, library):
 
 def handle_add_alter_model(file_path, library):
     add_alter_content = {}
-    with open(file_path, "r") as fp:
+    with open(file_path, "r", encoding="utf-8") as fp:
         ret = fp.readlines()
     alter = False
     index = 0
@@ -135,7 +135,7 @@ def handle_add_alter_model(file_path, library):
 
 def handle_create_model(file_path, library):
     create_content = {}
-    with open(file_path, "r") as fp:
+    with open(file_path, "r", encoding="utf-8") as fp:
         ret = fp.readlines()
     create = False
     index = 0
@@ -182,14 +182,14 @@ def handle_create_model(file_path, library):
 
 def get_new_field(result):
     exist_field = []
     if "field_error_detail.log" in os.listdir("."):
-        with open("field_error_detail.log", "r") as fp:
+        with open("field_error_detail.log", "r", encoding="utf-8") as fp:
             exist_field = eval(fp.read())
     new_field = []
     for line in result:
         if get_str_md5(line) not in exist_field:
             print(line)
             new_field.append(get_str_md5(line))
-    with open("field_error_detail.log", "w") as fp:
+    with open("field_error_detail.log", "w", encoding="utf-8") as fp:
         fp.write(str(exist_field + new_field))
     return new_field