Skip to content

Commit

Permalink
feature: add encoding="utf-8" to open() calls for text-mode file access
Browse files Browse the repository at this point in the history
  • Loading branch information
EvildoerXiaoyy committed Aug 17, 2021
1 parent 5e38039 commit 96b5ffb
Show file tree
Hide file tree
Showing 9 changed files with 17 additions and 17 deletions.
2 changes: 1 addition & 1 deletion apps/log_extract/views/tasks_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -461,7 +461,7 @@ def download_file(self, request):
target_file_dir = os.path.join(settings.EXTRACT_SAAS_STORE_DIR, target_file)
if not os.path.isfile(target_file_dir):
raise TaskFileLinkNotExist
with open(target_file_dir, "rb") as f:
with open(target_file_dir, "rb") as f:  # binary mode must not take an encoding argument (raises ValueError)
content = f.read()
response = HttpResponse(content=content)
response["Content-Type"] = "application/octet-stream"
Expand Down
2 changes: 1 addition & 1 deletion apps/log_search/apps.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def sync_package_version(self):
from apps.log_search.models import GlobalConfig

try:
with open(os.path.join(settings.PROJECT_ROOT, "VERSION")) as fd:
with open(os.path.join(settings.PROJECT_ROOT, "VERSION"), encoding="utf-8") as fd:
version = fd.read().strip()
except Exception: # pylint: disable=broad-except
version = ""
Expand Down
2 changes: 1 addition & 1 deletion apps/log_search/tasks/async_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ def export_package(self):
os.makedirs(ASYNC_DIR)

result = self.search_handler.pre_get_result(sorted_fields=self.sorted_fields, size=MAX_RESULT_WINDOW)
with open(self.file_path, "a+") as f:
with open(self.file_path, "a+", encoding="utf-8") as f:
result_list = self.search_handler._deal_query_result(result_dict=result).get("origin_log_list")
for item in result_list:
f.write("%s\n" % json.dumps(item))
Expand Down
4 changes: 2 additions & 2 deletions bk_dataview/grafana/provisioning.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ def read_conf(self, name, suffix):

paths = os.path.join(grafana_settings.PROVISIONING_PATH, name, f"*.{suffix}")
for path in glob.glob(paths):
with open(path, "rb") as fh:
with open(path, "rb") as fh:  # binary mode must not take an encoding argument (raises ValueError)
conf = fh.read()
expand_conf = os.path.expandvars(conf)
ds = yaml.load(expand_conf)
Expand All @@ -108,7 +108,7 @@ def dashboards(self, request, org_name: str, org_id: int) -> List[Dashboard]:
dashboard_path = os.path.expandvars(p["options"]["path"])
paths = os.path.join(dashboard_path, "*.json")
for path in glob.glob(paths):
with open(path, "rb") as fh:
with open(path, "rb") as fh:  # binary mode must not take an encoding argument (raises ValueError)
dashboard = json.loads(fh.read())
title = dashboard.get("title")
if not title:
Expand Down
2 changes: 1 addition & 1 deletion config/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def load_env():
project_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
env = os.path.join(project_path, f"{ENVIRONMENT}.env.yml" if not env else f"{env}.{ENVIRONMENT}.env.yml")
assert os.path.exists(env), f"{env} not exists"
with open(env) as f:
with open(env, encoding="utf-8") as f:
content = yaml.load(f, Loader=yaml.FullLoader)
assert content, f"{env} must have content"
return content
Expand Down
2 changes: 1 addition & 1 deletion iam/contrib/iam_migration/utils/do_migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def load_data(filename):
"""
data = {}
try:
with open(filename) as data_file:
with open(filename, encoding="utf-8") as data_file:
data = json.load(data_file)
print("parser json data file success!")
except Exception as error:
Expand Down
2 changes: 1 addition & 1 deletion scripts/check_commit_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def get_commit_message():
print("Warning: The path of file `COMMIT_EDITMSG` not given, skipped!")
return 0
commit_message_filepath = args[1]
with open(commit_message_filepath, "r") as fd:
with open(commit_message_filepath, "r", encoding="utf-8") as fd:
content = fd.read()
return content.strip().lower()

Expand Down
14 changes: 7 additions & 7 deletions scripts/check_migrate/check_migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def read_csv():
def save(content):
if content:
json_path = os.path.join(BASE_DIR, "field_library.json")
with open(json_path, "w") as fp:
with open(json_path, "w", encoding="utf-8") as fp:
json.dump(content, fp)


Expand All @@ -48,7 +48,7 @@ def get_field_library():
if not os.path.exists(json_path):
field_library = read_csv()
save(field_library)
with open(json_path, "r") as fp:
with open(json_path, "r", encoding="utf-8") as fp:
content = json.load(fp)
return content

Expand All @@ -61,7 +61,7 @@ def get_str_md5(content):

def handle_rename_model(file_path, library):
rename_content = {}
with open(file_path, "r") as fp:
with open(file_path, "r", encoding="utf-8") as fp:
ret = fp.readlines()
rename = False
index = 0
Expand Down Expand Up @@ -100,7 +100,7 @@ def handle_rename_model(file_path, library):

def handle_add_alter_model(file_path, library):
add_alter_content = {}
with open(file_path, "r") as fp:
with open(file_path, "r", encoding="utf-8") as fp:
ret = fp.readlines()
alter = False
index = 0
Expand Down Expand Up @@ -135,7 +135,7 @@ def handle_add_alter_model(file_path, library):

def handle_create_model(file_path, library):
create_content = {}
with open(file_path, "r") as fp:
with open(file_path, "r", encoding="utf-8") as fp:
ret = fp.readlines()
create = False
index = 0
Expand Down Expand Up @@ -182,14 +182,14 @@ def handle_create_model(file_path, library):
def get_new_field(result):
exist_field = []
if "field_error_detail.log" in os.listdir("."):
with open("field_error_detail.log", "r") as fp:
with open("field_error_detail.log", "r", encoding="utf-8") as fp:
exist_field = eval(fp.read())
new_field = []
for line in result:
if get_str_md5(line) not in exist_field:
print(line)
new_field.append(get_str_md5(line))
with open("field_error_detail.log", "w") as fp:
with open("field_error_detail.log", "w", encoding="utf-8") as fp:
fp.write(str(exist_field + new_field))
return new_field

Expand Down
4 changes: 2 additions & 2 deletions scripts/i18n/fill_po_with_po.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def safe_encode(s):

def scan(self, po_file):
write_list = []
with open(po_file, "rb") as f:
with open(po_file, "rb") as f:  # binary mode must not take an encoding argument (raises ValueError)
ori_content = []
for line in f.readlines():
line = self.safe_encode(line)
Expand All @@ -51,7 +51,7 @@ def scan(self, po_file):
write_list.append(line)

content = "".join(write_list)
with open(po_file, "wb") as f:
with open(po_file, "wb") as f:  # binary mode must not take an encoding argument (raises ValueError)
f.write(content.encode("utf-8"))
pass

Expand Down

0 comments on commit 96b5ffb

Please sign in to comment.