# diff.py -- minimal diff between two text files, built on a longest-common-
# subsequence (LCS) table.


class rng:
    # One edit hunk, as half-open line ranges [start, end) into each file.
    # BUG FIX: this class was referenced throughout but never defined, so the
    # module raised NameError at runtime.  Field names/ctor order reconstructed
    # from every call site: rng(file_1_start, file_1_end, file_2_start, file_2_end).
    def __init__(self, file_1_start, file_1_end, file_2_start, file_2_end):
        self.file_1_start = file_1_start
        self.file_1_end = file_1_end
        self.file_2_start = file_2_start
        self.file_2_end = file_2_end

    def __repr__(self):
        return ("rng(f1=[%d,%d), f2=[%d,%d))"
                % (self.file_1_start, self.file_1_end,
                   self.file_2_start, self.file_2_end))


class Diff:
    """Line-based diff of two files via an LCS table."""

    UP_CONTEXT_LINES = 3    # context lines printed above each hunk
    DOWN_CONTEXT_LINES = 3  # context lines printed below each hunk

    def run(self, file1, file2):
        """Diff *file1* against *file2* and print the hunks to stdout."""
        # BUG FIX: the file handles were previously never closed.
        with open(file1, 'r') as f1:
            file1_lines = f1.readlines()
        with open(file2, 'r') as f2:
            file2_lines = f2.readlines()

        lcs_table = self.generate_lcs_table(file1_lines, file2_lines)

        rngs = list()
        self.generate_ranges(lcs_table, file1_lines, file2_lines,
                             len(file1_lines) - 1, len(file2_lines) - 1, rngs)
        rngs = rngs[::-1]  # backtracking emits hunks bottom-up; restore file order
        self.merge_ranges(rngs)

        self.print_diff(file1_lines, file2_lines, rngs)

    def generate_lcs_table(self, file1_lines, file2_lines):
        """Return the LCS length table for the two line lists.

        The table carries one extra all-zero row/column at the END: reads at
        index -1 wrap around to it, which stands in for the usual "border row
        is zero" convention of the textbook DP.  The layout is kept as-is
        because generate_ranges indexes the table the same way.
        """
        lcs_table = [[0 for _ in range(len(file2_lines) + 1)]
                     for _ in range(len(file1_lines) + 1)]

        for i in range(len(file1_lines)):
            for j in range(len(file2_lines)):
                if file1_lines[i] == file2_lines[j]:
                    lcs_table[i][j] = lcs_table[i - 1][j - 1] + 1
                else:
                    lcs_table[i][j] = max(lcs_table[i - 1][j],
                                          lcs_table[i][j - 1])
        return lcs_table

    def generate_ranges(self, lcs_table, file1_lines, file2_lines, i, j, rngs):
        """Backtrack through *lcs_table* from (i, j), appending one
        single-line rng per added/deleted line (bottom-up order).

        BUG FIX: removed the stray debug print() calls.
        """
        if i < 0 and j < 0:
            return
        if i < 0:
            # file1 exhausted: remaining file2 lines are additions.
            rngs.append(rng(i + 1, i + 1, j, j + 1))
            self.generate_ranges(lcs_table, file1_lines, file2_lines, i, j - 1, rngs)
        elif j < 0:
            # file2 exhausted: remaining file1 lines are deletions.
            rngs.append(rng(i, i + 1, j + 1, j + 1))
            self.generate_ranges(lcs_table, file1_lines, file2_lines, i - 1, j, rngs)
        elif file1_lines[i] == file2_lines[j]:
            # Common line: part of the LCS, not an edit.
            self.generate_ranges(lcs_table, file1_lines, file2_lines, i - 1, j - 1, rngs)
        elif lcs_table[i][j - 1] >= lcs_table[i - 1][j]:
            # Line j of file2 was added.
            rngs.append(rng(i + 1, i + 1, j, j + 1))
            self.generate_ranges(lcs_table, file1_lines, file2_lines, i, j - 1, rngs)
        else:
            # Line i of file1 was deleted.
            rngs.append(rng(i, i + 1, j + 1, j + 1))
            self.generate_ranges(lcs_table, file1_lines, file2_lines, i - 1, j, rngs)

    def merge_ranges(self, li):
        """Coalesce adjacent single-line ranges in *li* in place; returns *li*.

        Two ranges merge when they touch in either file (the original had two
        identical branches for the two adjacency tests).
        """
        i = 0
        while i < len(li) - 1:
            if (li[i].file_1_end == li[i + 1].file_1_start
                    or li[i].file_2_end == li[i + 1].file_2_start):
                merged = rng(li[i].file_1_start, li[i + 1].file_1_end,
                             li[i].file_2_start, li[i + 1].file_2_end)
                li[i:i + 2] = [merged]
            else:
                i += 1
        return li

    def print_diff(self, file1_lines, file2_lines, rngs):
        """Print each hunk with surrounding context.

        BUG FIX: context sizes were declared as 3/3 but hard-coded as 3 above
        and 4 below; both now honour the class constants, and the trailing
        loop clamps instead of relying on IndexError to stop.
        """
        for r in rngs:
            line_top = r.file_1_start
            line_bottom = r.file_2_end

            # Leading context comes from file1.
            for j in range(line_top - self.UP_CONTEXT_LINES, line_top):
                if j >= 0:
                    print(" " + file1_lines[j], end="")

            for j in range(r.file_1_start, r.file_1_end):
                print("-" + file1_lines[j], end="")
            for j in range(r.file_2_start, r.file_2_end):
                print("+" + file2_lines[j], end="")

            # Trailing context comes from file2.
            for j in range(line_bottom,
                           min(line_bottom + self.DOWN_CONTEXT_LINES,
                               len(file2_lines))):
                print(" " + file2_lines[j], end="")
            print("--------------------------------")
# --- ngc.py ---
import os
import time
import objects
import logging
import json
from pathlib import Path


class Ngc:
    """Toy version-control front-end over the blob/tree/commit objects
    stored under <repo>/.ngc/objects (see objects.py)."""

    # Keys used in the user/author detail dicts persisted to disk.
    USER_NAME = 'user_name'
    USER_EMAIL = 'user_email'
    TIMESTAMP = 'timestamp'

    def __init__(self, repo_path=None):
        """Bind the instance to *repo_path* (default: current directory).

        BUG FIX: the default was ``repo_path=os.getcwd()``, which is evaluated
        once when the class body is executed, pinning every later instance to
        the importer's cwd.  The cwd is now resolved lazily per call.
        """
        self.repo_path = os.getcwd() if repo_path is None else repo_path
        self.user_details = self._get_user_details()
        self.author_details = self._get_author_details()
        self.head = self._get_current_commit_hash()  # None before first commit
        self.obj_blob = objects.Blob()
        self.obj_tree = objects.Tree(self.create_logger("Tree log"), self.repo_path)
        self.obj_commit = objects.Commit(self.repo_path)
        self.logger = self.create_logger("ngc_obj", log_level=logging.WARNING)
+ self.logger = self.create_logger("ngc_obj", log_level=logging.WARNING) + + def init(self): + self.author_details = self.user_details + self.author_details[self.TIMESTAMP] = time.time() + self._set_author_details() + ngc_path = os.path.join(self.repo_path, ".ngc") + objects_path = os.path.join(ngc_path, "objects") + + if not os.path.exists(ngc_path): os.makedirs(ngc_path) + if not os.path.exists(objects_path): os.makedirs(objects_path) + + def status(self): + modified_files = list() + def print_mod(file_path, blob_path): + file_name = file_path.split("/")[-1] + print(f"modified: {file_name}") + def print_del(file_path, blob_path): + file_name = file_path.split("/")[-1] + print(f"deleted: {file_name}") + def print_add(file_path): + file_name = file_path.split("/")[-1] + print(f"added: {file_name}") + + + print(f"On branch {self.head}") + print("Changes not committed:") + self._check_modified_files(mod_list=modified_files, mod_func=print_mod, del_func=print_del) + self._check_added_files(mod_list=modified_files, add_func=print_add) + print('Use "ngc commit" to add changes to a new commit') + + def diff(self): + pass + + def commit(self, message): + tree_hash = self.obj_tree.create() + parent_hash = self._get_current_commit_hash() + self.user_details[self.TIMESTAMP] = time.time() + commit_hash = self.obj_commit.create(tree_hash=tree_hash, author_details=self.author_details, committer_details=self.user_details, message=message, parent_hash=parent_hash) + self._update_commit_hash(commit_hash) + + def reset(self): + modified_files = list() + def restore_file(file_path, blob_path): + self.obj_blob.extract_content(dst=file_path, file_path=blob_path) + def delete_file(file_path): + os.remove(file_path) + + self._check_modified_files(mod_list=modified_files, mod_func=restore_file, del_func=restore_file) + self._check_added_files(mod_list=modified_files, add_func=delete_file) + + def log(self, commit_hash=None): + if commit_hash is None: commit_hash = self.head + 
self.logger.info("logging...") + + with open(os.path.join(self.repo_path, '.ngc/HEAD'), 'rb') as head_file: + current_hash = head_file.read().decode() + self.logger.debug("current_hash variable: %s" % (current_hash)) + self.logger.debug("commit_hash_hash variable: %s" % (commit_hash)) + self.logger.debug("self.head variable: %s" % (self.head)) + + while True: + if current_hash == commit_hash: + break + else: + with open(os.path.join(self.repo_path, f'.ngc/objects/{current_hash}'), 'rb') as commit_file: + commit_data = json.load(commit_file) + current_hash = commit_data[self.obj_commit.PARENT] + + while True: + self.obj_commit.print_commit_data(current_hash) + with open(os.path.join(self.repo_path, f'.ngc/objects/{current_hash}'), 'rb') as commit_file: + commit_data = json.load(commit_file) + if self.obj_commit.PARENT not in commit_data: break + else: current_hash = commit_data[self.obj_commit.PARENT] + + + def checkout(self, commit_hash=None): + if commit_hash is None: commit_hash = self.head + + for dirpath, dirnames, filenames in os.walk(self.repo_path): + if ".ngc" in dirpath: + continue + for file in filenames: + if file is ".authorinfo" : continue + os.remove(os.path.join(dirpath, file)) + + tree_hash = self.obj_commit.get_tree_hash(commit_hash) + self._restore_files(tree_hash, self.repo_path) + + def config_user(self, user_name, user_email): + self.user_details[self.USER_NAME] = user_name + self.user_details[self.USER_EMAIL] = user_email + + with open(os.path.join(str(Path.home()), ".userinfo"), 'w') as user_info_file: + json.dump(self.user_details, user_info_file) + + def _get_user_details(self): + info_file_path = os.path.join(str(Path.home()), ".userinfo") + user_details = {self.USER_NAME : None, + self.USER_EMAIL : None, + self.TIMESTAMP : None} + + if os.path.exists(info_file_path): + with open(info_file_path, "r") as user_info_file: + user_details = json.load(user_info_file) + + return user_details + + def _set_author_details(self): + with 
open(os.path.join(self.repo_path, '.authorinfo'), 'w') as author_info_file: + json.dump(self.author_details, author_info_file) + + def _get_author_details(self): + author_details = {self.USER_NAME : None, + self.USER_EMAIL : None, + self.TIMESTAMP : None} + author_file_path = os.path.join(self.repo_path, '.authorinfo') + if os.path.exists(author_file_path): + with open(author_file_path, 'r') as author_info_file: + author_details = json.load(author_info_file) + + return author_details + + def _get_current_commit_hash(self): + commit_hash = None + try: + with open(os.path.join(self.repo_path, ".ngc/HEAD"), "r") as head_file: + commit_hash = head_file.read() + except FileNotFoundError: + pass + return commit_hash + + def _restore_files(self, tree_hash, dir_path): + tree_dict = self.obj_tree.get_tree_dict(tree_hash) + + for file in tree_dict[self.obj_tree.FILES]: + blob_name = tree_dict[self.obj_tree.FILES][file] + blob_path = os.path.join(self.repo_path, ".ngc/objects", blob_name) + file_path = os.path.join(dir_path, file) + self.obj_blob.extract_content(blob_path, file_path) + + for subdir in tree_dict[self.obj_tree.SUBDIRS]: + subdir_path = os.path.join(dir_path, subdir) + subdir_hash = tree_dict[self.obj_tree.SUBDIRS][subdir] + self._restore_files(subdir_hash, subdir_path) + + def _update_commit_hash(self, new_commit_hash): + self.head = new_commit_hash + self.logger.info("Updated self.head to %s" % (self.head)) + + with open(os.path.join(self.repo_path, ".ngc/HEAD"), "w") as head_file: + head_file.write(new_commit_hash) + + def _check_modified_files(self, tree=None, path=None, mod_list=None, mod_func=None, del_func=None): + if path is None : path = self.repo_path + if tree is None : tree = self.head + + if tree is self.head: + with open(os.path.join(self.repo_path, ".ngc/objects", tree), "rb") as commit_file: + commit_data = json.load(commit_file) + tree = commit_data[self.obj_commit.TREE] + with open(os.path.join(self.repo_path, ".ngc/objects", tree), "rb") as 
tree_file: + tree_json = json.load(tree_file) + for file in tree_json[self.obj_tree.FILES]: + file_path = os.path.join(path, file) + blob_path = os.path.join(self.repo_path, ".ngc/objects", tree_json[self.obj_tree.FILES][file]) + if os.path.exists(file_path): + file_content = open(file_path, 'rb').read() + blob_content = self.obj_blob.get_content(blob_path) + if file_content != blob_content: + if type(mod_list) is list: + mod_list.append(file) + try: + mod_func(file_path, blob_path) + except TypeError: + pass + else: + try: + del_func(file_path, blob_path) + except TypeError: + pass + for subdir in tree_json[self.obj_tree.SUBDIRS]: + new_path = os.path.join(path, subdir) + new_tree = tree_json[self.obj_tree.SUBDIRS][subdir] + self._check_modified_files(tree=new_tree, path=new_path, mod_list=mod_list, mod_func=mod_func, del_func=del_func) + + def _check_added_files(self, mod_list=None, add_func=None): + + for dirpath, dirnames, filenames in os.walk(self.repo_path): + if "." in dirpath: + continue + for file in filenames: + if file.startswith("."): + continue + file_path = os.path.join(dirpath, file) + hexdigest = self.obj_blob._get_file_hash(file_path) + if hexdigest not in os.listdir(os.path.join(self.repo_path, '.ngc/objects')): + if type(mod_list) is list: + if file in mod_list: + continue + try: + add_func(file_path) + except TypeError: + pass + + @staticmethod + def create_logger(logger_name, log_level=logging.WARNING): + + logger = logging.getLogger() + handler = logging.StreamHandler() + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') + handler.setFormatter(formatter) + logger.addHandler(handler) + logger.setLevel(log_level) + + return logger diff --git a/solutions/assignment-4/ngc_cli.py b/solutions/assignment-4/ngc_cli.py new file mode 100644 index 00000000..fc01b556 --- /dev/null +++ b/solutions/assignment-4/ngc_cli.py @@ -0,0 +1,35 @@ +import ngc +import argparse +from os import getcwd + +if __name__ == '__main__': + 
# --- objects.py ---
import gzip
import shutil
import hashlib
import os
import json
import time


class NgcObject:
    """Shared constants and helpers for the on-disk object types."""

    BUF_SIZE = 65536              # chunk size for streaming reads/writes
    HASHING_FUNCTION = 'sha1'
    COMPRESSION_METHOD = 'zlib'   # NOTE(review): blobs are actually written with gzip

    def compress_obj(self, obj_path, dst):
        """Gzip-compress the file at *obj_path* into *dst*."""
        with open(obj_path, 'rb') as f_in:
            with gzip.open(dst, "wb") as f_out:
                shutil.copyfileobj(f_in, f_out, self.BUF_SIZE)

    def extract_obj(self, obj_path, dst):
        # TODO: not implemented yet.
        pass

    def _get_file_hash(self, file_path):
        """Return the hex digest of the raw file content (no header)."""
        hashf = hashlib.new(self.HASHING_FUNCTION)

        with open(file_path, "rb") as f_in:
            while True:
                data = f_in.read(self.BUF_SIZE)
                if not data:
                    break
                hashf.update(data)

        return hashf.hexdigest()


class Blob(NgcObject):
    """A file snapshot: gzip("blob <size>\\x00" + content), named by the sha1
    of the *uncompressed* header + content."""

    def create(self, file_path, obj_path):
        """Write the blob for *file_path* into directory *obj_path*; returns
        the blob's name (its hash)."""
        compressed_filename = self._get_file_hash(file_path)
        header = bytes(self._create_header(os.path.getsize(file_path)), 'ascii')

        with open(file_path, 'rb') as f_in:
            with gzip.open(os.path.join(obj_path, compressed_filename), "wb") as f_out:
                f_out.write(header)
                shutil.copyfileobj(f_in, f_out, self.BUF_SIZE)

        return compressed_filename

    def read_header(self, file_path):
        """Return the decoded "blob <size>\\x00" header of a blob file."""
        header = b''
        with gzip.open(file_path, "rb") as blob_obj:
            while True:
                byte = blob_obj.read(1)
                if not byte:
                    break  # BUG FIX: a header-less file no longer loops forever
                header += byte
                if byte == b"\x00":
                    break
        return header.decode()

    def get_content_chunk(self, file_path, fptr):
        # NOTE(review): this reads *compressed* bytes with a plain open() and
        # ignores the header, unlike every other accessor -- looks defective,
        # but the intended contract (raw vs decompressed offset) is unclear.
        # Behaviour kept as-is; confirm before relying on it.
        with open(file_path, "rb") as blob_obj:
            blob_obj.seek(fptr)
            data_chunk = blob_obj.read(self.BUF_SIZE)

        return data_chunk

    def get_content(self, file_path):
        """Return the decoded content of a blob (header skipped)."""
        with gzip.open(file_path, "rb") as f_in:
            self._skip_header(f_in)
            content = f_in.read()

        return content.decode()

    def extract_content(self, file_path, dst):
        """Stream a blob's content (header skipped) into the file *dst*."""
        with gzip.open(file_path, "rb") as f_in:
            with open(dst, "wb") as f_out:
                self._skip_header(f_in)
                while True:
                    buf_data = f_in.read(self.BUF_SIZE)
                    if not buf_data:
                        break
                    f_out.write(buf_data)

    @staticmethod
    def _skip_header(f_in):
        # Consume bytes up to and including the NUL terminator.
        # BUG FIX: the old scan loops never terminated at EOF on a
        # header-less file (read() keeps returning b"").
        while True:
            byte = f_in.read(1)
            if not byte or byte == b"\x00":
                break

    def _get_file_hash(self, file_path):
        """Hash header + raw content; the digest doubles as the blob name."""
        hashf = hashlib.new(self.HASHING_FUNCTION)
        header = self._create_header(os.path.getsize(file_path))
        hashf.update(bytes(header, 'ascii'))

        with open(file_path, "rb") as f_in:
            while True:
                data = f_in.read(self.BUF_SIZE)
                if not data:
                    break
                hashf.update(data)

        return hashf.hexdigest()

    def _create_header(self, content_length):
        return f"blob {content_length}\x00"


class Tree(NgcObject):
    """A directory snapshot: JSON {files: {name: blob_hash},
    subdirs: {name: tree_hash}}, stored under its own sha1."""

    FILES = 'files'
    SUBDIRS = 'subdirs'

    def __init__(self, logger, path=None):
        """Bind to repo *path* (default: cwd) and ensure .ngc/objects exists.

        BUG FIX: ``path=os.getcwd()`` as a default was evaluated once at
        class-definition time; the cwd is now resolved lazily per call.
        """
        self.path = os.getcwd() if path is None else path
        self.obj_path = os.path.join(self.path, '.ngc/objects')
        os.makedirs(self.obj_path, exist_ok=True)
        self.logger = logger
        self.root = None
        self.blob = Blob()

    def create(self, path=None):
        """Recursively snapshot *path* (default: repo root); returns the
        tree hash.  Blobs are written only if not already stored."""
        if not path:
            path = self.path
        files = dict()
        subdirs = dict()

        for item in os.listdir(path):
            self.logger.info("location traversing-%s", path)
            item_path = os.path.join(path, item)
            self.logger.debug("item path: %s", item_path)

            if item.startswith("."):
                continue  # hidden entries (including .ngc itself) are untracked

            if os.path.isfile(item_path):
                file_hash = self.blob._get_file_hash(item_path)
                if not os.path.exists(os.path.join(self.obj_path, file_hash)):
                    file_hash = self.blob.create(item_path, self.obj_path)
                    self.logger.debug("%s blob created", item)
                files[item] = file_hash
                self.logger.info("%s done", item)
            elif os.path.isdir(item_path):
                subdirs[item] = self.create(item_path)
                self.logger.info("%s dir done", item)
            # anything else (sockets, broken links, ...) is silently skipped

        # Key order (FILES then SUBDIRS) is part of the stored bytes and
        # therefore of the hash -- do not reorder.
        tree_obj = {self.FILES: files, self.SUBDIRS: subdirs}
        tree_json_bytes = json.dumps(tree_obj).encode()

        hashf = hashlib.new(self.HASHING_FUNCTION)
        hashf.update(tree_json_bytes)
        hashed_value = hashf.hexdigest()

        with open(os.path.join(self.obj_path, hashed_value), 'wb') as tree_file:
            tree_file.write(tree_json_bytes)

        return hashed_value

    def get_tree_dict(self, tree_hash):
        """Load and return the stored tree JSON as a dict."""
        with open(os.path.join(self.obj_path, tree_hash), "rb") as tree_file:
            return json.load(tree_file)


class Commit(NgcObject):
    """A commit object: JSON {tree, author, committer, message, parent?},
    stored under its own sha1."""

    TREE = "tree"
    PARENT = "parent"
    AUTHOR = 'author'
    COMMITTER = 'committer'
    MSG = 'message'

    def __init__(self, path=None):
        # BUG FIX: lazy cwd default (was evaluated at class-definition time).
        self.path = os.getcwd() if path is None else path
        self.obj_path = os.path.join(self.path, ".ngc/objects")

    def create(self, tree_hash, author_details, committer_details, message,
               parent_hash=None):
        """Store a commit pointing at *tree_hash*; returns the commit hash.

        Also removed an unused ``time_stamp = time.time()`` local.
        Insertion order matches the original so stored hashes are unchanged.
        """
        commit_obj = {
            self.TREE: tree_hash,
            self.AUTHOR: author_details,
            self.COMMITTER: committer_details,
            self.MSG: message,
        }
        if parent_hash:
            commit_obj[self.PARENT] = parent_hash

        commit_json_bytes = json.dumps(commit_obj).encode()

        hashf = hashlib.new(self.HASHING_FUNCTION)
        hashf.update(commit_json_bytes)
        hashed_value = hashf.hexdigest()

        with open(os.path.join(self.obj_path, hashed_value), 'wb') as commit_file:
            commit_file.write(commit_json_bytes)

        return hashed_value

    def read(self, commit_hash):
        # TODO: not implemented yet.
        pass

    def print_commit_data(self, commit_hash):
        """Pretty-print one stored commit."""
        commit_json = self.get_commit_dict(commit_hash)

        print(self.TREE, commit_json[self.TREE])
        if self.PARENT in commit_json:
            print(self.PARENT, commit_json[self.PARENT])
        print(self.AUTHOR, commit_json[self.AUTHOR])
        print(self.COMMITTER, commit_json[self.COMMITTER])
        print('\n', commit_json[self.MSG])

    def get_commit_dict(self, commit_hash):
        """Load and return the stored commit JSON as a dict."""
        with open(os.path.join(self.obj_path, commit_hash), 'rb') as commit_file:
            return json.load(commit_file)

    def get_tree_hash(self, commit_hash):
        """Return the root tree hash recorded in *commit_hash*."""
        return self.get_commit_dict(commit_hash)[self.TREE]

# NOTE(review): the original chunk also carried the ngc_cli entry point and
# two binary fixture files (test/test_dir/file1, .../subdir1/file2) whose
# contents are opaque blobs pinned by hash in unittest_objects.py.
# --- unittest_objects.py ---
# (The preceding binary fixture bytes for test/test_dir/subdir1/file2 are
# opaque data; the expected hashes below pin their content.)
import unittest
import tempfile
import random
import string
import os
import shutil
import logging
import objects


class ObjectsTest(unittest.TestCase):
    """Round-trip tests for Blob/Tree/Commit against the fixture files in
    test/test_dir, whose hashes are pinned below."""

    TEST_DIR = os.path.join(os.getcwd(), 'test/test_dir')

    # fixture path (relative to TEST_DIR) -> expected blob hash
    FILE_LIST = {'file1': 'b75caba50711332063088fc53744f1c55b4445fe',
                 'subdir1/file2': 'bfc385898eb83d5f15e84635a1ab1649cced4fcb',
                 'subdir2/file3': '41cd77870f7d97e5b5a32b87a12343312ffbc065',
                 'subdir2/subdir3/file4': 'f0ef9dfd4b499f54cdcc15d0fc95204d92207790'}

    # fixture directory (relative to TEST_DIR) -> expected tree hash
    SUBDIR_LIST = {'.': '0fbd657ff0d946213275023ae722c244c3026682',
                   'subdir1': 'd2b945ace691fe8522e868ebde016d0a53ac40ca',
                   'subdir2': '35d9d594f1d830505fb3132d8959e73119755114',
                   'subdir2/subdir3': '17aeddcc4d19a24eedc6024163cdb9b0fceb5faa'}

    def test_blob(self):
        """Blob name, header and content must round-trip for every fixture."""
        blob_obj = objects.Blob()

        for file in self.FILE_LIST:
            with tempfile.TemporaryDirectory() as temp_dir:
                file_path = os.path.join(self.TEST_DIR, file)
                file_name = blob_obj.create(file_path, temp_dir)
                blob_path = os.path.join(temp_dir, file_name)
                self.assertEqual(file_name, self.FILE_LIST[file])

                blob_file_header = blob_obj.read_header(blob_path)
                file_header = f"blob {os.stat(file_path).st_size}\x00"
                self.assertEqual(blob_file_header, file_header)

                with open(file_path, 'rb') as f:
                    file_data = f.read().decode()
                blob_file_data = blob_obj.get_content(blob_path)
                self.assertEqual(file_data, blob_file_data)

    def test_tree(self):
        """Tree.create must produce the pinned tree objects."""
        with tempfile.TemporaryDirectory() as temp_dir:
            # BUG FIX: distutils.dir_util.copy_tree was removed in
            # Python 3.12; shutil.copytree(dirs_exist_ok=True) is the
            # supported stdlib equivalent for copying into an existing dir.
            shutil.copytree(self.TEST_DIR, temp_dir, dirs_exist_ok=True)
            tree_obj = objects.Tree(self.create_logger('tree_logger'),
                                    path=temp_dir)
            tree_obj.create()
            for subdir in self.SUBDIR_LIST:
                tree_path = os.path.join(tree_obj.obj_path,
                                         self.SUBDIR_LIST[subdir])
                self.assertTrue(os.path.exists(tree_path))

    def test_commit(self):
        """Commit.create must store exactly the expected JSON payload."""
        commit_data = {"tree": "0fbd657ff0d946213275023ae722c244c3026682",
                       "author": None,
                       "committer": None,
                       "message": "test"}

        with tempfile.TemporaryDirectory() as temp_dir:
            shutil.copytree(self.TEST_DIR, temp_dir, dirs_exist_ok=True)
            tree_obj = objects.Tree(self.create_logger('tree_logger'),
                                    path=temp_dir)
            commit_obj = objects.Commit(path=temp_dir)
            tree_obj.create()
            commit_hash = commit_obj.create(tree_hash=self.SUBDIR_LIST["."],
                                            author_details=None,
                                            committer_details=None,
                                            message="test")
            commit_path = os.path.join(commit_obj.obj_path, commit_hash)
            self.assertTrue(os.path.exists(commit_path))

            commit_json_data = commit_obj.get_commit_dict(commit_hash)
            self.assertEqual(commit_data, commit_json_data)

    @staticmethod
    def create_logger(logger_name, log_level=logging.WARNING):
        """Named logger with one stream handler.

        BUG FIX: the original ignored *logger_name*, configured the root
        logger, and stacked a new handler on every call.
        """
        logger = logging.getLogger(logger_name)
        if not logger.handlers:
            handler = logging.StreamHandler()
            formatter = logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            handler.setFormatter(formatter)
            logger.addHandler(handler)
        logger.setLevel(log_level)

        return logger


if __name__ == '__main__':
    unittest.main()