diff --git a/requirements.txt b/requirements.txt
index bf0d9d411..c764435a1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,11 @@
-requests==2.28.2
\ No newline at end of file
+annotated-types==0.6.0
+certifi==2023.11.17
+charset-normalizer==3.3.2
+idna==3.6
+pydantic==2.5.3
+pydantic_core==2.14.6
+requests==2.28.2
+retrying==1.3.4
+six==1.16.0
+typing_extensions==4.9.0
+urllib3==1.26.18
diff --git a/scripts/auth_tools.py b/scripts/auth_tools.py
index 110f01eca..03c9ebf99 100644
--- a/scripts/auth_tools.py
+++ b/scripts/auth_tools.py
@@ -1,67 +1,130 @@
-import os
 import argparse
 import json
-from constants import URL
+import logging
+import os
 import requests
+import stat
+from pathlib import Path
+from urllib.parse import urljoin
+
+import constants
+
+logger = logging.getLogger(__name__)
 
-def create_credentials_file(file_name: str, value: str):
-    with open(os.path.dirname(os.path.abspath(__file__)) + f"/.{file_name}", "w") as f:
-        f.write(value)
-
-
-def read_credentials_file(file_name: str) -> str:
-    credentials_file_path = (
-        os.path.dirname(os.path.abspath(__file__)) + f"/.{file_name}"
-    )
-    if not os.path.exists(credentials_file_path):
-        return None
-
-    return open(credentials_file_path, "r").read().strip("\n")
-
-
-secret = read_credentials_file("secret")
-user = read_credentials_file("user")
-
-
-def update_auth(auth):
-
-    url = URL + "/user/signin"
-    body = {"username": user, "password": secret}
-
-    response = requests.post(url, json=body)
-    if response.status_code != 200:
-        print(f"Update auth error: {response.status_code} {response.text}")
-    else:
-        with open(auth, "w") as f:
-            headers = {"Authorization": f'Bearer {response.json()["jwt"]}'}
-            json.dump(headers, f)
-    return response
+
+def _warn_if_wide_permissions(path):
+    file_stat = os.stat(path)
+    mode = file_stat.st_mode
+    if mode & stat.S_IRWXG or mode & stat.S_IRWXO:
+        logger.warning(f"File {path} has too wide permissions: {oct(mode & 0o777)}")
+
+
+class Authenticator:
+    USER_ENV = "NIL_USER"
+    SECRET_ENV = "NIL_SECRET"
+    STORAGE_DIR = Path.home().joinpath(".config", "proof-market")
+    CREDENTIALS_BASENAME = "credentials.json"
+    AUTH_BASENAME = "auth.json"
+
+    class Helpers:
+        @staticmethod
+        def get_base_dir(directory):
+            return Authenticator.STORAGE_DIR if directory is None else Path(directory)
+
+        @staticmethod
+        def get_auth_file_path(directory):
+            return Authenticator.Helpers.get_base_dir(directory) / Authenticator.AUTH_BASENAME
+
+        @staticmethod
+        def get_credentials_file_path(directory):
+            return Authenticator.Helpers.get_base_dir(directory) / Authenticator.CREDENTIALS_BASENAME
+
+    def __init__(self, url: str, directory: str | None = None):
+        self.credentials_path = Authenticator.Helpers.get_credentials_file_path(directory)
+        self.auth_path = Authenticator.Helpers.get_auth_file_path(directory)
+        self.username, self.secret = self.get_credentials()
+        self.url = url
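+
+    # Credential lookup order (a note on the logic below): the NIL_USER /
+    # NIL_SECRET environment variables win when both are set; otherwise we
+    # fall back to credentials.json, warning first if its permissions are
+    # wider than owner-only.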
+    def get_credentials(self) -> tuple[str, str]:
+        username = os.environ.get(self.USER_ENV)
+        secret = os.environ.get(self.SECRET_ENV)
+        if username is None or secret is None:
+            logger.info(f"Environment variables not set. Reading credentials from {self.credentials_path}.")
+            _warn_if_wide_permissions(self.credentials_path)
+            with open(self.credentials_path, 'r') as file:
+                data = json.load(file)
+            username = data.get('username')
+            secret = data.get('secret')
+            if not username or not secret:
+                raise ValueError("Credentials not found in JSON file.")
+        return username, secret
+
+    def update_auth_file(self):
+        url = urljoin(self.url, "/user/signin")
+        body = {"username": self.username, "password": self.secret}
+
+        response = requests.post(url, json=body)
+        if response.status_code != 200:
+            raise RuntimeError(f"Failed to fetch auth: {response.status_code} {response.text}")
+        with open(self.auth_path, "w", opener=lambda path, flags: os.open(path, flags, 0o600)) as f:
+            headers = {"Authorization": f"Bearer {response.json()['jwt']}"}
+            json.dump(headers, f)
+        logger.info("Auth file updated.")
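+
+    # Note on the open(..., opener=...) pattern used below: the custom opener
+    # calls os.open() with mode 0o600, so the restrictive permissions are
+    # applied at creation time and the secret file is never readable by group
+    # or others (the umask can only tighten the mode further).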
"https://api.proof.market.nil.foundation/" +URL = "http://49.12.15.40:3000" DB_NAME = "market" MOUNT = "/v" + VERSION.replace(".", "_") AUTH_FILE = "./.auth.json" PROOFS_DIR = "./proofs/statements/" WAIT_BEFORE_SEND_PROOF = 30 ASK_UPDATE_INTERVAL = 20 -MY_STATEMENTS = { - "32326": {"cost": 5, "asks_limit": 10}, - "32292": {"cost": 5, "asks_limit": 10}, - "79169223": {"cost": 5, "asks_limit": 10}, -} USER = "skm" REQUEST_TIMEOUT = 100 diff --git a/scripts/proof_producer/proof-producer.service b/scripts/proof_producer/proof-producer.service index 858972d42..2ca50a6c4 100644 --- a/scripts/proof_producer/proof-producer.service +++ b/scripts/proof_producer/proof-producer.service @@ -10,4 +10,4 @@ Restart=always ExecStart=/usr/bin/python3 /proof-market-toolchain/scripts/proof_producer/proof_producer.py start -p /proof-market-toolchain/build/bin/proof-generator/proof-generator [Install] -WantedBy=multi-user.target \ No newline at end of file +WantedBy=multi-user.target diff --git a/scripts/proof_producer/proof_producer.py b/scripts/proof_producer/proof_producer.py index 3735e8ba0..8149be4c7 100644 --- a/scripts/proof_producer/proof_producer.py +++ b/scripts/proof_producer/proof_producer.py @@ -1,194 +1,334 @@ """Proof Producer daemon PoC""" -import requests -import sys -import logging import argparse import json -import time +import logging import os -import random -from threading import Thread -from constants import ( - MY_STATEMENTS, - DB_NAME, - URL, - MOUNT, - USER, - AUTH_FILE, - PROOFS_DIR, - ASK_UPDATE_INTERVAL, -) +import selectors import subprocess +import sys +import tempfile +import time +import traceback +from threading import Lock, Thread +from urllib.parse import urljoin + +from retrying import retry +# TODO: restructure project sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../") -from public_input_get import get as get_public_input -from auth_tools import update_auth, get_headers -from statement_tools import get as get_statement -from proposal_tools import push as push_proposal +import constants +from auth_tools import Authenticator, get_headers from proof_tools import push as push_proof +from proposal_tools import get as get_proposals +from public_input_get import get as get_public_input +from statement_tools import get_statements -secret = open(os.path.dirname(os.path.abspath(__file__)) + "/.secret", "r").read() -user = open(os.path.dirname(os.path.abspath(__file__)) + "/.user", "r").read() +logger = logging.getLogger(__name__) -def auth_loop(): - update_auth(AUTH_FILE) - wait_time = 20 * 60 # 20 minutes - while True: - time.sleep(wait_time) - update_auth(AUTH_FILE) +class CommandRunner(): + @staticmethod + def _log_stream(selector, logger_method, prefix): + while True: + for key, _ in selector.select(): + data = key.fileobj.readline() + if not data: + return + logger_method(f"{prefix}: {data.strip()}") -def get_statements(auth): - keys = MY_STATEMENTS.keys() - statements = {} - for key in keys: - try: - statements[key] = get_statement(auth, key, None) - except: - logging.error(f"Get statement error") - continue - return statements - - -def get_my_proposals(status="processing"): - url = URL + "/proposal" - url += f'?status={status}' - res = requests.get(url=url, headers=get_headers(AUTH_FILE)) - if res.status_code != 200: - logging.error(f"Get my proposals error: {res.status_code} {res.text}") - sys.exit(1) - else: - proposals = res.json() - my_statements_proposals = [] - for proposal in proposals: - if proposal["statement_key"] in MY_STATEMENTS: - 
+class CommandRunner:
+    @staticmethod
+    def run(command, prefix="Subprocess") -> int:
+        process = subprocess.Popen(
+            command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True,
+            bufsize=1,
+        )
+
+        sel = selectors.DefaultSelector()
+        sel.register(process.stdout, selectors.EVENT_READ, logger.info)
+        sel.register(process.stderr, selectors.EVENT_READ, logger.error)
+        while sel.get_map():
+            for key, _ in sel.select():
+                line = key.fileobj.readline()
+                if not line:
+                    sel.unregister(key.fileobj)
+                    continue
+                key.data(f"{prefix}: {line.strip()}")
+
+        process.wait()
+        return process.returncode
+
 
-def get_statements(auth):
-    keys = MY_STATEMENTS.keys()
-    statements = {}
-    for key in keys:
-        try:
-            statements[key] = get_statement(auth, key, None)
-        except:
-            logging.error(f"Get statement error")
-            continue
-    return statements
-
-
-def get_my_proposals(status="processing"):
-    url = URL + "/proposal"
-    url += f'?status={status}'
-    res = requests.get(url=url, headers=get_headers(AUTH_FILE))
-    if res.status_code != 200:
-        logging.error(f"Get my proposals error: {res.status_code} {res.text}")
-        sys.exit(1)
-    else:
-        proposals = res.json()
-        my_statements_proposals = []
-        for proposal in proposals:
-            if proposal["statement_key"] in MY_STATEMENTS:
-                my_statements_proposals.append(proposal)
-        return my_statements_proposals
-
-
-def proposals_loop():
-    while True:
-        time.sleep(ASK_UPDATE_INTERVAL)
-        try:
-            createdProposals = get_my_proposals("created")
-            processingProposals = get_my_proposals("processing")
-        except:
-            logging.error(f"Get proposals error")
-            continue
-
-        for st in MY_STATEMENTS:
-            proposalsFound = 0
-            for proposal in createdProposals:
-                if proposal["statement_key"] == st:
-                    proposalsFound += 1
-            for proposal in processingProposals:
-                if proposal["statement_key"] == st:
-                    proposalsFound += 1
-            if proposalsFound < MY_STATEMENTS[st]["proposals_limit"]:
-                cost = MY_STATEMENTS[st]["cost"] + round(random.uniform(0, 1), 1)
-                push_proposal(AUTH_FILE, st, cost)
-
+
+def exponential_none_retry(func):
+    return retry(
+        retry_on_result=lambda x: x is None,
+        stop_max_attempt_number=1,
+        wait_exponential_multiplier=1 * 1000,
+        wait_exponential_max=20 * 1000,
+    )(func)
+
+
+def periodic(interval):
+    def decorator(func):
+        def wrapper(*args, **kwargs):
+            while True:
+                try:
+                    func(*args, **kwargs)
+                except Exception as e:
+                    logger.error(f"An error occurred: {e}")
+                    logger.error(traceback.format_exc())
+                time.sleep(interval)
+        return wrapper
+    return decorator
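+
+
+# The daemon keeps the keys of in-flight requests in `processing_tasks` and
+# caps concurrency at `limit`: every accepted proposal is handled on its own
+# thread, and a slot frees up when process_single_task finishes.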
+class ProofProducerDaemon:
+    TASK_FETCH_PERIOD = 30
+    AUTH_PERIOD = 20 * 60
+
+    def __init__(self, base_url, db_path, statements_file, proof_generator_binary_path, assigner_binary_path, log_level, limit):
+        self.base_url = base_url
+        self.db_path = db_path
+        self.proof_generator_binary_path = proof_generator_binary_path
+        self.assigner_binary_path = assigner_binary_path
+        self.log_level = log_level
+        self.limit = limit
+        self.authenticator = Authenticator(base_url)
+        self.processing_tasks = set()
+
+        self.statements_ids_to_process = None
+        if statements_file is not None:
+            # If a file is provided, we process only the IDs from it. Otherwise, we process everything.
+            with open(statements_file, 'r') as f:
+                self.statements_ids_to_process = [l.strip() for l in f if l.strip()]
+
+    def fetch_statements(self):
+        logger.info("Fetching statements from proof market...")
+        statements_list = get_statements(self.base_url)
+        if self.statements_ids_to_process is not None:
+            statements_list = list(filter(lambda s: str(s["id"]) in self.statements_ids_to_process, statements_list))
+
+        statements = {}
+        error_met = False
+        for entry in statements_list:
+            try:
+                statements[entry["id"]] = get_statements(self.base_url, str(entry["id"]), None)
+            except Exception as e:
+                error_met = True
+                logger.error("Get statement error: %s", e)
+                continue
+        none_statements_n = sum(map(lambda x: x is None, statements.values()))
+        logger.info(f"{len(statements) - none_statements_n} statements were loaded, {none_statements_n} were not.")
+
+        statements_dir = os.path.join(self.db_path, "statements")
+        os.makedirs(statements_dir, exist_ok=True)
+        for key in statements:
+            with open(os.path.join(statements_dir, f"{key}.json"), "w") as f:
+                json.dump(statements[key], f, indent=2)
+        logger.info("Statements saved to database.")
+        return not error_met
+
+    def process_single_task(self, request_key: str, statement_key: str):
+        try:
+            logger.info(f"Generating proof for {request_key}...")
+            res = self.process_assigned_proposal(
+                request_key,
+                statement_key,
+            )
+            logger.info(f"Generating proof for {request_key} was {'successful' if res else 'unsuccessful'}")
+        finally:
+            self.processing_tasks.remove(request_key)
+
+    def run_assigned_tasks(self):
+        # For now /proposal/ returns only undone requests assigned to the
+        # current producer, so there is no need to ask for assignment.
+        logger.info("Fetching proposals...")
+        proposals = exponential_none_retry(get_proposals)(self.base_url)
+        if proposals is None:
+            logger.error("Can't get proposals list.")
+            return False
+
+        if self.statements_ids_to_process is not None:
+            proposals_for_current_worker = list(filter(lambda p: p["statement_key"] in self.statements_ids_to_process, proposals))
+        else:
+            proposals_for_current_worker = proposals
+
+        assigned_amount = len(proposals_for_current_worker)
+        can_process = self.limit - len(self.processing_tasks)
+        logger.info(
+            "%d are assigned to current worker. Remaining slots for processing: %d",
+            assigned_amount,
+            can_process,
+        )
+        if can_process == 0:
+            return True
+
+        unprocessed_proposals = list(filter(lambda p: p["request_key"] not in self.processing_tasks, proposals_for_current_worker))
+        for proposal in unprocessed_proposals[:can_process]:
+            self.processing_tasks.add(proposal["request_key"])
+            Thread(target=self.process_single_task, args=(proposal["request_key"], proposal["statement_key"])).start()
+        return True
 
-def produce_proof(proposal, proof_generator_binary_path, assigner_binary_path, db_path, log_level, auth):
-    try:
-        public_input_data = get_public_input(proposal["request_key"], auth).json()["input"]
-    except:
-        logging.error(f"Get public input parsing error for proposal with key: {proposal['_key']}")
-        return
-
-    request_dir = db_path + "/requests/" + proposal["request_key"] + "/"
-    os.makedirs(request_dir, exist_ok=True)
-
-    pulic_input_file = request_dir + "public_input.json"
-    with open(pulic_input_file, "w") as f:
-        json.dump(public_input_data, f, indent=4)
-
-    statement_file = db_path + "/statements/" + proposal["statement_key"] + ".json"
-    try:
-        with open(statement_file, 'r') as file:
-            data = json.load(file)
-            bytecode_data = data['definition']['proving_key']
-    except:
-        logging.error(f"Get bytecode parsing error for proposal with key: {proposal['_key']}")
-        return
-    bytecode_file = request_dir + "bytecode.ll"
-    with open(bytecode_file, "w") as f:
-        f.write(bytecode_data)
-
-    assignment_table_path = request_dir + "assignment.tbl"
-    circuit_path = request_dir + "circuit.crct"
-    assigner = subprocess.Popen(
-        [
-            assigner_binary_path,
-            "--bytecode=" + bytecode_file,
-            "--public-input=" + pulic_input_file,
-            "--circuit=" + circuit_path,
-            "--assignment-table=" + assignment_table_path,
-            "--elliptic-curve-type=" + "pallas",
-            "--log-level=" + log_level,
-        ]
-    )
-    assigner.communicate()
-
-    proof_path = request_dir + "proof.bin"
-    proof_generator = subprocess.Popen(
-        [
-            proof_generator_binary_path,
-            "--circuit=" + circuit_path,
-            "--assignment-table=" + assignment_table_path,
-            "--proof=" + proof_path,
-            "--log-level=" + log_level,
-        ]
-    )
-    proof_generator.communicate()
-    try:
-        push_proof(auth, proof_path, request_key=proposal["request_key"], proposal_key=proposal["_key"])
-    except:
-        logging.error(f"Push proof error")
-        return
-
-
-def proofs_loop(proof_generator_binary_path, assigner_binary_path, db_path, log_level):
-    while True:
-        time.sleep(ASK_UPDATE_INTERVAL)
-        try:
-            matchedProposals = get_my_proposals("processing")
-        except:
-            logging.error(f"Get processing proposals error")
-            continue
-        for proposal in matchedProposals:
-            produce_proof(proposal, proof_generator_binary_path, assigner_binary_path, db_path, log_level, AUTH_FILE)
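+
+    # process_assigned_proposal stages all intermediate artifacts through
+    # NamedTemporaryFile with delete=False so that the external assigner and
+    # proof-generator binaries can reopen the files by name; cleanup of the
+    # temporary files is left to the system.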
+    def process_assigned_proposal(self, request_key, statement_key):
+        statement_path = os.path.join(self.db_path, "statements", str(statement_key) + ".json")
+        try:
+            with open(statement_path, 'r') as file:
+                data = json.load(file)
+            bytecode_data = data['definition']['proving_key']
+        except Exception as e:
+            logger.error(
+                "Bytecode parsing error from statement file %s\nError: %s",
+                statement_path,
+                e,
+            )
+            return False
+
+        public_input_data = exponential_none_retry(get_public_input)(request_key, self.base_url)
+        if public_input_data is None:
+            logger.error("Can't get public input, no further attempts will be made.")
+            return False
+
+        with tempfile.NamedTemporaryFile(mode='w', delete=False) as public_input_fd, \
+                tempfile.NamedTemporaryFile(mode='w', delete=False) as bytecode_fd, \
+                tempfile.NamedTemporaryFile(mode='w', delete=False) as assignment_table_fd, \
+                tempfile.NamedTemporaryFile(mode='w', delete=False) as circuit_fd, \
+                tempfile.NamedTemporaryFile(mode='w', delete=False) as proof_fd:
+
+            json.dump(public_input_data, public_input_fd, indent=2)
+            public_input_fd.flush()
+
+            bytecode_fd.write(bytecode_data)
+            bytecode_fd.flush()
+
+            logger.info("Running assigner")
+            assigner = subprocess.Popen(
+                [
+                    self.assigner_binary_path,
+                    "--bytecode=" + bytecode_fd.name,
+                    "--public-input=" + public_input_fd.name,
+                    "--circuit=" + circuit_fd.name,
+                    "--assignment-table=" + assignment_table_fd.name,
+                    "--elliptic-curve-type=" + "pallas",
+                    "--log-level=" + self.log_level,
+                ]
+            )
+            assigner.communicate()
+            if assigner.returncode != 0:
+                logger.error("Assigner failed.")
+                return False
+
+            logger.info("Running proof generator")
+            proof_generator_ret = CommandRunner.run(
+                [
+                    self.proof_generator_binary_path,
+                    "--circuit=" + circuit_fd.name,
+                    "--assignment-table=" + assignment_table_fd.name,
+                    "--proof=" + proof_fd.name,
+                    "--log-level=" + self.log_level,
+                ],
+                f'{request_key} generator',
+            )
+            if proof_generator_ret != 0:
+                logger.error("Proof generator failed.")
+                return False
+
+            push_result = exponential_none_retry(push_proof)(
+                self.base_url,
+                proof_fd.name,
+                request_key=request_key,
+                proposal_key=request_key,
+            )
+            if push_result is None:
+                logger.error("Can't push proof, no further attempts will be made.")
+                return False
+
+        return True
+
+    def start(self):
+        self.authenticator.update_auth_file()
+        threads = [
+            Thread(
+                daemon=True,
+                target=periodic(interval=self.AUTH_PERIOD)(
+                    self.authenticator.update_auth_file
+                )
+            ),
+            Thread(
+                daemon=True,
+                target=periodic(interval=self.TASK_FETCH_PERIOD)(
+                    self.run_assigned_tasks
+                )
+            ),
+        ]
+        for thread in threads:
+            thread.start()
+
+        for thread in threads:
+            # Will never reach this point, the periodic loops are immortal
+            thread.join()
 
 
-def start(args):
-    Thread(target=auth_loop).start()
-    time.sleep(10)
-    Thread(target=proposals_loop).start()
-    Thread(target=proofs_loop(args.proof_generator, args.assigner, args.db_path, args.log_level)).start()
-
-
 def prepare(args):
-    update_auth(AUTH_FILE)
-    statements = get_statements(AUTH_FILE)
-
-    statements_dir = args.db_path + "/statements/"
-    os.makedirs(statements_dir, exist_ok=True)
-
-    for key in statements:
-        with open(statements_dir + key + ".json", "w") as f:
-            json.dump(statements[key], f, indent=4)
-    logging.info(f"Statements prepared")
+    daemon = ProofProducerDaemon(
+        base_url=args.url,
+        db_path=args.db_path,
+        statements_file=args.statements,
+        proof_generator_binary_path=None,
+        assigner_binary_path=None,
+        log_level=None,
+        limit=None,
+    )
+    return daemon.fetch_statements()
+
+
+def start(args):
+    daemon = ProofProducerDaemon(
+        base_url=args.url,
+        db_path=args.db_path,
+        statements_file=args.statements,
+        proof_generator_binary_path=args.proof_generator,
+        assigner_binary_path=args.assigner,
+        log_level=args.log_level,
+        limit=args.limit,
+    )
+    daemon.start()
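+
+
+# Typical flow (a sketch; paths, flag spellings, and files are illustrative):
+#   python3 proof_producer.py prepare --statements statements.txt
+#   python3 proof_producer.py start --statements statements.txt \
+#       --proof-generator /path/to/proof-generator --assigner /path/to/assigner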
+def main():
+    logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--url", action="store", default=constants.URL, help="URL of a producer")
+    parent_parser.add_argument(
+        "--db-path", help="directory with producer's data", default="/tmp/proof-producer/"
+    )
+    parent_parser.add_argument("--statements", action="store", required=True, help="File with the statement IDs you want to process, one ID per line.")
 
-if __name__ == "__main__":
-    logging.basicConfig(level=logging.INFO, format="%(message)s")
     parser = argparse.ArgumentParser()
+    # If we ever need more sub-commands
     subparsers = parser.add_subparsers(help="sub-command help")
+
+    parser_prepare = subparsers.add_parser(
+        "prepare",
+        help="Fetch statements from proof market. Only statements listed in the --statements file will be fetched.",
+        parents=[parent_parser],
+    )
+    parser_prepare.set_defaults(func=prepare)
+
     parser_start = subparsers.add_parser(
         "start",
-        help="start Proof Producer daemon (do not forget to prepare statements first)",
+        help="start Proof Producer daemon (do not forget to call `prepare` first)",
+        parents=[parent_parser],
     )
     parser_start.add_argument(
         "--proof-generator", help="path to proof generator binary", required=True
@@ -200,17 +340,18 @@ def prepare(args):
         "--log-level", help="log level", choices=['trace', 'debug', 'info', 'warning', 'error', 'fatal'], default="info"
     )
     parser_start.add_argument(
-        "--db-path", help="directory with producer's data", default="/tmp/proof-producer/"
+        "--limit", type=int, help="number of tasks to process simultaneously", default=1
     )
     parser_start.set_defaults(func=start)
-    parser_prepare = subparsers.add_parser(
-        "prepare",
-        help="download statements from Proof Market (do not forget to setup constants.py first)",
-    )
-    parser_prepare.add_argument(
-        "--db-path", help="directory with producer's data", default="/tmp/proof-producer/"
-    )
-    parser_prepare.set_defaults(func=prepare)
 
     args = parser.parse_args()
-    args.func(args)
+    if not hasattr(args, 'func'):
+        # invalid subparser
+        parser.print_help()
+        sys.exit(1)
+
+    sys.exit(0 if args.func(args) else 1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/proof_producer/statements.txt b/scripts/proof_producer/statements.txt
new file mode 100644
index 000000000..94ebaf900
--- /dev/null
+++ b/scripts/proof_producer/statements.txt
@@ -0,0 +1,4 @@
+1
+2
+3
+4
diff --git a/scripts/proof_tools.py b/scripts/proof_tools.py
index a88be9c77..793e41ad2 100644
--- a/scripts/proof_tools.py
+++ b/scripts/proof_tools.py
@@ -1,20 +1,16 @@
-import sys
-import requests
-import logging
 import argparse
-import os.path
-import inspect
 import json
-from constants import DB_NAME, URL, MOUNT
-from auth_tools import get_headers
-import request_tools
+import logging
+import requests
+import sys
+from urllib.parse import urljoin
 
-currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
-parentdir = os.path.dirname(currentdir)
-sys.path.insert(0, parentdir)
+import constants
+import request_tools
+from auth_tools import get_headers
 
 
-def push(auth, file, request_key=None, proposal_key=None):
+def push(url, file, request_key=None, proposal_key=None):
     proof = open(file, "r").read()
     data = {"proof": proof}
     if request_key:
@@ -22,59 +18,66 @@
         data["request_key"] = request_key
     if proposal_key:
         data["proposal_key"] = proposal_key
 
-    headers = get_headers(auth)
-    url = URL + f"/proof"
+    headers = get_headers()
+    url = urljoin(url, "/proof")
     res = requests.post(url=url, json=data, headers=headers)
     if res.status_code != 200:
         logging.error(f"Error: {res.status_code} {res.text}")
-        return
+        return None
+
+    if request_key:
+        logging.info(f"Proof for request {request_key} is pushed")
     else:
-        if request_key:
-            logging.info(f"Proof for request {request_key} is pushed")
-        else:
-            logging.info(f"Proof for proposal {proposal_key} is pushed")
-        return
+        logging.info(f"Proof for proposal {proposal_key} is pushed")
+    return res.json()
 
 
-def get(auth, request_key=None, proof_key=None, file=None):
-    headers = get_headers(auth)
-    url = URL + "/proof/"
+def get(url, request_key=None, proof_key=None, file=None):
+    headers = get_headers()
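+    # When only a request key is known, the proof key is resolved from the
+    # request record first; the proof document itself is then fetched by key.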
     if request_key:
-        proof_key = request_tools.get(auth, key=request_key)["proof_key"]
-        url += proof_key + "?full=true"
-    elif proof_key:
+        proof_key = request_tools.get(url, key=request_key)["proof_key"]
+    url = urljoin(url, "/proof/")
+    if proof_key:
         url += proof_key + "?full=true"
     res = requests.get(url=url, headers=headers)
     if res.status_code != 200:
         logging.error(f"Error: {res.status_code} {res.reason}")
-        exit(1)
+        return None
+
+    res_json = res.json()
+    if file and "proof" in res_json:
+        with open(file, "w") as f:
+            f.write(res_json.pop("proof"))
+        logging.info(f"Proof is saved to {file}")
     else:
-        res_json = res.json()
-        if file and "proof" in res_json:
-            with open(file, "w") as f:
-                f.write(res_json.pop("proof"))
-            logging.info(f"Proof is saved to {file}")
-        else:
-            logging.info(f"Proof:\t\t {json.dumps(res_json, indent=4)}")
+        logging.info(f"Proof:\t\t {json.dumps(res_json, indent=4)}")
+
+    return res_json
 
 
 def push_parser(args):
-    push(args.auth, args.proof, args.request_key, args.proposal_key)
+    return push(args.url, args.proof, args.request_key, args.proposal_key) is not None
 
 
 def get_parser(args):
-    get(args.auth, args.request_key, args.proof_key, args.proof)
+    return get(args.url, args.request_key, args.proof_key, args.proof) is not None
 
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO, format="%(message)s")
+
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument(
+        "--url", action="store", default=constants.URL, help="url of a producer"
+    )
+
     parser = argparse.ArgumentParser()
-    parser.add_argument("--auth", type=str, help="auth")
     subparsers = parser.add_subparsers(help="sub-command help")
 
-    parser_push = subparsers.add_parser("push", help="push proof")
+    parser_push = subparsers.add_parser("push", help="push proof", parents=[parent_parser])
     parser_push.set_defaults(func=push_parser)
-    parser_get = subparsers.add_parser("get", help="get proof")
+    parser_get = subparsers.add_parser("get", help="get proof", parents=[parent_parser])
     parser_get.set_defaults(func=get_parser)
 
     parser_push.add_argument("--proposal_key", type=str, default=None, help="proposal_key")
@@ -86,4 +89,11 @@
     parser_get.add_argument("-p", "--proof", type=str, help="path to store the proof", default="proof.bin")
     parser_get.add_argument("--request_key", type=str, help="request_key")
     args = parser.parse_args()
-    args.func(args=args)
+    if not hasattr(args, 'func'):
+        # invalid subparser
+        parser.print_help()
+        sys.exit(1)
+
+    sys.exit(0 if args.func(args) else 1)
diff --git a/scripts/proposal_tools.py b/scripts/proposal_tools.py
index c41a179c9..76fb64622 100644
--- a/scripts/proposal_tools.py
+++ b/scripts/proposal_tools.py
@@ -1,61 +1,66 @@
 """Proposal get and push functionality"""
-import requests
+import argparse
 import json
 import logging
-import argparse
-from constants import DB_NAME, URL, MOUNT, REQUEST_TIMEOUT
+import requests
+import sys
+from urllib.parse import urljoin
+
+import constants
 from auth_tools import get_headers
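 
+
+# Note: urljoin() with an absolute path replaces whatever path the base URL
+# carries, e.g. urljoin("http://49.12.15.40:3000", "/proposal/") yields
+# "http://49.12.15.40:3000/proposal/".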
f"/proposal/" - if key: - url += key - else: - url += "?limit=100" - res = requests.get(url=url, headers=headers, timeout=REQUEST_TIMEOUT) +def get(url, status=None): + headers = get_headers() + url = urljoin(url, "/proposal/") + if status is not None: + url = urljoin(url, '?status={status}') + res = requests.get(url=url, headers=headers, timeout=constants.REQUEST_TIMEOUT) if res.status_code != 200: logging.error(f"Error: {res.status_code} {res.text}") - return + return None else: logging.info(f"Proposal:\n {json.dumps(res.json(), indent=4)}") return res.json() def push_parser(push_args): - return push(push_args.auth, push_args.key, push_args.cost) + res = push(push_args.url, push_args.key, push_args.cost) + return res is not None def get_parser(get_args): - return get(get_args.auth, get_args.key) + return get(get_args.url, get_args.key) is not None if __name__ == "__main__": logging.basicConfig(level=logging.INFO, format="%(message)s") + parent_parser = argparse.ArgumentParser(add_help=False) + parent_parser.add_argument( + "--url", action="store", default=constants.URL, help="url of a producer" + ) + parser = argparse.ArgumentParser() - parser.add_argument("--auth", type=str, help="auth file") subparsers = parser.add_subparsers(help="sub-command help") - parser_push = subparsers.add_parser("push", help="push proposal") + parser_push = subparsers.add_parser("push", help="push proposal", parents=[parent_parser]) parser_push.set_defaults(func=push_parser) - parser_get = subparsers.add_parser("get", help="get proposal") + parser_get = subparsers.add_parser("get", help="get proposal", parents=[parent_parser]) parser_get.set_defaults(func=get_parser) parser_push.add_argument("--cost", type=float, required=True, help="cost") parser_push.add_argument( @@ -63,4 +68,9 @@ def get_parser(get_args): ) parser_get.add_argument("--key", type=str, help="key of the proposal") args = parser.parse_args() - args.func(args) + if not hasattr(args, 'func'): + # invalid subparser + parser.print_help() + sys.exit(1) + + sys.exit(0 if args.func(args) else 1) diff --git a/scripts/public_input_get.py b/scripts/public_input_get.py index 61ca709ba..aec9de16e 100644 --- a/scripts/public_input_get.py +++ b/scripts/public_input_get.py @@ -1,39 +1,51 @@ """Get public input for a request.""" -import requests -import sys -import logging import argparse import json -from constants import DB_NAME, URL, MOUNT, REQUEST_TIMEOUT +import logging +import requests +import sys +from urllib.parse import urljoin + +import constants from auth_tools import get_headers -def get(key, auth): - headers = get_headers(auth) - url = URL + f"/request/" +logger = logging.getLogger(__name__) + + +def get(key, url): + headers = get_headers() + url = urljoin(url, "/request/") if key: url += key - res = requests.get(url=url, headers=headers, timeout=REQUEST_TIMEOUT) + res = requests.get(url=url, headers=headers, timeout=constants.REQUEST_TIMEOUT) if res.status_code != 200: - logging.error( + logger.error( f"Get public input error for request {key}: {res.status_code} {res.text}" ) - sys.exit(1) - else: - return res + return None + return res.json()["input"] -if __name__ == "__main__": +def main(): logging.basicConfig(level=logging.INFO, format="%(message)s") parser = argparse.ArgumentParser() parser.add_argument("-k", "--key", metavar="key", type=str, help="key of the request") - parser.add_argument("--auth", metavar="auth", type=str, help="auth") + parser.add_argument( + "--url", action="store", default=constants.URL, help="url of a producer" + ) 
     parser.add_argument(
         "-o", "--output", metavar="file", type=str, required=True, help="output file"
     )
     args = parser.parse_args()
-    public_input = get(args.key, args.auth)
+    public_input = get(args.key, args.url)
+    if public_input is None:
+        sys.exit(1)
     with open(args.output, "w") as f:
-        output = public_input.json()["input"]
-        json.dump(output, f, indent=4)
+        json.dump(public_input, f, indent=4)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/request_tools.py b/scripts/request_tools.py
index 2a6617f41..ce3021c7e 100644
--- a/scripts/request_tools.py
+++ b/scripts/request_tools.py
@@ -1,8 +1,11 @@
-import logging
-import json
 import argparse
+import json
+import logging
 import requests
-from constants import DB_NAME, URL, MOUNT
+import sys
+from urllib.parse import urljoin
+
+import constants
 from auth_tools import get_headers
 
 
@@ -11,7 +14,8 @@
         input = json.load(f)
     return input
 
-def push(auth, key, file, cost, aggregated_mode_id=None, verbose=False):
+
+def push(url, key, file, cost, aggregated_mode_id=None, verbose=False):
     data = {
         "statement_key": key,
         "input": get_prepared_input(file),
@@ -20,12 +24,12 @@
     if aggregated_mode_id is not None:
         data["aggregated_mode_id"] = aggregated_mode_id
 
-    headers = get_headers(auth)
-    url = URL + "/request"
+    headers = get_headers(url=url)
+    url = urljoin(url, "/request/")
     res = requests.post(url=url, json=data, headers=headers)
     if res.status_code != 200:
         logging.error(f"Error: {res.status_code} {res.text}")
-        return
+        return None
     else:
         log_data = res.json()
         if not verbose:
@@ -35,11 +39,11 @@
         return res.json()
 
 
-def get(auth, key=None, request_status=None, verbose=False):
-    headers = get_headers(auth)
-    url = URL + f"/request/"
+def get(url, key=None, request_status=None, verbose=False):
+    headers = get_headers(url=url)
+    url = urljoin(url, "/request/")
     if request_status:
-        url += f'?q=[{{"key" : "status", "value" : "{request_status}"}}]&limit=100'
+        url += f'?q=[{{"key" : "status", "value" : "{request_status}"}}]&limit=100'  # FIXME: adjust backend to either consume it from body or from query params
     elif key:
         url += str(key)
     else:
@@ -47,7 +51,7 @@
     res = requests.get(url=url, headers=headers)
     if res.status_code != 200:
         logging.error(f"Error: {res.status_code} {res.text}")
-        return
+        return None
     else:
         log_data = res.json()
         if not verbose and '_key' in log_data:
@@ -58,28 +62,31 @@
 
 
 def push_parser(args):
-    push(args.auth, args.statement_key, args.input, args.cost, verbose=args.verbose)
+    return push(args.url, args.statement_key, args.input, args.cost, verbose=args.verbose) is not None
 
 
 def get_parser(args):
-    get(args.auth, args.request_key, args.request_status, args.verbose)
+    return get(args.url, args.request_key, args.request_status, args.verbose) is not None
 
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO, format="%(message)s")
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--auth", type=str, help="auth file")
-    parser.add_argument(
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--url", action="store", default=constants.URL, help="URL of a producer")
+    parent_parser.add_argument(
         "-v", "--verbose", action="store_true", help="increase output verbosity"
    )
+
+    parser = argparse.ArgumentParser()
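+    # parent_parser is the standard argparse pattern for sharing flags across
+    # sub-commands: each subparser passes it via `parents` and so inherits
+    # --url and -v without redefining them.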
     subparsers = parser.add_subparsers(help="sub-command help")
-    parser_get = subparsers.add_parser("get", help="get request")
+
+    parser_get = subparsers.add_parser("get", help="get request", parents=[parent_parser])
     parser_get.set_defaults(func=get_parser)
     parser_get.add_argument("--request-key", type=str, help="request key")
     parser_get.add_argument("--request-status", type=str, help="request status")
-    parser_push = subparsers.add_parser("push", help="push request")
+    parser_push = subparsers.add_parser("push", help="push request", parents=[parent_parser])
     parser_push.set_defaults(func=push_parser)
     parser_push.add_argument("--cost", type=float, required=True, help="cost")
     parser_push.add_argument(
@@ -93,4 +100,9 @@
         help="required proof time generation (in mins)",
     )
     args = parser.parse_args()
-    args.func(args=args)
+    if not hasattr(args, 'func'):
+        # invalid subparser
+        parser.print_help()
+        sys.exit(1)
+
+    sys.exit(0 if args.func(args) else 1)
diff --git a/scripts/signup.py b/scripts/signup.py
index 11bcc06e1..632f54667 100644
--- a/scripts/signup.py
+++ b/scripts/signup.py
@@ -1,10 +1,14 @@
 import argparse
+import logging
 import requests
 import sys
 from urllib.parse import urljoin
 
-from auth_tools import get_headers, create_credentials_file
-from constants import URL
+import constants
+from auth_tools import get_headers, Authenticator
+
+
+logger = logging.getLogger(__name__)
 
 
 def signup(user, password, email, url):
@@ -16,15 +20,11 @@
     }
 
     response = requests.post(url, json=body)
-    if response.status_code != 200:
-        print(f"Error: {response.status_code} {response.text}")
-    else:
-        print(response.text)
     return response
 
 
 def register_producer(description, url, logo, eth_address):
-    headers = get_headers(None)
+    headers = get_headers(url=url)
     url = urljoin(url, "/producer")
     body = {"description": description}
     if url is not None:
@@ -36,21 +36,20 @@
     response = requests.post(url, json=body, headers=headers)
 
     if response.status_code == 200:
-        pass
+        logger.info("Producer registered.")
     else:
-        print(f"Error: {response.status_code} {response.text}")
+        logger.error(f"{response.status_code} {response.text}")
 
-    print(response.text)
+    logger.debug(response.text)
     return response.status_code in [200]
 
 
 def signup_parser(args) -> bool:
     response = signup(args.user, args.password, args.email, args.url)
     if response.status_code == 200:
-        create_credentials_file("secret", args.password)
-        create_credentials_file("user", args.user)
+        Authenticator.create_credentials_file(args.user, args.password)
     else:
-        print(f"Error: {response.status_code} {response.text}")
+        logger.error(f"Error during signup API call: {response.status_code} {response.text}")
 
     return response.status_code in [200]
 
@@ -62,12 +61,11 @@
 
 
 def main():
-    parser = argparse.ArgumentParser()
+    logging.basicConfig(level=logging.INFO, format="%(message)s")
 
     parent_parser = argparse.ArgumentParser(add_help=False)
-    parent_parser.add_argument(
-        "--url", action="store", default=URL, help="url of a producer"
-    )
+    parent_parser.add_argument("--url", action="store", default=constants.URL, help="URL of a producer")
 
+    parser = argparse.ArgumentParser()
     subparsers = parser.add_subparsers(help="sub-command help")
 
     parser_user = subparsers.add_parser(
@@ -101,12 +99,12 @@
     parser_producer.set_defaults(func=register_producer_parser)
 
     args = parser.parse_args()
-    if hasattr(args, 'func'):
-        # valid submodules
-        sys.exit(0 if args.func(args) else 1)
+    if not hasattr(args, 'func'):
+        # invalid subparser
+        parser.print_help()
+        sys.exit(1)
 
-    parser.print_help()
-    sys.exit(1)
+    sys.exit(0 if args.func(args) else 1)
 
 
 if __name__ == "__main__":
diff --git a/scripts/statement_tools.py b/scripts/statement_tools.py
index 7ced6d8fc..7ae6b7838 100644
--- a/scripts/statement_tools.py
+++ b/scripts/statement_tools.py
@@ -1,67 +1,103 @@
-import requests
+import argparse
 import json
 import logging
-import argparse
-from constants import DB_NAME, URL, MOUNT
+import requests
+import sys
+from urllib.parse import urljoin
+
+import constants
 from auth_tools import get_headers
 
 
-def push(auth, file):
+logger = logging.getLogger(__name__)
+
+
+def push_statement(file, url):
     if file:
         f = open(file, "r")
         data = json.load(f)
     else:
-        logging.error("Error: file is required")
-        return
+        logger.error("Error: file is required")
+        return None
 
-    headers = get_headers(auth)
-    url = URL + "/statement"
+    headers = get_headers(url=url)
+    url = urljoin(url, "/statement")
     res = requests.post(url=url, json=data, headers=headers)
     if res.status_code != 200:
-        logging.error(f"Error: {res.status_code} {res.text}")
-        return
+        logger.error(f"Error: {res.status_code} {res.text}")
+        return None
     else:
-        logging.info(f"Statement from {file} was pushed")
+        logger.info(f"Statement from {file} was pushed")
         return res
 
 
-def push_parser(args):
-    return push(args.auth, args.file)
-
-
-def get(auth, key, output):
-    headers = get_headers(auth)
-    url = URL + "/statement/"
-    if key:
-        url += key
-    res = requests.get(url=url, headers=headers)
-    if res.status_code != 200:
-        logging.error(f"Error: {res.status_code} {res.text}")
-        return
-    else:
-        logging.info(f"Statements:\n {json.dumps(res.json(), indent=4)}")
-        if output:
-            with open(output, "w") as f:
-                json.dump(res.json(), f, indent=4)
-        return res.json()
+def list_statements(url) -> list[dict]:
+    headers = get_headers(url=url)
+    url = urljoin(url, "/statement")
+    res = requests.get(url=url, headers=headers)
+    if res.status_code != 200:
+        logger.error(f"Error: {res.status_code} {res.text}")
+        return None
+    else:
+        logger.debug("Statements:\n%s", json.dumps(res.json(), indent=2))
+        return res.json()
+
+
+def get_statements(url, key=None, output=None):
+    headers = get_headers(url=url)
+    url = urljoin(url, "/statement/")
+    if key is not None:
+        url = urljoin(url, key)
+    res = requests.get(url=url, headers=headers)
+    if res.status_code != 200:
+        logger.error(f"Error: {res.status_code} {res.text}")
+        return None
+    return res.json()
+
+
+def push_parser(args):
+    return push_statement(args.file, args.url) is not None
 
 
 def get_parser(args):
-    return get(args.auth, args.key, args.output)
+    statements = get_statements(args.url, args.key, args.output)
+    if statements is None:
+        return False
+
+    if args.output:
+        with open(args.output, "w") as f:
+            json.dump(statements, f, indent=2)
+        logger.info("Statements are saved into %s", args.output)
+    else:
+        print(f"Statements:\n{json.dumps(statements, indent=2)}")
+    return True
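+
+
+# Example (a sketch; the URL is illustrative): fetch every statement and save
+# the result to a file:
+#   python3 statement_tools.py get --url http://49.12.15.40:3000 -o statements.json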
subparsers.add_parser("get", help="get statement") - parser_get.set_defaults(func=get_parser) parser_push.add_argument("--file", type=str, required=True, help="file") + + parser_get = subparsers.add_parser("get", help="get statement", parents=[parent_parser]) + parser_get.set_defaults(func=get_parser) parser_get.add_argument("--key", type=str, help="statement key") parser_get.add_argument("-o", "--output", type=str, help="output file") + args = parser.parse_args() - args.func(args=args) + if not hasattr(args, 'func'): + # invalid subparser + parser.print_help() + sys.exit(1) + + sys.exit(0 if args.func(args) else 1) + + +if __name__ == "__main__": + main()