diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 37767b21c..9d2b196e7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -324,7 +324,7 @@ jobs:
 
       - name: Run integration tests of the HTTP frontend
         run: |
-          ./admin/local/cli/qserv --log-level DEBUG itest-http \
+          ./admin/local/cli/qserv --log-level DEBUG itest-http --reload --load-http \
             --qserv-image ${{ needs.image-names.outputs.qserv-image }} \
             --mariadb-image ${{ needs.image-names.outputs.mariadb-image }}
 
diff --git a/admin/tools/docker/base/Dockerfile b/admin/tools/docker/base/Dockerfile
index 4bf84680c..d02fc3ccd 100644
--- a/admin/tools/docker/base/Dockerfile
+++ b/admin/tools/docker/base/Dockerfile
@@ -171,7 +171,8 @@ RUN pip3 install \
     sqlalchemy \
     sqlalchemy-stubs \
     types-PyYAML \
-    types-requests
+    types-requests \
+    requests_toolbelt
 
 RUN useradd --create-home --uid 1000 --shell /bin/bash qserv
 
@@ -282,4 +283,5 @@ RUN pip3 install \
     "mysql-connector-python==8.0.31" \
     pyyaml \
     requests \
+    requests_toolbelt \
     sqlalchemy
diff --git a/src/admin/python/lsst/qserv/admin/cli/_integration_test.py b/src/admin/python/lsst/qserv/admin/cli/_integration_test.py
index e686325d1..2c4988066 100644
--- a/src/admin/python/lsst/qserv/admin/cli/_integration_test.py
+++ b/src/admin/python/lsst/qserv/admin/cli/_integration_test.py
@@ -114,6 +114,7 @@ def run_integration_tests(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -133,6 +134,8 @@ def run_integration_tests(
         database matches the qserv database.)
     reload : `bool`
         Remove test databases and re-add them.
+    load_http : `bool`
+        If True, use the HTTP protocol to load the data.
     cases : `list` [`str`]
         Run (and load/reload data if those flags are set) these test cases only.
     run_tests : `bool`
@@ -197,6 +200,7 @@ def run_integration_tests(
             admin_auth_key=tests_data["repl-admin-auth-key"],
             cases=cases,
             load=load,
+            load_http=load_http,
         )
 
     if run_tests:
@@ -225,6 +229,7 @@ def run_integration_tests_http(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -244,6 +249,8 @@ def run_integration_tests_http(
         database matches the qserv database.)
     reload : `bool`
         Remove test databases and re-add them.
+    load_http : `bool`
+        If True, use the HTTP protocol to load the data.
     cases : `list` [`str`]
         Run (and load/reload data if those flags are set) these test cases only.
     run_tests : `bool`
@@ -308,6 +315,7 @@ def run_integration_tests_http(
             admin_auth_key=tests_data["repl-admin-auth-key"],
             cases=cases,
             load=load,
+            load_http=load_http,
         )
 
     if run_tests:
diff --git a/src/admin/python/lsst/qserv/admin/cli/entrypoint.py b/src/admin/python/lsst/qserv/admin/cli/entrypoint.py
index 34905ec26..32c409a70 100644
--- a/src/admin/python/lsst/qserv/admin/cli/entrypoint.py
+++ b/src/admin/python/lsst/qserv/admin/cli/entrypoint.py
@@ -51,6 +51,7 @@
     option_options_file,
     option_mysql_monitor_password,
     option_reload,
+    option_load_http,
     option_repl_auth_key,
     option_repl_admin_auth_key,
     option_repl_connection,
@@ -326,12 +327,13 @@ def entrypoint(log_level: str) -> None:
 @entrypoint.command()
 @option_repl_auth_key()
 @click.argument("repl_ctrl_uri")
-def load_simple(repl_ctrl_uri: str, repl_auth_key: str) -> None:
+@option_load_http()
+def load_simple(repl_ctrl_uri: str, repl_auth_key: str, load_http: bool) -> None:
     """Load a small test dataset into qserv.
 
     REPL_CTRL_URI is the uri to the replication controller.
""" - script.load_simple(repl_ctrl_uri, auth_key=repl_auth_key) + script.load_simple(repl_ctrl_uri, auth_key=repl_auth_key, load_http=load_http) @entrypoint.command() @@ -342,6 +344,7 @@ def load_simple(repl_ctrl_uri: str, repl_auth_key: str) -> None: @option_unload() @option_load() @option_reload() +@option_load_http() @option_case() @option_run_tests() @option_tests_yaml() @@ -351,6 +354,7 @@ def integration_test( unload: bool, load: Optional[bool], reload: bool, + load_http: bool, cases: List[str], run_tests: bool, tests_yaml: str, @@ -366,6 +370,7 @@ def integration_test( unload=unload, load=load, reload=reload, + load_http=load_http, cases=cases, run_tests=run_tests, tests_yaml=tests_yaml, @@ -383,6 +388,7 @@ def integration_test( @option_unload() @option_load() @option_reload() +@option_load_http() @option_case() @option_run_tests() @option_tests_yaml() @@ -392,6 +398,7 @@ def integration_test_http( unload: bool, load: Optional[bool], reload: bool, + load_http: bool, cases: List[str], run_tests: bool, tests_yaml: str, @@ -407,6 +414,7 @@ def integration_test_http( unload=unload, load=load, reload=reload, + load_http=load_http, cases=cases, run_tests=run_tests, tests_yaml=tests_yaml, diff --git a/src/admin/python/lsst/qserv/admin/cli/options.py b/src/admin/python/lsst/qserv/admin/cli/options.py index 8a5dc9d15..26bfee53f 100644 --- a/src/admin/python/lsst/qserv/admin/cli/options.py +++ b/src/admin/python/lsst/qserv/admin/cli/options.py @@ -266,6 +266,12 @@ def __call__(self, f: Callable) -> Callable: is_flag=True, ) +option_load_http = partial( + click.option, + "--load-http", + help="HTTP-based table loading protocol. Used with --load and --reload", + is_flag=True, +) option_run_tests = partial( click.option, diff --git a/src/admin/python/lsst/qserv/admin/cli/script.py b/src/admin/python/lsst/qserv/admin/cli/script.py index 7dbbf3471..0ddd17ad3 100644 --- a/src/admin/python/lsst/qserv/admin/cli/script.py +++ b/src/admin/python/lsst/qserv/admin/cli/script.py @@ -962,7 +962,7 @@ def delete_database( repl.delete_database(database, admin) -def load_simple(repl_ctrl_uri: str, auth_key: str) -> None: +def load_simple(repl_ctrl_uri: str, auth_key: str, load_http: bool) -> None: """Load a simple predefined database into qserv. The database is called "test101" and have a table called Object with one row. @@ -973,6 +973,8 @@ def load_simple(repl_ctrl_uri: str, auth_key: str) -> None: The uri to the replication controller service. auth_key : `str` The authorizaiton key for the replication-ingest system. + load_http : `bool` + If true, the database will be loaded using the http interface. 
""" repl = ReplicationInterface(repl_ctrl_uri, auth_key) @@ -1025,10 +1027,15 @@ def load_simple(repl_ctrl_uri: str, auth_key: str) -> None: chunk_location = repl.ingest_chunk_config(transaction_id, "0") repl.ingest_data_file( transaction_id, + "0", + False, chunk_location.host, chunk_location.port, + chunk_location.http_host, + chunk_location.http_port, data_file=data_file, table=table, + load_http=load_http, ) repl.commit_transaction(transaction_id) repl.publish_database(database) @@ -1039,6 +1046,7 @@ def integration_test( unload: bool, load: Optional[bool], reload: bool, + load_http: bool, cases: List[str], run_tests: bool, tests_yaml: str, @@ -1051,6 +1059,7 @@ def integration_test( unload=unload, load=load, reload=reload, + load_http=load_http, cases=cases, run_tests=run_tests, tests_yaml=tests_yaml, @@ -1064,6 +1073,7 @@ def integration_test_http( unload: bool, load: Optional[bool], reload: bool, + load_http: bool, cases: List[str], run_tests: bool, tests_yaml: str, @@ -1076,6 +1086,7 @@ def integration_test_http( unload=unload, load=load, reload=reload, + load_http=load_http, cases=cases, run_tests=run_tests, tests_yaml=tests_yaml, diff --git a/src/admin/python/lsst/qserv/admin/itest_load.py b/src/admin/python/lsst/qserv/admin/itest_load.py index 704101814..676e7aa6a 100644 --- a/src/admin/python/lsst/qserv/admin/itest_load.py +++ b/src/admin/python/lsst/qserv/admin/itest_load.py @@ -338,6 +338,7 @@ def _load_database( repl_ctrl_uri: str, auth_key: str, admin_auth_key: str, + load_http: bool, ) -> None: """Load a database. @@ -355,6 +356,8 @@ def _load_database( The authorizaiton key for the replication-ingest system. admin_auth_key : `str` The admin authorizaiton key for the replication-ingest system. + load_http : `bool`, optional + The protocol to use for loading the data. """ _log.info(f"Loading database %s for test %s auth_key %s admin_auth_key %s", load_db.name, load_db.id, auth_key, admin_auth_key) repl = ReplicationInterface(repl_ctrl_uri, auth_key, admin_auth_key) @@ -404,12 +407,14 @@ def do_ingest_table_config() -> None: table=table, chunks_folder=staging_dir, chunk_info_file=os.path.join(staging_dir, chunk_info_file), + load_http=load_http, ) else: repl.ingest_table_data( transaction_id=transaction_id, table=table, data_file=data_file, + load_http=load_http, ) repl.commit_transaction(transaction_id) @@ -528,6 +533,7 @@ def load( test_cases_data: List[Dict[Any, Any]], ref_db_admin: str, load: Optional[bool], + load_http: bool, cases: Optional[List[str]], auth_key: str, admin_auth_key: str, @@ -550,6 +556,8 @@ def load( unload == True then will not load the database, otherwise if `None` will load the database if it is not yet loaded into qserv (assumes the ref database matches the qserv database.) + load_http : `bool` + The protocol to use for loading the data. cases : `list` [`str`], optional Restrict loading to these test cases if provided. 
     auth_key : `str`
@@ -570,7 +578,7 @@ def load(
     for case_data in cases_data:
         load_db = LoadDb(case_data)
         if load == True or (load is None and load_db.name not in qserv_dbs):
-            _load_database(load_db, ref_db_uri, ref_db_admin, repl_ctrl_uri, auth_key, admin_auth_key)
+            _load_database(load_db, ref_db_uri, ref_db_admin, repl_ctrl_uri, auth_key, admin_auth_key, load_http=load_http)
 
 
 def remove(
diff --git a/src/admin/python/lsst/qserv/admin/qservCli/launch.py b/src/admin/python/lsst/qserv/admin/qservCli/launch.py
index ebc24cd17..912bb997b 100644
--- a/src/admin/python/lsst/qserv/admin/qservCli/launch.py
+++ b/src/admin/python/lsst/qserv/admin/qservCli/launch.py
@@ -930,6 +930,7 @@ def integration_test(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -970,6 +971,8 @@ def integration_test(
         will load test databases that are not loaded yet.
     reload : bool
         Remove and reload test data. Same as passing `unload=True` and `load=True`.
+    load_http : bool
+        Table loading protocol. If True, use the HTTP protocol to load tables.
     cases : List[str]
         Run this/these test cases only. If list is empty list will run all the cases.
     run_tests : bool
@@ -1019,6 +1022,7 @@ def integration_test(
     for opt, var in (
         ("--unload", unload),
         ("--reload", reload),
+        ("--load-http", load_http),
     ):
         if var:
             args.append(opt)
@@ -1060,6 +1064,7 @@ def integration_test_http(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -1091,6 +1096,8 @@ def integration_test_http(
         If True do not run the command; print what would have been run.
     project : `str`
         The name used for qserv instance customizations.
+    load_http : bool
+        Table loading protocol. If True, use the HTTP protocol to load tables.
     cases : List[str]
         Run this/these test cases only. If list is empty list will run all the cases.
     run_tests : bool
@@ -1140,6 +1147,7 @@ def integration_test_http(
     for opt, var in (
         ("--unload", unload),
         ("--reload", reload),
+        ("--load-http", load_http),
     ):
         if var:
             args.append(opt)
@@ -1182,6 +1190,7 @@ def itest(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -1226,6 +1235,7 @@ def itest(
         unload,
         load,
         reload,
+        load_http,
         cases,
         run_tests,
         tests_yaml,
@@ -1251,6 +1261,7 @@ def itest_http(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -1284,6 +1295,7 @@ def itest_http(
         unload,
         load,
         reload,
+        load_http,
         cases,
         run_tests,
         tests_yaml,
diff --git a/src/admin/python/lsst/qserv/admin/qservCli/qserv_cli.py b/src/admin/python/lsst/qserv/admin/qservCli/qserv_cli.py
index fa1a747ba..f4a657559 100644
--- a/src/admin/python/lsst/qserv/admin/qservCli/qserv_cli.py
+++ b/src/admin/python/lsst/qserv/admin/qservCli/qserv_cli.py
@@ -37,6 +37,7 @@
     option_log_level,
     option_unload,
     option_reload,
+    option_load_http,
     option_run_tests,
     option_compare_results,
     option_case,
@@ -508,6 +509,7 @@ def run_debug(
 @option_load()
 @option_unload()
 @option_reload()
+@option_load_http()
 @option_run_tests()
 @option_compare_results()
 @option_case()
@@ -535,6 +537,7 @@ def itest(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -558,6 +561,7 @@ def itest(
         unload=unload,
         load=load,
         reload=reload,
+        load_http=load_http,
         cases=cases,
         run_tests=run_tests,
         tests_yaml=tests_yaml,
@@ -581,6 +585,7 @@ def itest(
 @option_load()
 @option_unload()
 @option_reload()
+@option_load_http()
 @option_run_tests()
 @option_compare_results()
 @option_case()
@@ -608,6 +613,7 @@ def itest_http(
     unload: bool,
     load: Optional[bool],
     reload: bool,
+    load_http: bool,
     cases: List[str],
     run_tests: bool,
     tests_yaml: str,
@@ -631,6 +637,7 @@ def itest_http(
         unload=unload,
         load=load,
         reload=reload,
+        load_http=load_http,
         cases=cases,
         run_tests=run_tests,
         tests_yaml=tests_yaml,
diff --git a/src/admin/python/lsst/qserv/admin/replicationInterface.py b/src/admin/python/lsst/qserv/admin/replicationInterface.py
index a536d9266..8004d74c4 100644
--- a/src/admin/python/lsst/qserv/admin/replicationInterface.py
+++ b/src/admin/python/lsst/qserv/admin/replicationInterface.py
@@ -27,6 +27,7 @@
 import os
 from requests import delete, get, post, put
 from requests.exceptions import ConnectionError
+from requests_toolbelt.multipart.encoder import MultipartEncoder
 import subprocess
 from .itest_table import LoadTable
 from typing import Any, Callable, Dict, Generator, List, Optional, NamedTuple, Tuple
@@ -47,11 +48,15 @@ class ChunkLocation(NamedTuple):
     chunk_id: str
     host: str
     port: str
+    http_host: str
+    http_port: str
 
 
 class RegularLocation(NamedTuple):
     host: str
     port: str
+    http_host: str
+    http_port: str
 
 
 def _check(result: Dict[Any, Any], url: str) -> None:
@@ -94,6 +99,30 @@ def _post(url: str, data: str) -> Dict[Any, Any]:
     _check(res, url)
     return res
 
+
+@backoff.on_exception(
+    exception=ConnectionError,
+    wait_gen=backoff.expo,
+    on_backoff=on_backoff(log=_log),
+    max_time=max_backoff_sec,
+)
+def _post_file_upload(url: str, encoder: MultipartEncoder) -> Dict[Any, Any]:
+    """Call requests.post and check the result for success=1.
+
+    Parameters
+    ----------
+    url : `str`
+        The url to send to `post`.
+    encoder : `MultipartEncoder`
+        The multipart form data to send to `post`.
+
+    Returns
+    -------
+    result : `dict`
+        The dict containing the result of calling `post`.
+ """ + res: Dict[Any, Any] = post(url, data=encoder, headers={'Content-Type': encoder.content_type}).json() + _check(res, url) + return res @backoff.on_exception( exception=ConnectionError, @@ -288,7 +317,8 @@ def ingest_chunk_config(self, transaction_id: int, chunk_id: str) -> ChunkLocati data=json.dumps(dict(transaction_id=transaction_id, chunk=chunk_id, auth_key=self.auth_key, version=self.repl_api_version,)), ) - return ChunkLocation(chunk_id, res["location"]["host"], res["location"]["port"]) + return ChunkLocation(chunk_id, res["location"]["host"], str(res["location"]["port"]), + res["location"]["http_host"], str(res["location"]["http_port"])) def ingest_chunk_configs(self, transaction_id: int, chunk_ids: List[int]) -> List[ChunkLocation]: """Get the locations where a list of chunk ids should be ingested. @@ -310,7 +340,8 @@ def ingest_chunk_configs(self, transaction_id: int, chunk_ids: List[int]) -> Lis data=json.dumps(dict(transaction_id=transaction_id, chunks=chunk_ids, auth_key=self.auth_key, version=self.repl_api_version,)), ) - return [ChunkLocation(l["chunk"], l["host"], str(l["port"])) for l in res["location"]] + return [ChunkLocation(l["chunk"], l["host"], str(l["port"]), + l["http_host"], str(l["http_port"])) for l in res["location"]] def ingest_regular_table(self, transaction_id: int) -> List[RegularLocation]: """Get the locations where a non-chunk table should be ingested. @@ -331,15 +362,21 @@ def ingest_regular_table(self, transaction_id: int) -> List[RegularLocation]: url=f"http://{self.repl_ctrl.hostname}:{self.repl_ctrl.port}/ingest/regular?version={self.repl_api_version}", data=json.dumps(dict(auth_key=self.auth_key, transaction_id=transaction_id,)), ) - return [RegularLocation(location["host"], str(location["port"])) for location in res["locations"]] + return [RegularLocation(location["host"], str(location["port"]), + location["http_host"], str(location["http_port"])) for location in res["locations"]] def ingest_data_file( self, transaction_id: int, + chunk_id: str, + overlap: bool, worker_host: str, worker_port: str, + worker_http_host: str, + worker_http_port: str, data_file: str, - table: LoadTable + table: LoadTable, + load_http: bool, ) -> None: """Ingest table data from a file. @@ -347,48 +384,81 @@ def ingest_data_file( ---------- transaction_id : `int` The transaction id. + chunk_id : `str` + The chunk id. + overlap : `bool` + The flag indicating if the file reprsentes the chunk overlap. worker_host : `str` The name of the host ingesting the data. worker_port : `str` The worker_host port to use. + worker_http_host : `str` + The name of the host ingesting the data (HTTP protocol). + worker_http_port : `str` + The worker_host port to use (HTTP protocol). data_file : `str` The path to the data file to ingest. table : `LoadTable` Table descriptor, including its name, ingest configuration, etc. + load_http : `bool` + The protocol to use for loading the data. """ if not self.auth_key: raise RuntimeError("auth_key must be set to ingest a data file.") - args = [ - "qserv-replica-file", - "INGEST", - "FILE", - worker_host, - worker_port, - str(transaction_id), - table.table_name, - # app help says P for 'partitioned' and R for 'regular'/non-partitioned. 
- "P" if table.is_partitioned else "R", - data_file, - "--verbose", - f"--fields-terminated-by={table.fields_terminated_by}", - f"--fields-enclosed-by={table.fields_enclosed_by}", - f"--fields-escaped-by={table.fields_escaped_by}", - f"--auth-key={self.auth_key}", - f"--lines-terminated-by={table.lines_terminated_by}", - ] - _log.debug("ingest file args: %s", args) - res = subprocess.run( - args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - encoding="utf-8", - errors="replace", - ) - if res.returncode != 0: - raise RuntimeError( - f"Subprocess failed ({res.returncode}) stdout:{res.stdout} stderr:{res.stderr}" + if load_http: + encoder = MultipartEncoder( + fields={ + "auth_key": (None, self.auth_key), + "transaction_id": (None, str(transaction_id)), + "table": (None, table.table_name), + "chunk": (None, str(chunk_id)), + "overlap": (None, str("1" if overlap else "0")), + "fields_terminated_by": (None, str(table.fields_terminated_by)), + "fields_enclosed_by": (None, str(table.fields_enclosed_by)), + "fields_escaped_by": (None, str(table.fields_escaped_by)), + "lines_terminated_by": (None, str(table.lines_terminated_by)), + "file": (os.path.basename(data_file), open(data_file, "rb"), "text/plain"), + } + ) + _log.debug("encoder: %s", encoder) + res_http = _post_file_upload( + url=f"http://{worker_http_host}:{worker_http_port}/ingest/csv", + encoder=encoder) + if not res_http["success"]: + raise RuntimeError(f"Ingest failed ({res_http})") + _log.debug("ingest file res: %s", res_http) + else: + args = [ + "qserv-replica-file", + "INGEST", + "FILE", + worker_host, + worker_port, + str(transaction_id), + table.table_name, + # app help says P for 'partitioned' and R for 'regular'/non-partitioned. + "P" if table.is_partitioned else "R", + data_file, + "--verbose", + f"--fields-terminated-by={table.fields_terminated_by}", + f"--fields-enclosed-by={table.fields_enclosed_by}", + f"--fields-escaped-by={table.fields_escaped_by}", + f"--auth-key={self.auth_key}", + f"--lines-terminated-by={table.lines_terminated_by}", + ] + _log.debug("ingest file args: %s", args) + res = subprocess.run( + args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + errors="replace", ) - _log.debug("ingest file res: %s", res) + if res.returncode != 0: + raise RuntimeError( + f"Subprocess failed ({res.returncode}) stdout:{res.stdout} stderr:{res.stderr}" + ) + _log.debug("ingest file res: %s", res) def build_table_stats( self, @@ -435,6 +505,7 @@ def ingest_chunks_data( table: LoadTable, chunks_folder: str, chunk_info_file: str, + load_http: bool, ) -> None: """Ingest chunk data that was partitioned using sph-partition. @@ -448,6 +519,8 @@ def ingest_chunks_data( The absolute path to the folder containing the chunk files to be ingested. chunks_info_file : `str` The absolute path to the file containing information about the chunks to be ingested. + load_http : `bool` + The protocol to use for loading the data. 
""" _log.debug( "ingest_chunks_data transaction_id: %s table_name: %s chunks_folder: %s", @@ -467,34 +540,48 @@ def ingest_chunks_data( # Ingest the chunk files: # Helpful note: Generator type decl is Generator[yield, send, return], # see https://www.python.org/dev/peps/pep-0484/#annotating-generator-functions-and-coroutines - def generate_locations() -> Generator[Tuple[str, str, str], None, None]: + def generate_locations() -> Generator[Tuple[str, str, str, str, str, str, bool], None, None]: for location in locations: for chunk_file in (chunk_file_t, chunk_overlap_file_t): full_path = os.path.join(chunks_folder, chunk_file.format(chunk_id=location.chunk_id)) if os.path.exists(full_path): _log.debug( - f"Ingesting %s to %s:%s", + f"Ingesting %s to %s:%s/%s:%s chunk %s.", full_path, location.host, location.port, + location.http_host, + location.http_port, + location.chunk_id, ) - yield full_path, location.host, location.port + overlap = "overlap" in chunk_file + yield full_path, location.host, location.port, location.http_host, location.http_port, location.chunk_id, overlap else: _log.warn( "Not ingesting %s; it does not exist (probably there is no data for that chunk).", full_path, ) - for _file, host, port in generate_locations(): + for _file, host, port, http_host, http_port, chunk_id, overlap in generate_locations(): self.ingest_data_file( transaction_id, + chunk_id, + overlap, host, port, + http_host, + http_port, data_file=_file, table=table, + load_http=load_http ) - def ingest_table_data(self, transaction_id: int, table: LoadTable, data_file: str) -> None: + def ingest_table_data( + self, transaction_id: int, + table: LoadTable, + data_file: str, + load_http: bool, + ) -> None: """Ingest data for a non-partitioned table. Parameters @@ -505,6 +592,8 @@ def ingest_table_data(self, transaction_id: int, table: LoadTable, data_file: st Table descriptor, including its name, ingest configuration, etc. data_file : `str` The absolute path to the file containing the table data. + load_http : `bool` + The protocol to use for loading the data. """ _log.debug( "ingest_table_data: transaction_id: %s table.table_name: %s data_file: %s", @@ -515,18 +604,25 @@ def ingest_table_data(self, transaction_id: int, table: LoadTable, data_file: st locations = self.ingest_regular_table(transaction_id) for location in locations: _log.debug( - "Ingesting %s to %s:%s table %s.", + "Ingesting %s to %s:%s/%s:%s table %s.", data_file, location.host, location.port, + location.http_host, + location.http_port, table.table_name, ) self.ingest_data_file( transaction_id, + "0", + False, location.host, location.port, + location.http_host, + location.http_port, data_file=data_file, table=table, + load_http=load_http, ) def delete_database( diff --git a/src/admin/python/lsst/qserv/admin/tests/test_qserv_cli.py b/src/admin/python/lsst/qserv/admin/tests/test_qserv_cli.py index b48d606b2..e25dbd7b9 100644 --- a/src/admin/python/lsst/qserv/admin/tests/test_qserv_cli.py +++ b/src/admin/python/lsst/qserv/admin/tests/test_qserv_cli.py @@ -116,6 +116,7 @@ def itest_args(**kwargs): unload=ANY, load=ANY, reload=ANY, + load_http=ANY, cases=ANY, run_tests=ANY, tests_yaml=ANY,