diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..41957b938 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,12 @@ +# PR Description + + +# Tasks + +* [ ] - A description of the PR has been provided, and a diagram included if it is a new feature. +* [ ] - Formatter has been run +* [ ] - CHANGELOG comment has been added +* [ ] - Labels have been assigned to the PR +* [ ] - A reviewer has been added +* [ ] - A user has been assigned to work on the PR +* [ ] - If it is a new feature, a unit test has been added diff --git a/.github/workflows/format-check.yml b/.github/workflows/format-check.yml index 062cdf7ec..24c268d7f 100644 --- a/.github/workflows/format-check.yml +++ b/.github/workflows/format-check.yml @@ -33,5 +33,5 @@ jobs: run: black --check . - name: Lint with flake8 run: | - flake8 --count --select=E9,F63,F7,F82 --show-source --statistics . - flake8 --count --statistics . + flake8 --count --select=E9,F63,F7,F82 --show-source --statistics --max-line-length 100 . + flake8 --count --statistics --max-line-length 100 . diff --git a/.gitignore b/.gitignore index ead16e18d..c7df59ef0 100644 --- a/.gitignore +++ b/.gitignore @@ -9,12 +9,25 @@ config/datafed-ws.cfg core/database/foxx/api/version_router.js core/database/foxx/manifest.json core/server/Version.hpp +docs/.buildinfo +docs/.doctrees/ +docs/.nojekyll python/datafed_pkg/datafed/VERSION.py +python/datafed_pkg/datafed.egg-info/ +python/datafed_pkg/datafed/SDMS_Anon_pb2.py +python/datafed_pkg/datafed/SDMS_Auth_pb2.py +python/datafed_pkg/datafed/SDMS_pb2.py +python/datafed_pkg/datafed/Version_pb2.py +python/datafed_pkg/datafed/__pycache__/ +python/datafed_pkg/dist/ +web/package.json repository/gridftp/globus5/authz/source/Version.hpp repository/server/Version.hpp scripts/globus/datafed-home-repo.sh scripts/globus/datafed-home-repo-form.json scripts/globus/mapping.json +scripts/admin_datafed_backup.sh +scripts/admin_refresh_certs.sh services/ web/SDMS.proto web/SDMS_Anon.proto diff --git a/CHANGELOG.md b/CHANGELOG.md index 94a282930..ba60baba7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,26 @@ +# Pre-release + +## MAJOR Breaking changes + +## MINOR Feature +1. [909] - Added support for Google Analytics + +## PATCH Bug Fixes/Technical Debt/Documentation +1. [914] - Improve GitHub template +2. [910] - Static code analysis and status checks fixed, improvements to CI +3. [923] - Fixed missing flag in certificate refresh script +4. [917] - Add additional files to .gitignore +5. [915] - Refactor CI to use the GitLab pipelines feature + +# v2023.10.23.15.50 + +## MINOR Feature +1. [906] - Added backup and cert refresh scripts. + +## PATCH Bug Fixes/Technical Debt/Documentation +1. [911] - Add GitHub template +2. [913] - Fixed bug where endpoint info could return an empty array; the array is now checked before accessing its elements + # v2023.8.21.10.40 ## MAJOR Breaking changes @@ -13,7 +36,7 @@ 7. [879] - Added correlation ids to messages for tracking 8. [879] - Split log output into server specific files. -## PATCH Bug fixes/Technical Debt +## PATCH Bug Fixes/Technical Debt/Documentation 1. [879] - Fixed thread safety of repo list calls in core server which were causing seg faults 2. [879] - Added better error reporting when attempting to delete repo with running tasks.
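For context on CHANGELOG item [913]: the GlobusAPI.cpp and test_Value.cpp hunks later in this diff guard against a Globus endpoint reply whose "DATA" array is empty. Below is a minimal standalone sketch of that guard. It is not part of the PR; it only reuses the libjson calls that appear in the diff (fromString, asObject, getArray, getValue, isNull, isString, asString) and assumes it is compiled inside the DataFed source tree so that common/libjson.hpp resolves.

```cpp
// Standalone sketch (assumes the DataFed source tree for common/libjson.hpp).
// Mirrors the [913] guard: only read DATA[0] when the array is non-empty.
#include "common/libjson.hpp"

#include <iostream>
#include <string>

int main() {
  // Endpoint reply with an empty DATA array, as exercised by test_Value.cpp.
  std::string raw = "{ \"DATA\": [], \"DATA_TYPE\": \"endpoint\" }";

  libjson::Value result;
  result.fromString(raw);

  libjson::Value::Object &obj = result.asObject();
  libjson::Value::Array &data = obj.getArray("DATA");

  bool supports_encryption = false;
  if (data.size() > 0) {  // guard added by [913]; previously data[0] was read unconditionally
    libjson::Value::Object &server = data[0].asObject();
    libjson::Value &scheme = server.getValue("scheme");
    if (scheme.isNull())
      supports_encryption = true;
    else if (scheme.isString())
      supports_encryption = (scheme.asString().compare("gsiftp") == 0);
  }

  std::cout << "supports_encryption: " << supports_encryption << std::endl;
  return 0;
}
```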
diff --git a/common/include/common/libjson.hpp b/common/include/common/libjson.hpp index 5fad1ddb0..4c353a663 100644 --- a/common/include/common/libjson.hpp +++ b/common/include/common/libjson.hpp @@ -674,16 +674,20 @@ class Value { } Object &asObject() { - if (m_type != VT_OBJECT) - EXCEPT(1, "Value is not an object"); - + if (m_type != VT_OBJECT) { + std::string error_msg = "Value is not an object; it is instead of type: "; + error_msg += getTypeString(); + EXCEPT(1, error_msg); + } return *m_value.o; } const Object &asObject() const { - if (m_type != VT_OBJECT) - EXCEPT(1, "Value is not an object"); - + if (m_type != VT_OBJECT) { + std::string error_msg = "Value is not an object; it is instead of type: "; + error_msg += getTypeString(); + EXCEPT(1, error_msg); + } return *m_value.o; } @@ -698,15 +702,21 @@ class Value { } Array &asArray() { - if (m_type != VT_ARRAY) - EXCEPT(1, "Value is not an array"); + if (m_type != VT_ARRAY) { + std::string error_msg = "Value is not an array; it is instead of type: "; + error_msg += getTypeString(); + EXCEPT(1, error_msg); + } return *m_value.a; } const Array &asArray() const { - if (m_type != VT_ARRAY) - EXCEPT(1, "Value is not an array"); + if (m_type != VT_ARRAY) { + std::string error_msg = "Value is not an array; it is instead of type: "; + error_msg += getTypeString(); + EXCEPT(1, error_msg); + } return *m_value.a; } diff --git a/common/tests/unit/CMakeLists.txt b/common/tests/unit/CMakeLists.txt index 298de375d..52fb4b748 100644 --- a/common/tests/unit/CMakeLists.txt +++ b/common/tests/unit/CMakeLists.txt @@ -4,6 +4,7 @@ foreach(PROG test_CommunicatorFactory test_Frame test_DynaLog + test_Value test_MessageFactory test_OperatorFactory test_ProtoBufFactory diff --git a/common/tests/unit/test_SocketOptions.cpp b/common/tests/unit/test_SocketOptions.cpp index ba76be426..b9040c5cb 100644 --- a/common/tests/unit/test_SocketOptions.cpp +++ b/common/tests/unit/test_SocketOptions.cpp @@ -36,7 +36,8 @@ BOOST_AUTO_TEST_CASE(testing_AddressSplitterINPROC) { BOOST_AUTO_TEST_CASE(testing_AddressSplitterNoPort) { // Still contains ':' - BOOST_CHECK_THROW(AddressSplitter splitter("inproc://www.datafed.com:"), TraceException); + BOOST_CHECK_THROW(AddressSplitter splitter("inproc://www.datafed.com:"), + TraceException); } BOOST_AUTO_TEST_CASE(testing_AddressSplitterNoPort2) { diff --git a/common/tests/unit/test_Value.cpp b/common/tests/unit/test_Value.cpp new file mode 100644 index 000000000..2b5e1601f --- /dev/null +++ b/common/tests/unit/test_Value.cpp @@ -0,0 +1,134 @@ +#define BOOST_TEST_MAIN + +#define BOOST_TEST_MODULE libjson +#include <boost/filesystem.hpp> +#include <boost/test/unit_test.hpp> + +// Local public includes +#include "common/DynaLog.hpp" +#include "common/TraceException.hpp" +#include "common/libjson.hpp" + +// Standard includes +#include <iostream> +#include <string> + +// using namespace SDMS; + +BOOST_AUTO_TEST_SUITE(LibJSONTest) + +BOOST_AUTO_TEST_CASE(testing_object) { + + std::string raw_result = "{\n"; + raw_result += " \"DATA\": [],\n"; + raw_result += " \"DATA_TYPE\": \"endpoint\",\n"; + raw_result += " \"acl_available\": true,\n"; + raw_result += " \"acl_editable\": false,\n"; + raw_result += " \"activated\": false,\n"; + raw_result += " \"authentication_assurance_timeout\": null,\n"; + raw_result += " \"authentication_policy_id\": null,\n"; + raw_result += " \"authentication_timeout_mins\": null,\n"; + raw_result += " \"canonical_name\": " "\"u_t2uyxwqjgvapzmwlfahbi5l4mq#11bc8bd6-6b22-11eb-8287-" "0275e0cda761\",\n"; + raw_result += " \"contact_email\": \"researchcomputing@mara.edu\",\n"; +
raw_result += " \"contact_info\": null,\n"; + raw_result += " \"default_directory\": \"/{server_default}/\",\n"; + raw_result += " \"department\": \"Research Computing\",\n"; + raw_result += " \"description\": \"Library Published Data\",\n"; + raw_result += " \"disable_anonymous_writes\": false,\n"; + raw_result += " \"disable_verify\": false,\n"; + raw_result += " \"display_name\": \"New Anonymous Endpoint\",\n"; + raw_result += " \"entity_type\": \"GCSv5_guest_collection\",\n"; + raw_result += " \"expire_time\": null,\n"; + raw_result += " \"expires_in\": -1,\n"; + raw_result += " \"force_encryption\": false,\n"; + raw_result += " \"force_verify\": false,\n"; + raw_result += " \"french_english_bilingual\": false,\n"; + raw_result += " \"gcp_connected\": null,\n"; + raw_result += " \"gcp_paused\": null,\n"; + raw_result += + " \"gcs_manager_url\": \"https://e878b.bd7c.data.globus.org\",\n"; + raw_result += " \"gcs_version\": \"5.4.67\",\n"; + raw_result += " \"globus_connect_setup_key\": null,\n"; + raw_result += " \"high_assurance\": false,\n"; + raw_result += " \"host_endpoint\": " + "\"u_t2uyxwqjglapzmwbfahbi5l4mq#3b3f5c6c-5b6a-11eb-87bf-" + "02187389bd35\",\n"; + raw_result += " \"host_endpoint_display_name\": \"net01\",\n"; + raw_result += + " \"host_endpoint_id\": \"9ea98bda-0135-40fc-b2c1-280e14757c64\",\n"; + raw_result += " \"host_path\": null,\n"; + raw_result += + " \"https_server\": \"https://g-f09197.e778b.bd7c.data.globus.org\",\n"; + raw_result += " \"id\": \"e1d2afd2-ce11-4482-b127-b4cceef666f6\",\n"; + raw_result += " \"in_use\": false,\n"; + raw_result += " \"info_link\": null,\n"; + raw_result += " \"is_globus_connect\": false,\n"; + raw_result += " \"is_go_storage\": false,\n"; + raw_result += " \"keywords\": \"Uni,State,Top\",\n"; + raw_result += " \"last_accessed_time\": \"2023-10-23T00:00:00+00:00\",\n"; + raw_result += " \"local_user_info_available\": true,\n"; + raw_result += " \"location\": null,\n"; + raw_result += + " \"mapped_collection_display_name\": \"New Published Data\",\n"; + raw_result += + " \"mapped_collection_id\": \"5b52029b-9a3e-4490-abcf-649dd2f4fd6c\",\n"; + raw_result += " \"max_concurrency\": null,\n"; + raw_result += " \"max_parallelism\": null,\n"; + raw_result += " \"mfa_required\": false,\n"; + raw_result += " \"my_effective_roles\": [],\n"; + raw_result += " \"myproxy_dn\": null,\n"; + raw_result += " \"myproxy_server\": \"myproxy.globusonline.org\",\n"; + raw_result += " \"name\": \"11bc8bd6-6b22-11eb-8287-0275e0cda761\",\n"; + raw_result += " \"network_use\": null,\n"; + raw_result += " \"non_functional\": false,\n"; + raw_result += " \"non_functional_endpoint_display_name\": \"net01\",\n"; + raw_result += " \"non_functional_endpoint_id\": " + "\"9ea98bda-0935-40fc-b2c1-280e14757c64\",\n"; + raw_result += " \"oauth_server\": null,\n"; + raw_result += " \"organization\": \"The new University\",\n"; + raw_result += " \"owner_id\": \"9ea98bda-0935-40fc-b2c1-280e14757c64\",\n"; + raw_result += + " \"owner_string\": " + "\"9ea98bda-0935-40fc-b2c1-210e14757c64@clients.auth.globus.org\",\n"; + raw_result += " \"preferred_concurrency\": null,\n"; + raw_result += " \"preferred_parallelism\": null,\n"; + raw_result += " \"public\": true,\n"; + raw_result += " \"requester_pays\": false,\n"; + raw_result += " \"s3_owner_activated\": false,\n"; + raw_result += " \"s3_url\": null,\n"; + raw_result += " \"shareable\": false,\n"; + raw_result += " \"sharing_target_endpoint\": " + "\"u_t2uyxw0jgkapzmwbfahbi5l4mq#3b3f5c6c-5b6a-11eb-87bf-" + 
"02187389bd35\",\n"; + raw_result += " \"sharing_target_root_path\": null,\n"; + raw_result += " \"storage_type\": null,\n"; + raw_result += + " \"subscription_id\": \"3ba26681-e247-11e6-9d43-22000a1e3b52\",\n"; + raw_result += " \"tlsftp_server\": " + "\"tlsftp://g-f09277.e778b.bd7c.data.globus.org:443\",\n"; + raw_result += " \"user_message\": null,\n"; + raw_result += " \"user_message_link\": null,\n"; + raw_result += " \"username\": \"u_t2uyxwqjgvapzmwbfbhbi5l4mq\"\n"; + raw_result += "}"; + + std::cout << "raw result" << std::endl; + // std::cout << raw_result << std::endl; + SDMS::global_logger.setSysLog(false); + SDMS::global_logger.addStream(std::cerr); + SDMS::global_logger.setLevel(SDMS::LogLevel::DEBUG); + SDMS::LogContext log_context; + DL_DEBUG(log_context, raw_result); + + libjson::Value result; + result.fromString(raw_result); + std::cout << "Type of value: " << result.getTypeString() << std::endl; + libjson::Value::Object &resp_obj = result.asObject(); + + libjson::Value::Array &data = resp_obj.getArray("DATA"); + + BOOST_CHECK(data.size() == 0); +} + +BOOST_AUTO_TEST_SUITE_END() diff --git a/core/database/tests/test_foxx.sh b/core/database/tests/test_foxx.sh index 902da6d28..5dbb28e67 100755 --- a/core/database/tests/test_foxx.sh +++ b/core/database/tests/test_foxx.sh @@ -125,10 +125,24 @@ export NVM_DIR="$HOME/.nvm" nvm use $NODE_VERSION +FOXX_PREFIX="" +{ + # Determine if exists globally first + which foxx +} || { + FOXX_PREFIX="~/bin/" +} + PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password if [ "$TEST_TO_RUN" == "all" ] then - foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} --reporter spec + "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" --reporter spec else - foxx test -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} "$TEST_TO_RUN" --reporter spec + "${FOXX_PREFIX}foxx" test -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" "$TEST_TO_RUN" --reporter spec fi diff --git a/core/server/GlobusAPI.cpp b/core/server/GlobusAPI.cpp index 08b5158a3..c6c7be428 100644 --- a/core/server/GlobusAPI.cpp +++ b/core/server/GlobusAPI.cpp @@ -542,12 +542,11 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, long code = get(m_curl_xfr, m_config.glob_xfr_url + "endpoint/", a_ep_id, a_acc_token, {}, raw_result); + Value result; try { if (!raw_result.size()) EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. 
Code: " << code); - Value result; - result.fromString(raw_result); Value::Object &resp_obj = result.asObject(); @@ -575,15 +574,18 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, // Look at DATA[0].scheme to see if it's gsiftp Value::Array &data = resp_obj.getArray("DATA"); - Value::Object &server_obj = data[0].asObject(); - Value &scheme = server_obj.getValue("scheme"); + if (data.size() > 0) { + Value::Object &server_obj = data[0].asObject(); - if (scheme.isNull()) - a_ep_info.supports_encryption = true; - else if (scheme.isString()) - a_ep_info.supports_encryption = - (scheme.asString().compare("gsiftp") == 0); + Value &scheme = server_obj.getValue("scheme"); + + if (scheme.isNull()) + a_ep_info.supports_encryption = true; + else if (scheme.isString()) + a_ep_info.supports_encryption = + (scheme.asString().compare("gsiftp") == 0); + } } } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); @@ -591,6 +593,7 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, "Globus endpoint API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); + DL_ERROR(m_log_context, "Result type: " << result.getTypeString()); e.addContext("Globus endpoint API call failed."); throw; } catch (exception &e) { diff --git a/core/server/main.cpp b/core/server/main.cpp index 809f4a3c1..b75ac294d 100644 --- a/core/server/main.cpp +++ b/core/server/main.cpp @@ -30,7 +30,7 @@ namespace po = boost::program_options; int main(int a_argc, char **a_argv) { global_logger.setSysLog(false); global_logger.addStream(std::cerr); - global_logger.setLevel(LogLevel::DEBUG); + global_logger.setLevel(LogLevel::INFO); LogContext log_context; log_context.thread_name = "core_server"; log_context.thread_id = 0; diff --git a/doc_source/source/conf.py b/doc_source/source/conf.py index 15877699a..094d69f11 100644 --- a/doc_source/source/conf.py +++ b/doc_source/source/conf.py @@ -1,5 +1,3 @@ -import sphinx_rtd_theme - # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full diff --git a/python/datafed_pkg/datafed/CLI.py b/python/datafed_pkg/datafed/CLI.py index 7b8a676c6..41c3c6962 100644 --- a/python/datafed_pkg/datafed/CLI.py +++ b/python/datafed_pkg/datafed/CLI.py @@ -1,4 +1,4 @@ -## @namespace datafed.CLI +# @namespace datafed.CLI # @brief Provides a high-level client interface to the DataFed server # # The DataFed CLI module provides a high-level, text-based client @@ -23,25 +23,18 @@ import getpass import os import sys -import datetime import textwrap import shutil +import json as jsonlib import click import click.decorators -import re -import json as jsonlib -import time -import pathlib import wget from google.protobuf.json_format import MessageToJson -from google.protobuf.json_format import MessageToDict from prompt_toolkit import PromptSession from prompt_toolkit.history import FileHistory from prompt_toolkit.auto_suggest import AutoSuggestFromHistory -from prompt_toolkit.formatted_text import to_formatted_text -from . import SDMS_Auth_pb2 as auth -from . import SDMS_pb2 as sdms +# from . import SDMS_Auth_pb2 as auth from . import Version_pb2 from . import CommandLib from . 
import Config @@ -127,10 +120,11 @@ def run(): try: if _first: _cli(standalone_mode=False) - # Will get here if a command was specified on command-line, assume user wants non-REPL + # Will get here if a command was specified on command-line, + # assume user wants non-REPL _interactive = False else: - if session == None: + if session is None: session = PromptSession( history=FileHistory(os.path.expanduser("~/.datafed-hist")) ) @@ -155,13 +149,14 @@ def run(): if _first: _interactive = False - except SystemExit as e: - # For subsequent interactive commands, hide top-level (start-up) options + except SystemExit: + # For subsequent interactive commands, hide top-level (start-up) + # options if _first and _interactive and _initialized: for i in _cli.params: i.hidden = True - except KeyboardInterrupt as e: + except KeyboardInterrupt: # Break out of main loop _interactive = False break @@ -204,7 +199,7 @@ def run(): _interactive = False # If initialization failed or not in interactive mode, exit main loop - if not _initialized or _interactive == False: + if not _initialized or _interactive is False: break if _first: @@ -329,7 +324,7 @@ def command(command): try: _args = shlex.split(command) _cli(prog_name="datafed", args=_args, standalone_mode=False) - except SystemExit as e: + except SystemExit: pass except click.ClickException as e: raise Exception(e.format_message()) @@ -346,6 +341,7 @@ def command(command): # @cond + # Aliases click commands class _AliasedGroup(click.Group): # Allows command matching by unique suffix @@ -416,7 +412,10 @@ def _set_verbosity_cb(ctx, param, value): "--context", required=False, type=str, - help="User or project ID for command alias context. See 'alias' command help for more information.", + help=( + "User or project ID for command alias context. See 'alias' " + "command help for more information." + ), ) ] @@ -437,11 +436,15 @@ def _global_context_options(func): expose_value=False, help="Verbosity level of output", ), - # click.option('-v', '--verbosity', required=False,type=click.Choice(['0', '1', '2']),callback=_set_verbosity_cb, help='Verbosity level of output'), - # click.option("-J", "--json", is_flag=True, help="Set _cli output format to JSON, when applicable."), - # click.option("-T", "--text", is_flag=True, help="Set _cli output format to human-friendly text.") + # click.option('-v', '--verbosity', required=False,type=click.Choice(['0', '1', '2']), + # callback=_set_verbosity_cb, help='Verbosity level of output'), + # click.option("-J", "--json", is_flag=True, + # help="Set _cli output format to JSON, when applicable."), + # click.option("-T", "--text", is_flag=True, + # help="Set _cli output format to human-friendly text.") ] + # Decorator to add output options to click commands def _global_output_options(func): for option in reversed(__global_output_options): @@ -464,7 +467,11 @@ def _global_output_options(func): is_flag=True, is_eager=True, callback=_set_script_cb, - help="Start in non-interactive scripting mode. Output is in JSON, all intermediate I/O is disabled, and certain client-side commands are unavailable.", + help=( + "Start in non-interactive scripting mode. Output is in JSON, all " + "intermediate I/O is disabled, and certain client-side commands are " + "unavailable."
+ ), ) @click.option("--version", is_flag=True, help="Print version number and exit.") @click.pass_context @@ -486,7 +493,7 @@ def _cli(ctx, *args, **kwargs): ctx.invoke(_genDoc) raise SystemExit() - if _capi == None: + if _capi is None: _initialize(ctx.params) if ctx.invoked_subcommand is None: @@ -527,7 +534,7 @@ def _genDocHeader(cmd, level): def _genDocCmd(cmd, ctx, level, parname=None, recurse=True): - if cmd == None: + if cmd is None: cname = "Datafed" cmd = _cli elif parname: @@ -569,7 +576,10 @@ def _genDocCmd(cmd, ctx, level, parname=None, recurse=True): doc += "\n" if is_group: - doc += "Sub-Commands:\n\n=============== ============================================================\n" + doc += ( + "Sub-Commands:\n\n=============== " + "============================================================\n" + ) for c in cmd.list_commands(ctx): subcmd = cmd.get_command(cmd, c) if not subcmd.hidden: @@ -619,8 +629,8 @@ def _wc(coll_id): global _cur_coll_title global _cur_coll_prefix - if coll_id == None: - if _cur_coll_title == None: + if coll_id is None: + if _cur_coll_title is None: _setWorkingCollectionTitle() if _output_mode == _OM_TEXT: @@ -758,7 +768,10 @@ def _dataView(data_id, context): "--raw-data-file", type=str, required=False, - help="Globus path to raw data file (local or remote) to upload to new record. Default endpoint is used if none provided.", + help=( + "Globus path to raw data file (local or remote) to upload to new " + "record. Default endpoint is used if none provided." + ), ) @click.option( "-x", @@ -779,14 +792,20 @@ def _dataView(data_id, context): "--metadata", type=str, required=False, - help="Inline metadata in JSON format. JSON must define an object type. Cannot be specified with --metadata-file option.", + help=( + "Inline metadata in JSON format. JSON must define an object type." + " Cannot be specified with --metadata-file option." + ), ) @click.option( "-f", "--metadata-file", type=str, required=False, - help="Path to local metadata file containing JSON. JSON must define an object type. Cannot be specified with --metadata option.", + help=( + "Path to local metadata file containing JSON. JSON must define an " + "object type. Cannot be specified with --metadata option." + ), ) @click.option( "-s", "--schema", type=str, required=False, help="Set metadata schema id:version" ) @@ -803,7 +822,10 @@ "--parent", type=str, required=False, - help="Parent collection ID, alias, or listing index. Default is the current working collection.", + help=( + "Parent collection ID, alias, or listing index. Default is the " + "current working collection." + ), ) @click.option( "-R", @@ -817,7 +839,13 @@ "--deps", multiple=True, type=click.Tuple([click.Choice(["der", "comp", "ver"]), str]), - help="Dependencies (provenance). Use one '--deps' option per dependency and specify with a string consisting of the type of relationship ('der', 'comp', 'ver') follwed by ID/alias of the referenced record. Relationship types are: 'der' for 'derived from', 'comp' for 'a component of', and 'ver' for 'a new version of'.", + help=( + "Dependencies (provenance). Use one '--deps' option per dependency " + "and specify with a string consisting of the type of relationship " + "('der', 'comp', 'ver') followed by ID/alias of the referenced record." + " Relationship types are: 'der' for 'derived from', 'comp' for 'a " + "component of', and 'ver' for 'a new version of'."
+ ), ) @_global_context_options @_global_output_options @@ -905,7 +933,10 @@ def _dataCreate( "--raw-data-file", type=str, required=False, - help="Globus path to raw data file (local or remote) to upload with record. Default endpoint used if none provided.", + help=( + "Globus path to raw data file (local or remote) to upload with " + "record. Default endpoint used if none provided." + ), ) @click.option( "-x", @@ -946,14 +977,22 @@ def _dataCreate( "--deps-add", multiple=True, type=click.Tuple([click.Choice(["der", "comp", "ver"]), str]), - help="Specify dependencies to add by listing first the type of relationship ('der', 'comp', or 'ver') follwed by ID/alias of the target record. Can be specified multiple times.", + help=( + "Specify dependencies to add by listing first the type of " + "relationship ('der', 'comp', or 'ver') follwed by ID/alias of the " + "target record. Can be specified multiple times." + ), ) @click.option( "-R", "--deps-rem", multiple=True, type=click.Tuple([click.Choice(["der", "comp", "ver"]), str]), - help="Specify dependencies to remove by listing first the type of relationship ('der', 'comp', or 'ver') followed by ID/alias of the target record. Can be specified multiple times.", + help=( + "Specify dependencies to remove by listing first the type of " + "relationship ('der', 'comp', or 'ver') followed by ID/alias " + "of the target record. Can be specified multiple times." + ), ) @_global_context_options @_global_output_options @@ -1229,7 +1268,7 @@ def _list(ctx, item_id, offset, count, context): global _cur_coll - if item_id == None: + if item_id is None: _id = _cur_coll else: _id = _resolve_coll_id(item_id) @@ -1399,7 +1438,8 @@ def _collDelete(coll_id, force, context): raise Exception("Cannot confirm deletion while running non-interactively.") click.echo( - "Warning: this will delete all data records and collections contained in the specified collection(s)." + "Warning: this will delete all data records and collections " + "contained in the specified collection(s)." ) if not click.confirm("Continue?"): return @@ -1935,7 +1975,7 @@ def _taskList(time_from, to, since, status, offset, count): tasks history such that only up to 30 days of history are retained. """ - if since != None and (time_from != None or to != None): + if since is not None and (time_from is not None or to is not None): raise Exception("Cannot specify 'since' and 'from'/'to' ranges.") reply = _capi.taskList( @@ -2075,7 +2115,7 @@ def _epDefaultSet(current, endpoint): raise Exception("--current option not supported in non-interactive mode.") ep = _capi.endpointGet() - if ep == None: + if ep is None: raise Exception("No current endpoint set.") _capi.endpointDefaultSet(ep) @@ -2113,21 +2153,21 @@ def _setup(ctx): pub_file = _capi.cfg.get("client_pub_key_file") priv_file = _capi.cfg.get("client_priv_key_file") - if cfg_dir == None and (pub_file == None or priv_file == None): + if cfg_dir is None and (pub_file is None or priv_file is None): raise Exception( "Client configuration directory and/or client key files not configured" ) reply = _capi.generateCredentials() - if pub_file == None: + if pub_file is None: pub_file = os.path.join(cfg_dir, "datafed-user-key.pub") keyf = open(pub_file, "w") keyf.write(reply[0].pub_key) keyf.close() - if priv_file == None: + if priv_file is None: priv_file = os.path.join(cfg_dir, "datafed-user-key.priv") keyf = open(priv_file, "w") @@ -2139,7 +2179,9 @@ def _setup(ctx): """ -@_cli.command(name='output',help="Set output mode. 
If MODE argument is 'json' or 'text', the current mode will be set accordingly. If no argument is provided, the current output mode will be displayed.") +@_cli.command(name='output',help=("Set output mode. If MODE argument is 'json' " +"or 'text', the current mode will be set accordingly. If no argument is " +"provided, the current output mode will be displayed.")) @click.argument("mode",metavar='MODE',required=False) @click.pass_context def _outputModeSet( ctx, mode ): @@ -2177,10 +2219,10 @@ def _verbositySet(level): raise Exception("Command not supported in non-interactive modes.") global _verbosity_sticky - if level != None: + if level is not None: try: v = int(level) - except: + except BaseException: raise Exception("Invalid verbosity value.") if v < 0 or v > 2: @@ -2244,7 +2286,9 @@ def _exit_cli(): """ -@_cli.command(name='more',help="List the next set of data replies from the DataFed server. Optional argument determines number of data replies received (else the previous count will be used)") +@_cli.command(name='more',help=("List the next set of data replies from the " +"DataFed server. Optional argument determines number of data replies received " +"(else the previous count will be used)")) @click.argument("count",type=int,required=False) def _more(count): if not _interactive: @@ -2285,6 +2329,7 @@ def _more(count): # ------------------------------------------------------------- Print Functions # ============================================================================= + # Interactive and verbosity-aware print def _print_msg(level, message, err=False): global _verbosity @@ -2368,7 +2413,7 @@ def _print_endpoints(message): try: _list_items.index(path) - except: + except BaseException: _list_items.append(path) click.echo("{:2}. {}".format(df_idx, path)) df_idx += 1 @@ -2638,7 +2683,7 @@ def _print_proj(message): if len(proj.alloc) > 0: first = True for alloc in proj.alloc: - if first == True: + if first: first = False click.echo( "{:<14} {}, {} total, {} used".format( @@ -2701,7 +2746,7 @@ def _print_query(message): ) if len(message.query.coll): - tags = _arrayToCSV(message.query.coll, 2) + _arrayToCSV(message.query.coll, 2) _wrap_text(_arrayToCSV(message.query.coll, 0), " Selection:", 21) else: click.echo(" {:<18} {}".format("Selection: ", "All Data")) @@ -2713,7 +2758,7 @@ def _print_query(message): _wrap_text(message.query.text, " Text:", 21) if len(message.query.tags): - tags = _arrayToCSV(message.query.tags, 2) + _arrayToCSV(message.query.tags, 2) _wrap_text(_arrayToCSV(message.query.tags, 0), " Tags:", 21) if message.query.HasField("owner"): @@ -2772,7 +2817,7 @@ def _wrap_text(text, prefix, indent, compact=False): for p in para: click.echo(wrapper.fill(p)) - if first == True: + if first: wrapper.initial_indent = " " * indent first = False @@ -2860,7 +2905,7 @@ def _setWorkingCollectionTitle(): _cur_coll_prefix = coll.alias else: _cur_coll_title = '"{}" [{}]'.format(coll.title, coll.id) - _cur_coll_prefix = coll.id + _cur_coll_prefix = coll.id # noqa def _arrayToCSV(items, skip): @@ -2894,18 +2939,18 @@ def _printJSON(json, cur_indent, indent): for k, v in json.items(): print("" if last == 0 else ",\n", pref, end="", sep="") last = 1 - if type(v) is dict: + if isinstance(v, dict): if v: print(k, ": {") _printJSON(v, cur_indent + indent, indent) print("\n", pref, "}", sep="", end="") else: print(k, ": {}", end="") - elif type(v) is list: + elif isinstance(v, list): # Test array for dict or list values cplx = False for a in v: - if type(a) is dict or type(a) is list: + if 
isinstance(a, dict) or isinstance(a, list): cplx = True break if cplx: @@ -2914,7 +2959,7 @@ def _printJSON(json, cur_indent, indent): print("\n", pref, "]", sep="", end="") else: print(k, " : ", str(v), sep="", end="") - elif type(v) is str: + elif isinstance(v, str): print(k, ' : "', v, '"', sep="", end="") else: print(k, " : ", v, sep="", end="") @@ -2924,7 +2969,7 @@ def _printJSON_List(json, cur_indent, indent): pref = " " * cur_indent last = 0 for v in json: - if type(v) is dict: + if isinstance(v, dict): if v: if last == 0: print(pref, "{", sep="") @@ -2948,11 +2993,11 @@ def _printJSON_List(json, cur_indent, indent): print(",\n" if last != 0 else "", pref, end="", sep="") last = 2 - if type(v) is list: + if isinstance(v, list): # Test array for dict or list values cplx = False for a in v: - if type(a) is dict or type(a) is list: + if isinstance(a, dict) or isinstance(a, list): cplx = True break if cplx: @@ -2961,7 +3006,7 @@ def _printJSON_List(json, cur_indent, indent): print(pref, "]", sep="", end="") else: print(str(v), end="") - elif type(v) is str: + elif isinstance(v, str): print('"', v, '"', end="", sep="") else: print(v, end="") @@ -2997,7 +3042,6 @@ def _bar_adaptive_human_readable(current, total, width=80): # render output = "" for field in selected: - if field == "percent": # fixed size width for percentage output += ("%s%%" % (100 * current // total)).rjust(min_width["percent"]) @@ -3064,7 +3108,7 @@ def _initialize(opts): ) ) - if man_auth or _capi.getAuthUser() == None: + if man_auth or _capi.getAuthUser() is None: if not man_auth: if not _capi._mapi.keysLoaded(): if _output_mode == _OM_RETN: @@ -3099,7 +3143,7 @@ def _initialize(opts): raise Exception("Too many failed log-in attempts.") tmp = _capi.cfg.get("verbosity") - if tmp != None: + if tmp is not None: _verbosity_sticky = tmp _verbosity = tmp @@ -3109,7 +3153,7 @@ def _initialize(opts): _cur_ctx = uid _cur_coll = "c/u_" + uid[2:] + "_root" _initialized = True - except Exception as e: + except Exception: _interactive = False raise diff --git a/python/datafed_pkg/datafed/CommandLib.py b/python/datafed_pkg/datafed/CommandLib.py index 0b31b6650..3ee98befe 100644 --- a/python/datafed_pkg/datafed/CommandLib.py +++ b/python/datafed_pkg/datafed/CommandLib.py @@ -1,4 +1,4 @@ -## @package datafed.CommandLib +# @package datafed.CommandLib # Provides a high-level client interface to the DataFed server # # The DataFed CommandLib module contains a single API class that provides @@ -8,19 +8,16 @@ from __future__ import division, print_function, absolute_import import os -import sys import datetime import re import json as jsonlib import time import pathlib import wget -from . import SDMS_Anon_pb2 as anon from . import SDMS_Auth_pb2 as auth from . import SDMS_pb2 as sdms from . import MessageLib from . import Config -from . import VERSION class API: @@ -30,7 +27,7 @@ class API: The DataFed CommandLib.API class provides a high-level interface for sending requests to a DataFed server. Requests are sent via python class methods and replies are (currently) returned as Google Protobuf message objects. - These reply messages are defined in the \*.proto files included in the + These reply messages are defined in the \\*.proto files included in the DataFed client package. Basic functionality of th API class mirrors the capabilities exposed in the DataFed CLI. 
@@ -70,7 +67,6 @@ class methods and replies are (currently) returned as Google Protobuf message ob ) def __init__(self, opts={}): - if not isinstance(opts, dict): raise Exception("CommandLib API options parameter must be a dictionary.") @@ -201,7 +197,8 @@ def repoCreate( Parameters ---------- repo_id : str - The id of the data repository i.e. "datafed-home" internally this will be represented as "repo/datafed-home" + The id of the data repository, i.e. "datafed-home"; internally this + will be represented as "repo/datafed-home" title : str A title describing the repository desc : str @@ -213,14 +210,23 @@ def repoCreate( pub_key : str The public key of the repo so the core server and repository server can communicate address : str - The tcp address of the repository server, given the domain and the port i.e. "tcp://my-repo-server.cu.edu:9000" + The tcp address of the repository server, given the domain and the + port i.e. "tcp://my-repo-server.cu.edu:9000" endpoint : str - The globus UUID associated with the repository with the following format "XXXXYYYYXXXX-XXXX-XXXX-XXXX-XXXXYYYY" + The globus UUID associated with the repository with the following + format "XXXXYYYYXXXX-XXXX-XXXX-XXXX-XXXXYYYY" path : str - The relative POSIX path as seen from the globus collection (endpoint) to the repositories folder which is controled by the datafed repo server. i.e. if I have a POSIX path /home/tony_stark/inventions/datafed-home and the endpoint path pointed to /home/tony_stark/inventions then the POSIX path could be set to /datafed-home, NOTE the last folder in the path must have the same name as the repo_id. + The relative POSIX path as seen from the globus collection + (endpoint) to the repositories folder which is controlled by the + datafed repo server. i.e. if I have a POSIX path + /home/tony_stark/inventions/datafed-home and the endpoint path + pointed to /home/tony_stark/inventions then the POSIX path could be + set to /datafed-home, NOTE the last folder in the path must have + the same name as the repo_id. exp_path : str admins : list[str] - A list of DataFed users that will have repository admin rights on the repository. i.e. ["u/tony_stark", "u/pepper"] + A list of DataFed users that will have repository admin rights on + the repository. i.e. 
["u/tony_stark", "u/pepper"] Returns ------- @@ -280,7 +286,6 @@ def repoDelete(self, repo_id): return self._mapi.sendRecv(msg) def repoAllocationCreate(self, repo_id, subject, data_limit, rec_limit): - if not repo_id.startswith("repo/"): repo_id = "repo/" + repo_id @@ -462,7 +467,7 @@ def dataCreate( f = open(metadata_file, "r") metadata = f.read() f.close() - except: + except BaseException: raise Exception( "Could not open metadata file: {}".format(metadata_file) ) @@ -607,7 +612,7 @@ def dataUpdate( f = open(metadata_file, "r") metadata = f.read() f.close() - except: + except BaseException: raise Exception( "Could not open metadata file: {}".format(metadata_file) ) @@ -776,7 +781,7 @@ def dataGet( reply2 = self._mapi.sendRecv(msg2, nack_except=False) # timeout - if reply2[0] == None: + if reply2[0] is None: break # Not sure if this can happen: @@ -853,7 +858,7 @@ def dataPut( reply = self._mapi.sendRecv(msg) - if (reply[0].HasField("task") == True) and wait: + if (reply[0].HasField("task")) and wait: msg2 = auth.TaskViewRequest() msg2.task_id = reply[0].task.id elapsed = 0 @@ -1391,7 +1396,6 @@ def queryCreate( sort=None, sort_rev=None, ): - """ Create a new saved query @@ -1486,7 +1490,7 @@ def queryUpdate( msg = auth.QueryUpdateRequest() msg.id = query_id - if title != None: + if title is not None: msg.title = title self._buildSearchRequest( @@ -1656,8 +1660,7 @@ def _buildSearchRequest( offset=0, count=20, ): - - if coll_mode and (schema != None or meta != None or meta_err == True): + if coll_mode and (schema is not None or meta is not None or meta_err): raise Exception( "Cannot specify metadata terms when searching for collection." ) @@ -1670,10 +1673,10 @@ def _buildSearchRequest( # if category != None and not public: # raise Exception("Category search option is only available for public searches.") - if coll != None: + if coll is not None: msg.coll.extend(coll) - if sort != None: + if sort is not None: if sort == "id": msg.sort = 0 elif sort == "title": @@ -1689,7 +1692,7 @@ def _buildSearchRequest( else: raise Exception("Invalid sort option.") - if sort_rev == True: + if sort_rev: if msg.sort == 5: raise Exception( "Reverse sort option not available for text-relevance sorting." 
@@ -1697,53 +1700,53 @@ def _buildSearchRequest( msg.sort_rev = True - if id != None: + if id is not None: msg.id = id - if text != None: + if text is not None: msg.text = text - if tags != None: + if tags is not None: msg.tags.extend(tags) - if owner != None: + if owner is not None: msg.owner = owner - if creator != None: + if creator is not None: msg.creator = creator - if schema != None: + if schema is not None: msg.sch_id = schema - if meta != None: + if meta is not None: msg.meta = meta - if meta_err == True: + if meta_err: msg.meta_err = True - if time_from != None: + if time_from is not None: ts = self.strToTimestamp(time_from) - if ts == None: + if ts is None: raise Exception("Invalid time format for 'from' option.") setattr(msg, "from", ts) - if time_to != None: + if time_to is not None: ts = self.strToTimestamp(time_to) - if ts == None: + if ts is None: raise Exception("Invalid time format for 'from' option.") msg.to = ts if public: msg.published = True - if category != None: + if category is not None: msg.cat_tags.extend(category.split(".")) - if offset != None: + if offset is not None: msg.offset = offset - if count != None: + if count is not None: msg.count = count # ========================================================================= @@ -1949,13 +1952,13 @@ def sharedList(self, inc_users=None, inc_projects=None, subject=None): """ msg = auth.ACLSharedListRequest() - if inc_users != None: + if inc_users is not None: msg.inc_users = inc_users - if inc_projects != None: + if inc_projects is not None: msg.inc_projects = inc_projects - if subject != None: + if subject is not None: msg.subject = subject.lower() return self._mapi.sendRecv(msg) @@ -2029,7 +2032,7 @@ def sharedListItems(self, owner_id, context=None, offset=None, count=None): msg = auth.ACLSharedListItemsRequest() msg.owner = owner_id.lower() - if context != None: + if context is not None: msg.subject = context.lower() return self._mapi.sendRecv(msg) @@ -2074,26 +2077,26 @@ def taskList( Exception : On communication or server error Exception : On invalid options """ - if since != None and (time_from != None or time_to != None): + if since is not None and (time_from is not None or time_to is not None): raise Exception("Cannot specify 'since' and 'from'/'to' ranges.") msg = auth.TaskListRequest() - if time_from != None: + if time_from is not None: ts = self.strToTimestamp(time_from) - if ts == None: + if ts is None: raise Exception("Invalid time format for 'from' option.") setattr(msg, "from", ts) - if time_to != None: + if time_to is not None: ts = self.strToTimestamp(time_to) - if ts == None: + if ts is None: raise Exception("Invalid time format for 'time_to' option.") msg.to = ts - if since != None: + if since is not None: try: suf = since[-1] mod = 1 @@ -2110,14 +2113,14 @@ def taskList( else: val = int(since) - if val == None: + if val is None: raise Exception("Invalid value for 'since'") msg.since = val * mod - except: + except BaseException: raise Exception("Invalid value for 'since'") - if status != None: + if status is not None: for s in status: if isinstance(s, str): stat = s.lower() @@ -2139,10 +2142,10 @@ def taskList( elif stat == "failed": msg.status.append(4) - if offset != None: + if offset is not None: try: - tmp = int(offset) - except: + int(offset) + except BaseException: raise Exception("Invalid offset value.") if offset >= 0: @@ -2150,10 +2153,10 @@ def taskList( else: raise Exception("Invalid offset value.") - if count != None: + if count is not None: try: - tmp = int(count) - except: + 
int(count) + except BaseException: raise Exception("Invalid count value.") if count > 0: @@ -2296,7 +2299,7 @@ def setupCredentials(self): pub_file = self.cfg.get("client_pub_key_file") priv_file = self.cfg.get("client_priv_key_file") - if cfg_dir == None and (pub_file == None or priv_file == None): + if cfg_dir is None and (pub_file is None or priv_file is None): raise Exception( "Client configuration directory and/or client key files not configured" ) @@ -2305,14 +2308,14 @@ def setupCredentials(self): reply = self._mapi.sendRecv(msg) - if pub_file == None: + if pub_file is None: pub_file = os.path.join(cfg_dir, "datafed-user-key.pub") keyf = open(pub_file, "w") keyf.write(reply[0].pub_key) keyf.close() - if priv_file == None: + if priv_file is None: priv_file = os.path.join(cfg_dir, "datafed-user-key.priv") keyf = open(priv_file, "w") @@ -2338,7 +2341,7 @@ def setContext(self, item_id=None): Exception : On communication or server error Exception : On invalid options """ - if item_id == None: + if item_id is None: if self._cur_sel == self._uid: return @@ -2363,7 +2366,8 @@ def setContext(self, item_id=None): msg = auth.UserViewRequest() msg.uid = id2 - # Don't need reply - just using to throw an except if id/uid is invalid + # Don't need reply - just using to throw an except if id/uid is + # invalid self._mapi.sendRecv(msg) self._cur_sel = id2 @@ -2418,26 +2422,26 @@ def strToTimestamp(self, time_str): """ try: return int(time_str) - except: + except BaseException: pass try: return int(datetime.datetime.strptime(time_str, "%m/%d/%Y").timestamp()) - except: + except BaseException: pass try: return int( datetime.datetime.strptime(time_str, "%m/%d/%Y,%H:%M").timestamp() ) - except: + except BaseException: pass try: return int( datetime.datetime.strptime(time_str, "%m/%d/%Y,%H:%M:%S").timestamp() ) - except: + except BaseException: pass return None @@ -2565,13 +2569,14 @@ def _resolvePathForGlobus(self, path, must_exist): str Path with globus endpoint UUID or alias prefixed. """ - # Check if this is a full Globus path with either a UUID or legacy endpoint prefix + # Check if this is a full Globus path with either a UUID or legacy + # endpoint prefix if re.match(API._endpoint_legacy, path) or re.match(API._endpoint_uuid, path): return path # Does not have an endpoint prefix, might be a full or relative path - if self._cur_ep == None: + if self._cur_ep is None: raise Exception("No endpoint set.") if path[0] == "~": @@ -2586,9 +2591,10 @@ def _resolvePathForGlobus(self, path, must_exist): if must_exist: _path = str(_path.resolve()) else: - # Can't use resolve b/c it throws an exception when a path doesn't exist pre python 3.6 - # Must manually locate the lowest relative path component and resolve only to that point - # Then append then remainder to the resolved portion + # Can't use resolve b/c it throws an exception when a path doesn't + # exist pre python 3.6 Must manually locate the lowest relative path + # component and resolve only to that point Then append then + # remainder to the resolved portion idx = 0 rel = None @@ -2598,7 +2604,7 @@ def _resolvePathForGlobus(self, path, must_exist): rel = idx idx = idx + 1 - if rel != None: + if rel is not None: basep = pathlib.Path() endp = pathlib.Path() idx = 0 @@ -2613,7 +2619,8 @@ def _resolvePathForGlobus(self, path, must_exist): winp = pathlib.PurePath(_path) - # TODO The follow windows-specific code needs to be tested on windows... + # TODO The follow windows-specific code needs to be tested on + # windows... 
if isinstance(winp, pathlib.PureWindowsPath): if winp.drive: drive_name = winp.drive.replace(":", "") @@ -2672,20 +2679,21 @@ def _setSaneDefaultOptions(self): """ opts = self.cfg.getOpts() - # Examine initial configuration options and set & save defaults where needed + # Examine initial configuration options and set & save defaults where + # needed save = False - if not "server_host" in opts: + if "server_host" not in opts: self.cfg.set("server_host", "datafed.ornl.gov") opts["server_host"] = "datafed.ornl.gov" save = True - if not "server_port" in opts: + if "server_port" not in opts: self.cfg.set("server_port", 7512) opts["server_port"] = 7512 save = True - if not "server_pub_key_file" in opts: + if "server_pub_key_file" not in opts: serv_key_file = None if "server_cfg_dir" in opts: @@ -2715,13 +2723,13 @@ def _setSaneDefaultOptions(self): url = "https://" + opts["server_host"] + "/datafed-core-key.pub" wget.download(url, out=serv_key_file) - if not "client_pub_key_file" in opts or not "client_priv_key_file" in opts: - if not "client_cfg_dir" in opts: + if "client_pub_key_file" not in opts or "client_priv_key_file" not in opts: + if "client_cfg_dir" not in opts: raise Exception( "Client key file(s) or client configuration directory not specified or invalid." ) - if not "client_pub_key_file" in opts: + if "client_pub_key_file" not in opts: key_file = os.path.expanduser( os.path.join(opts["client_cfg_dir"], "datafed-user-key.pub") ) @@ -2729,7 +2737,7 @@ def _setSaneDefaultOptions(self): opts["client_pub_key_file"] = key_file save = True - if not "client_priv_key_file" in opts: + if "client_priv_key_file" not in opts: key_file = os.path.expanduser( os.path.join(opts["client_cfg_dir"], "datafed-user-key.priv") ) diff --git a/python/datafed_pkg/datafed/Config.py b/python/datafed_pkg/datafed/Config.py index a39348422..1af86477f 100644 --- a/python/datafed_pkg/datafed/Config.py +++ b/python/datafed_pkg/datafed/Config.py @@ -126,6 +126,7 @@ ], } + ## # @class API # @brief A client configuration helper class. @@ -139,20 +140,36 @@ # # Available Settings: # -# key | type | cf. sec | cf. name | env. var. name | long opt. | short opt. -# ----------------------|------|---------|-----------------|-----------------------------|-----------------------|------------ -# server_cfg_dir | path | server | config_dir | DATAFED_SERVER_CFG_DIR | --server-cfg-dir | | -# server_cfg_file | path | | | DATAFED_SERVER_CFG_FILE | --server-cfg-file | | -# server_pub_key_file | path | server | public_key_file | DATAFED_SERVER_PUB_KEY_FILE | --server-pub-key-file | | -# server_host | str | server | host | DATAFED_SERVER_HOST | --server-host | -H | -# server_port | int | server | port | DATAFED_SERVER_PORT | --server-port | -P | -# client_cfg_dir | path | client | config_dir | DATAFED_CLIENT_CFG_DIR | --client-cfg-dir | | -# client_cfg_file | path | client | config_file | DATAFED_CLIENT_CFG_FILE | --client-cfg-file | | -# client_pub_key_file | path | client | public_key_file | DATAFED_CLIENT_PUB_KEY_FILE | --client-pub-key-file | | -# client_priv_key_file | path | client | private_key_file| DATAFED_CLIENT_PRIV_KEY_FILE| --client-priv-key-file| | -# default_ep | str | general | default_endpoint| DATAFED_DEFAULT_ENDPOINT | --default-ep | -e | -# verbosity | int | general | verbosity | DATAFED_DEFAULT_VERBOSITY | --verbosity | -v | -# interactive | bool | general | interactive | DATAFED_DEFAULT_INTERACT | --interact/--no-interact | -i/-n | +# key | type | cf. sec | cf. 
name | +# ----------------------|------|---------|-----------------| +# server_cfg_dir | path | server | config_dir | +# server_cfg_file | path | | | +# server_pub_key_file | path | server | public_key_file | +# server_host | str | server | host | +# server_port | int | server | port | +# client_cfg_dir | path | client | config_dir | +# client_cfg_file | path | client | config_file | +# client_pub_key_file | path | client | public_key_file | +# client_priv_key_file | path | client | private_key_file| +# default_ep | str | general | default_endpoint| +# verbosity | int | general | verbosity | +# interactive | bool | general | interactive | +# +# Continuation of table +# key | env. var. name | long opt. | short opt. +# ----------------------|------------------------------|--------------------------|----------- +# server_cfg_dir | DATAFED_SERVER_CFG_DIR | --server-cfg-dir | +# server_cfg_file | DATAFED_SERVER_CFG_FILE | --server-cfg-file | +# server_pub_key_file | DATAFED_SERVER_PUB_KEY_FILE | --server-pub-key-file | +# server_host | DATAFED_SERVER_HOST | --server-host | -H +# server_port | DATAFED_SERVER_PORT | --server-port | -P +# client_cfg_dir | DATAFED_CLIENT_CFG_DIR | --client-cfg-dir | +# client_cfg_file | DATAFED_CLIENT_CFG_FILE | --client-cfg-file | +# client_pub_key_file | DATAFED_CLIENT_PUB_KEY_FILE | --client-pub-key-file | +# client_priv_key_file | DATAFED_CLIENT_PRIV_KEY_FILE | --client-priv-key-file | +# default_ep | DATAFED_DEFAULT_ENDPOINT | --default-ep | -e +# verbosity | DATAFED_DEFAULT_VERBOSITY | --verbosity | -v +# interactive | DATAFED_DEFAULT_INTERACT | --interact/--no-interact | -i/-n # # Configuration source priority: # @@ -163,7 +180,6 @@ # 5. set by environment variable # class API: - ## # @brief Class initialization method. # @@ -194,7 +210,7 @@ def _processOptions(self, opts): self._opts = {} for k, v in opts.items(): - if v != None: + if v is not None: self._opts[k] = {"val": v, "pri": 1} # print( "cfg self opts:", self._opts ) @@ -239,12 +255,12 @@ def _processOptions(self, opts): self._loadConfigFile(cfg_file, 2) loaded = True - if not "client_cfg_dir" in self._opts: + if "client_cfg_dir" not in self._opts: cfg_dir = os.path.expanduser("~/.datafed") if not os.path.exists(cfg_dir): try: os.mkdir(cfg_dir) - except: + except BaseException: return self._opts["client_cfg_dir"] = {"val": cfg_dir, "pri": 5} @@ -265,7 +281,7 @@ def _loadEnvironVars(self): # Options with _OPT_NO_ENV are ignored for k, v in _opt_info.items(): if ( - (not k in self._opts) + (k not in self._opts) and ((v[3] & _OPT_NO_ENV) == 0) and (v[2] in os.environ) and os.environ[v[2]] @@ -274,7 +290,7 @@ def _loadEnvironVars(self): if v[3] & _OPT_INT: try: tmp = int(tmp) - except: + except BaseException: raise Exception( "Invalid value specified for {} ({}) from ENV {}".format( k, tmp, v[2] ) @@ -296,17 +312,17 @@ def _loadEnvironVars(self): self._opts[k] = {"val": tmp, "pri": 4} def _loadConfigFile(self, cfg_file, priority): - # Read config file and check each defined option for a contained value using section and name - # Priority is set by parameter (3 or 4) - # Values are automatically converted to expected type - # Options with _OPT_NO_CF are ignored + # Read config file and check each defined option for a contained value + # using section and name. Priority is set by parameter (3 or 4). Values + # are automatically converted to expected type. Options with _OPT_NO_CF + # are ignored try: with open(cfg_file, "r") as f: config = configparser.ConfigParser() config.read_file(f) for k, v in _opt_info.items(): - if ((not k in self._opts) or self._opts[k]["pri"] >= priority) and ( + if ((k not in self._opts) or self._opts[k]["pri"] >= priority) and ( v[3] & _OPT_NO_CF ) == 0: if config.has_option(v[0], v[1]): @@ -314,7 +330,7 @@ def _loadConfigFile(self, cfg_file, priority): if v[3] & _OPT_INT: try: tmp = int(tmp) - except: + except BaseException: raise Exception( "Invalid value specified for {} ({}) in {}".format( k, tmp, cfg_file ) @@ -380,7 +396,7 @@ def getOpts(self): # @exception Exception: If unknown key is provided. 
# def get(self, key): - if not key in _opt_info: + if key not in _opt_info: raise Exception("Undefined configuration key: " + key) if key in self._opts: @@ -397,7 +413,7 @@ def get(self, key): # @exception Exception: If unknown key is provided. # def set(self, key, value, save=False): - if not key in _opt_info: + if key not in _opt_info: raise Exception("Undefined configuration key:", key) if key in self._opts: diff --git a/python/datafed_pkg/datafed/Connection.py b/python/datafed_pkg/datafed/Connection.py index 9d9d09faf..3c67574e4 100644 --- a/python/datafed_pkg/datafed/Connection.py +++ b/python/datafed_pkg/datafed/Connection.py @@ -1,4 +1,4 @@ -## @package datafed.Connection +# @package datafed.Connection # Low-level message-oriented communications module # # The DataFed Connection class enables sending and receiving Google protobuf @@ -19,11 +19,10 @@ import zmq import zmq.utils.z85 import struct -import time -import inspect import sys import uuid + ## # @class Connection # @brief Provides low-level message-oriented communication @@ -57,7 +56,6 @@ def __init__( zmq_ctxt=None, log_level=logging.INFO, ): - self._log_level = log_level self._format = "%(asctime)s datafed-cli %(levelname)s %(message)" logging.Formatter(self._format) @@ -91,15 +89,15 @@ def __init__( if sys.version_info.major == 3: try: self._socket.setsockopt_string(zmq.CURVE_SECRETKEY, client_priv_key) - except: + except BaseException: raise Exception("Invalid client private key") try: self._socket.setsockopt_string(zmq.CURVE_PUBLICKEY, client_pub_key) - except: + except BaseException: raise Exception("Invalid client public key") try: self._socket.setsockopt_string(zmq.CURVE_SERVERKEY, server_pub_key) - except: + except BaseException: raise Exception("Invalid server public key: " + server_pub_key) else: self._socket.curve_secretkey = client_priv_key @@ -128,7 +126,8 @@ def __del__(self): # def registerProtocol(self, msg_module): # Message descriptors are stored by name created by protobuf compiler - # A custom post-proc tool generates and appends _msg_name_to_type with defined DataFed-sepcific numer message types + # A custom post-proc tool generates and appends _msg_name_to_type with + # defined DataFed-sepcific numer message types for name, desc in sorted(msg_module.DESCRIPTOR.message_types_by_name.items()): msg_t = msg_module._msg_name_to_type[name] @@ -155,7 +154,7 @@ def recv(self, a_timeout=1000): ready = self._socket.poll(a_timeout) if ready > 0: # receive null frame - nf = self._socket.recv_string(0) + self._socket.recv_string(0) header = "" while header != "BEGIN_DATAFED": diff --git a/python/datafed_pkg/datafed/MessageLib.py b/python/datafed_pkg/datafed/MessageLib.py index c4f1755aa..22c6401f2 100644 --- a/python/datafed_pkg/datafed/MessageLib.py +++ b/python/datafed_pkg/datafed/MessageLib.py @@ -1,4 +1,4 @@ -## @package datafed.MessageLib +# @package datafed.MessageLib # Provides a low-level client interface to the DataFed server # # The DataFed MessageLib module contains a single API class that provides @@ -11,7 +11,6 @@ # secure ZeroMQ link. -import os import xmlrpc.client import zmq from . import Version_pb2 @@ -49,7 +48,6 @@ def get_latest_version(package_name): # and both synchronous ans asynchronous message send/recv methods. # class API: - ## # @brief MessageLib.API class initialization method. # @param server_host The DataFed core server hostname or IP address. @@ -59,7 +57,8 @@ class API: # @param client_pub_key_file Client public key file (full path). 
# @param client_priv_key_file Client private key file (full path). # @param client_cfg_dir Client configuration directory. - # @param manual_auth Client intends to manually authenticate if True. Bypasses client key loading. + # @param manual_auth Client intends to manually authenticate if True. + # Bypasses client key loading. # @param kwargs Placeholder for any extra keyword arguments (ignored) # @exception Exception: On server key load error, timeout, or incompatible protocols. # @@ -84,7 +83,6 @@ def __init__( manual_auth=None, **kwargs, ): - self._ctxt = 0 self._auth = False self._nack_except = True @@ -93,7 +91,7 @@ def __init__( if not server_host: raise Exception("Server host is not defined") - if server_port == None: + if server_port is None: raise Exception("Server port is not defined") if not server_pub_key and not server_pub_key_file: @@ -113,12 +111,12 @@ def __init__( _client_priv_key = None # Use or load server public key - if server_pub_key_file != None: + if server_pub_key_file is not None: try: keyf = open(server_pub_key_file, "r") _server_pub_key = keyf.read() keyf.close() - except: + except BaseException: raise Exception( "Could not open server public key file: " + server_pub_key_file ) @@ -167,7 +165,7 @@ def __init__( self._keys_valid = True self._keys_loaded = True print - except: + except BaseException: pub, priv = zmq.curve_keypair() _client_pub_key = pub.decode("utf-8") _client_priv_key = priv.decode("utf-8") @@ -231,7 +229,7 @@ def __init__( self._auth = reply.auth self._uid = reply.uid - ## @brief Determines if client security keys were loaded. + # @brief Determines if client security keys were loaded. # # @return True if keys were loaded; false otherwise. # @retval bool @@ -239,7 +237,7 @@ def __init__( def keysLoaded(self): return self._keys_loaded - ## @brief Determines if loaded client security keys had a valid format. + # @brief Determines if loaded client security keys had a valid format. # # Note that keys with valid format but invalid value will cause # a connection failure (exception or timeout). @@ -250,7 +248,7 @@ def keysLoaded(self): def keysValid(self): return self._keys_valid - ## @brief Gets the client authentication status and user ID. + # @brief Gets the client authentication status and user ID. # # @return A tuple of (bool,string) - The bool is True if client # is authenticated; False otherwise. IF authenticated, the @@ -260,7 +258,7 @@ def keysValid(self): def getAuthStatus(self): return self._auth, self._uid - ## @brief Perform manual client authentication with DataFed user ID and password. + # @brief Perform manual client authentication with DataFed user ID and password. # # @param uid Client's DataFed user ID. # @param password Client's DataFed password. @@ -278,7 +276,7 @@ def manualAuthByPassword(self, uid, password): # Test auth status reply, mt = self.sendRecv(anon.GetAuthStatusRequest()) if not reply.auth: - raise Exception(f"Password authentication failed.") + raise Exception("Password authentication failed.") self._auth = True self._uid = reply.uid @@ -305,7 +303,7 @@ def logout(self): self._auth = False self._uid = None - ## @brief Get NackReply exception enable state. + # @brief Get NackReply exception enable state. # # @return True if Nack exceptions are enabled; False otherwise. # @retval bool @@ -313,7 +311,7 @@ def logout(self): def getNackExceptionEnabled(self): return self._nack_except - ## @brief Set NackReply exception enable state. + # @brief Set NackReply exception enable state. 
# # If NackReply exceptions are enabled, any NackReply received by # the recv() or SendRecv() methods will be raised as an exception @@ -337,12 +335,12 @@ def getDefaultTimeout(self): def getDailyMessage(self): # Get daily message, if set reply, mt = self.sendRecv(anon.DailyMessageRequest(), 10000) - if reply == None: + if reply is None: raise Exception("Timeout waiting for server connection.") return reply.message - ## @brief Synchronously send a message then receive a reply to/from DataFed server. + # @brief Synchronously send a message then receive a reply to/from DataFed server. # # @param msg: Protobuf message to send to the server # timeout: Timeout in milliseconds @@ -355,9 +353,9 @@ def getDailyMessage(self): # def sendRecv(self, msg, timeout=None, nack_except=None): self.send(msg) - _timeout = timeout if timeout != None else self._timeout + _timeout = timeout if timeout is not None else self._timeout reply, mt, ctxt = self.recv(_timeout, nack_except) - if reply == None: + if reply is None: raise Exception("Timeout!!!!!!!!!") return None, None if ctxt != self._ctxt: @@ -366,7 +364,7 @@ def sendRecv(self, msg, timeout=None, nack_except=None): ) return reply, mt - ## @brief Asynchronously send a protobuf message to DataFed server. + # @brief Asynchronously send a protobuf message to DataFed server. # # @param msg: Protobuf message to send to the server # @return Auto-generated message re-association context int @@ -378,7 +376,7 @@ def send(self, msg): self._conn.send(msg, self._ctxt) return self._ctxt - ## @brief Receive a protobuf message (reply) from DataFed server. + # @brief Receive a protobuf message (reply) from DataFed server. # # @param timeout: Timeout in milliseconds (0 = don't wait, -1 = # wait forever). @@ -391,13 +389,13 @@ def send(self, msg): # @retval (obj,str,int) # def recv(self, timeout=None, nack_except=None): - _timeout = timeout if timeout != None else self._timeout + _timeout = timeout if timeout is not None else self._timeout reply, msg_type, ctxt = self._conn.recv(_timeout) - if reply == None: + if reply is None: return None, None, None - _nack_except = nack_except if nack_except != None else self._nack_except + _nack_except = nack_except if nack_except is not None else self._nack_except if msg_type == "NackReply" and _nack_except: if reply.err_msg: diff --git a/python/datafed_pkg/datafed/__init__.py b/python/datafed_pkg/datafed/__init__.py index c0e97cf44..488e40b27 100644 --- a/python/datafed_pkg/datafed/__init__.py +++ b/python/datafed_pkg/datafed/__init__.py @@ -28,4 +28,3 @@ name = "datafed" version = VERSION.__version__ -# version = "{}.{}.{}:{}".format(Version_pb2.VER_MAJOR,Version_pb2.VER_MAPI_MAJOR,Version_pb2.VER_MAPI_MINOR,Version_pb2.VER_CLIENT_PY) diff --git a/python/datafed_pkg/make_ver.py b/python/datafed_pkg/make_ver.py deleted file mode 100644 index aa18b5639..000000000 --- a/python/datafed_pkg/make_ver.py +++ /dev/null @@ -1,11 +0,0 @@ -# from os import path -# from datafed import Version_pb2 -# -##version = "{}.{}.{}:{}".format(Version_pb2.VER_MAJOR,Version_pb2.VER_MAPI_MAJOR,Version_pb2.VER_MAPI_MINOR,Version_pb2.VER_CLIENT_PY) -# version = "{}.{}.{}.{}.{}".format(Version_pb2.DATAFED_RELEASE_YEAR,Version_pb2.DATAFED_RELEASE_MONTH,Version_pb2.DATAFED_RELEASE_DAY,Version_pb2.DATAFED_RELEASE_HOUR, Version_pb2.DATAFED_RELEASE_MINUTE) -# -## Write contents of the VERSION file -# this_directory = path.abspath(path.dirname(__file__)) -# with open(path.join(this_directory, 'VERSION'), "w+", encoding='utf-8') as f: -# f.write(version) -# diff --git 
a/python/datafed_pkg/requirements.txt b/python/datafed_pkg/requirements.txt new file mode 100644 index 000000000..9735f7929 --- /dev/null +++ b/python/datafed_pkg/requirements.txt @@ -0,0 +1,5 @@ +protobuf>=3,<=3.20 +pyzmq>=16 +wget>=3 +click>=7 +prompt_toolkit>=2 diff --git a/python/datafed_pkg/setup.py b/python/datafed_pkg/setup.py index c606d2669..3b25d3abf 100644 --- a/python/datafed_pkg/setup.py +++ b/python/datafed_pkg/setup.py @@ -1,3 +1,4 @@ +from datafed.VERSION import __version__ import setuptools from os import path @@ -7,7 +8,9 @@ with open(path.join(this_directory, "README"), encoding="utf-8") as f: long_description = f.read() -from datafed.VERSION import __version__ + +with open("requirements.txt", "r") as f: + install_requires = [line.strip() for line in f] setuptools.setup( name="datafed", @@ -20,13 +23,7 @@ url="https://github.com/ORNL/DataFed", packages=setuptools.find_packages(), setup_requires=["setuptools"], - install_requires=[ - "protobuf>=3, <=3.20", - "pyzmq>=16", - "wget>=3", - "click>=7", - "prompt_toolkit>=2", - ], + install_requires=install_requires, entry_points={"console_scripts": ["datafed = datafed.CLI:run"]}, classifiers=[ "Programming Language :: Python :: 3", diff --git a/python/datafed_pkg/test/Test_EndToEnd.py b/python/datafed_pkg/test/Test_EndToEnd.py index 386a46782..93c436fac 100644 --- a/python/datafed_pkg/test/Test_EndToEnd.py +++ b/python/datafed_pkg/test/Test_EndToEnd.py @@ -74,15 +74,18 @@ def view(self, form = 'as_json'): return "dv \"%s\" -J" % (self.alias) """ +# import subprocess +# import unittest as ut import os -import unittest import random as r import string -import json + +# import json import warnings -import datafed -import datafed.CommandLib as cmd -import datafed.Config + +# import datafed +# import datafed.CommandLib as cmd +# import datafed.Config warnings.simplefilter("always") # warnings will be raised every time a # non-compliant data record detail is generated @@ -302,7 +305,7 @@ def make_desc(fits_requirements=True): return desc -################################ DATA RECORDS ################################ +# ############################### DATA RECORDS ################################ class DataRecord(object): @@ -411,7 +414,8 @@ def make_topic(cls, fits_requirements=True, quantity=4): if fits_requirements: for x in range(r.randint(1, int(quantity))): topic_as_list.append(string_generator(1, 5, True, True, True)) - topic = ".".join(topic_as_list) # joins words with '.' as delimitor + # joins words with '.' 
as delimitor + topic = ".".join(topic_as_list) else: @@ -445,7 +449,6 @@ def forbidden_char(num): @classmethod def generate(cls, fits_requirements=True): - if fits_requirements: dr = DataRecord( "d/data_id", @@ -507,7 +510,7 @@ def as_py_input(self): ) # TODO: add keywords and topic return command - def type_change(self, form="as_dict"): #### fix this to use magic method __str__ + def type_change(self, form="as_dict"): # fix this to use magic method __str__ dr_as_list = [] if form == "as_str": details = [ @@ -529,9 +532,8 @@ def type_change(self, form="as_dict"): #### fix this to use magic method __str_ ] for attribute in details: dr_as_list.append('"' + attribute + '"') - dr_as_str = " ".join( - dr_as_list - ) # string suitable for CLI Input, Ouput is separated by commas + # string suitable for CLI Input, Ouput is separated by commas + dr_as_str = " ".join(dr_as_list) return dr_as_str @@ -560,124 +562,116 @@ def type_change(self, form="as_dict"): #### fix this to use magic method __str_ return vars(self) -################################ COLLECTIONS ################################ -""" -class Collection(object): - - def __init__(self, coll_id, alias, title, desc): - self.coll_id = coll_id - self.alias = alias - self.title = title - self.desc = desc - - def __str__(self): - return f'id: {self.coll_id}, alias: {self.alias}, \ - title: {self.title}, desc: {self.desc}' - - def __repr__(self): - return f'{self.__class__.__name__} : ({self.coll_id}, {self.alias}, \ - {self.title}, {self.desc})' - - @classmethod - def generate(cls, fits_requirements=True): - if fits_requirements: - coll = Collection("c/coll_id", make_alias(True), make_title(True),\ - make_desc(True)) - pass - else: - coll = Collection("c/coll_id", make_alias(True), make_title(True),\ - make_desc(True)) - random_detail = r.choice([make_alias, make_title, make_desc]) - coll.random_detail = random_detail(False) - pass - return coll - - def as_input_str(self): # TODO: Update command - string = f'sdms cc -a "{escape(self.alias)}" -t "{escape(self.title)}"\ - -d "{escape(self.desc)}"' - return string - - def type_change(self, form='as_dict'): - if form == "as_list": - coll_as_list = [self.coll_id, self.alias, self.title, self.desc] - return coll_as_list - else: - return vars(self) - - - -################################ PROJECTS ################################ -""" +# ############################### COLLECTIONS ################################ + +# class Collection(object): +# +# def __init__(self, coll_id, alias, title, desc): +# self.coll_id = coll_id +# self.alias = alias +# self.title = title +# self.desc = desc +# +# def __str__(self): +# return f'id: {self.coll_id}, alias: {self.alias}, \ +# title: {self.title}, desc: {self.desc}' +# +# def __repr__(self): +# return f'{self.__class__.__name__} : ({self.coll_id}, {self.alias}, \ +# {self.title}, {self.desc})' +# +# @classmethod +# def generate(cls, fits_requirements=True): +# if fits_requirements: +# coll = Collection("c/coll_id", make_alias(True), make_title(True),\ +# make_desc(True)) +# pass +# else: +# coll = Collection("c/coll_id", make_alias(True), make_title(True),\ +# make_desc(True)) +# random_detail = r.choice([make_alias, make_title, make_desc]) +# coll.random_detail = random_detail(False) +# pass +# return coll +# +# def as_input_str(self): # TODO: Update command +# string = f'sdms cc -a "{escape(self.alias)}" -t "{escape(self.title)}"\ +# -d "{escape(self.desc)}"' +# return string +# +# def type_change(self, form='as_dict'): +# if form == "as_list": +# coll_as_list 
= [self.coll_id, self.alias, self.title, self.desc] +# return coll_as_list +# else: +# return vars(self) +# + + +# ############################### PROJECTS ################################ """ There is no current CLI functionality in terms of projects, other than viewing, and perhaps creating records within project allocations. this may change, or may not. Saved queries can be listed and executed. """ -""" -class project(object): - - def __init__(self, details): - - - def __str__(self): - - - def __repr__(self): - - - @classmethod - def generate(cls, fits_requirements=True): - - - def as_input_str(self): - - - def type_change(self, form='as_dict'): - - - -################################ QUERIES ################################ - -class query(object): - - def __init__(self, details): - - - def __str__(self): - - - def __repr__(self): - - - @classmethod - def generate(cls, fits_requirements=True): - - - def as_input_str(self): - - - def type_change(self, form='as_dict'): - -################################ ACCESS CONTROL GROUPS ######################### -""" - -################################ DEPENDENCIES ? ################################ - - -import unittest as ut -import subprocess -import os -import json +# class project(object): +# +# def __init__(self, details): +# +# +# def __str__(self): +# +# +# def __repr__(self): +# +# +# @classmethod +# def generate(cls, fits_requirements=True): +# +# +# def as_input_str(self): +# +# +# def type_change(self, form='as_dict'): +# +# +# +# ################################ QUERIES ################################ +# +# class query(object): +# +# def __init__(self, details): +# +# +# def __str__(self): +# +# +# def __repr__(self): +# +# +# @classmethod +# def generate(cls, fits_requirements=True): +# +# +# def as_input_str(self): +# +# +# def type_change(self, form='as_dict'): + +# ############################### ACCESS CONTROL GROUPS ######################### + +# ############################### DEPENDENCIES ? ########################## testdata = "/data/unittesting/testfiles/SampleData.txt" -# TODO: Test the create and delete functions before continuing with testing any other functionality -##Creating a text file of input strings, each on a new line -##LATER Need something recursive? -- that uses the lines of generated by bugaboo -# each action is a unit test -- generate record object, append dc command to list (necessary?), -# send command to shell, (update by putting in data? save timestamp?) dv in json, compare with dr dict object, -# then delete record. +# TODO: Test the create and delete functions before continuing with testing any +# other functionality Creating a text file of input strings, each on a new line +# LATER Need something recursive? -- that uses the lines of generated by +# bugaboo each action is a unit test -- generate record object, append dc +# command to list (necessary?), send command to shell, (update by putting in +# data? save timestamp?) dv in json, compare with dr dict object, then delete +# record. 
# Setting up a class for testing Data Records @@ -691,573 +685,588 @@ def delete_testing_files(): os.remove("~/jbreet/DataFed/python/datafed_pkg/test/outputfile.txt") -""" - - -class TestDataBasicReturn(ut.TestCase): - - # TODO: using object return, generate, check, then delete data record - def test_dr_create_and_delete(self): - config = datafed.Config.API() - datafed.CommandLib.init() - self.dr = DataRecord.generate(True) - create_reply = cmd.command(self.dr.as_py_input()) #Returns tuple object (protobuf msg, msg type) - self.assertEqual(self.dr.alias, create_reply[0].data[0].alias, msg = "Alias of intial data record create does not match.") - try: - del_reply = cmd.command('data delete {}'.format(create_reply[0].data[0].id)) - self.assertEqual(del_reply[1], 'AckReply', msg = "Delete of initial data record failed.") - except AssertionError: - print("Manual delete required") - - # TODO: same with collection - - #if this fails, do not continue -class TestDataBasicText(ut.TestCase): - - # TODO: using object return, generate, check, then delete data record - def test_dr_create_and_delete(self): - config = datafed.Config.API() - #datafed.CommandLib.init() - self.dr = DataRecord.generate(True) - details = [str(self.dr.alias), str(self.dr.title), - str(self.dr.desc)] - print(details) - with open('datarecordsinput.txt', 'a+') as ipfile: - ipfile.write(self.dr.as_text_input()) - print(ipfile.read()) - with open("outputfile.txt", 'a+') as opfile: - subprocess.run(self.dr.as_text_input(), stdout=opfile, \ - stderr=subprocess.STDOUT, shell=True) - with open("outputfile.txt", 'r') as opfile: - outs = opfile.read().split("\n") - print(outs) - words = [i.split(" ") for i in outs] - flat_list = [] - for sublist in words: - for item in sublist: - flat_list.append(unescape_for_JSON(item.strip())) - print(flat_list) - with self.subTest("Create"): - self.assertIs(all(elem in flat_list for elem in details), True, \ - msg="Data-create command unexpected failure") - with self.subTest("Delete"): - try: - with open("outputfile.txt", "w+") as opfile: - subprocess.run('./scripts/datafed data delete {}'.format(self.dr.alias), - stdout=opfile, stderr=subprocess.STDOUT, shell=True) - with open('outputfile.txt', 'r') as opfile: - outs = opfile.read() - self.assertIs("OK" in outs, True, msg="Data-delete of single \ - record failed") - except AssertionError: - print("Manual delete required") -""" - - -class TestDataRecords_Text(ut.TestCase): - """ - def setUp(self): #happens before EVERY method in testcase - self.drcorr0 = DataRecord.generate(True) - self.drcorr1 = DataRecord.generate(True) - subprocess.run(self.drcorr1.as_text_input(), shell=True) - # MAKE THESE DR AS PART OF SETUP, NOT TEST??? NO WAY TO GURANTEE DR-CREATE TEST WILL PASS? 
BUT NOT PASSING IS FINE EITHER WAY BECAUSE NOTHIN TO DELETE I GUESS - #Needs to be generated in setUp - - - - def tearDown(self): #Happens after EVERY method in test method - subprocess.run('./scripts/datafed data delete {}'.format(self.drcorr1.alias), shell=True) - files = ["jsonoutput.json", "drinput.txt", "outputfile.txt"] - for item in files: - if os.path.exists(item): - os.remove(item) - else: - pass - """ - - ############################# NEED SETUP - """ - def test_dr_create_JSON(self): #Take randostring, put into commands, pass to sdms, and write output to file - with open('drinput.txt', 'a+') as ipfile: - ipfile.write(self.drcorr0.as_input_str()) #writes a stringified copy of the DataRecord object as dictionary - with open("jsonoutput.json", 'w+') as jsonopfile: - subprocess.run(f'{self.drcorr0.as_input_str()} -J',\ - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(op["data"][0]["alias"])), - self.drcorr0.alias, msg="Returned alias does not match") - with self.subTest("Confirming title"): - self.assertEqual(unescape_for_JSON(str(op["data"][0]["title"])), - self.drcorr0.title, msg="Returned title does not match") - with self.subTest("Confirming desc"): - self.assertEqual(unescape_for_JSON(op["data"][0]["desc"])), - self.drcorr0.desc, msg="Returned desc does not match") - """ - - def test_dr_create_incorrect(self): - drerr = DataRecord.generate(fits_requirements=False) - drerrinput = drerr.as_text_input() - with open("datarecordsinput.txt", "w+") as ipfile: - ipfile.write(drerrinput) - with open("outputfile.txt", "w+") as opfile: - subprocess.run( - drerrinput, stdout=opfile, stderr=subprocess.STDOUT, shell=True - ) - with open("outputfile.txt", "r") as opfile: - outs = opfile.read() - print(outs) - self.assertIs( - "ID:" not in outs, - True, - msg="Data-create of \ - incorrect data record unexpected pass. 
Manual delete required", - ) - - """ - def test_dr_create_incorrect_json(self): - drerr = DataRecord.generate(False) - drerrinput = drerr.as_input_str() - with open('drinput.txt', 'a+') as ipfile: - ipfile.write(drerr.as_input_str()) #writes a stringified copy of the DataRecord object as dictionary - with open("jsonoutput.json", 'w+') as jsonopfile: - subprocess.run(f'{drerrinput} -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(op["status"], "ERROR", msg="Data-create of \ - incorrect data record unexpected pass") - """ - - -""" - def test_dr_update(self): - new_title = "New Title" - with open("jsonoutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed du {self.drcorr1.alias} -t "{new_title}" -J', stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(op["data"][0]["title"], new_title, msg="Update of Data Record title unsuccessful") -""" -""" - def test_dr_put(self): - with open("jsonoutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed put "{self.drcorr1.alias}" "{testdata}"\ - --wait -J', stdout=jsonopfile, stderr=subprocess.STDOUT, - shell=True) - with open("jsonoutput.json", 'r') as jsonopfile: - putop = json.load(jsonopfile) - self.assertEqual(putop["status"], "SUCCEEDED", - msg="Data-put transfer failed") #NB: Xfr will FAIL if Globus Connect is not running -- going to have to find a way to make this work???? - - def test_dr_view(self): - with open("jsonoutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed dv "{self.drcorr1.alias}" -D -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - with self.subTest("Confirming desc"): - self.assertEqual(unescape_for_JSON(str(op["data"][0]["desc"]), - self.drcorr1.desc, msg="Returned desc does not match") - -################################ NOT making dependencies tests their own sub/class because the setup should be the same. -# Integrative may be better because if general DR unit tests fail, it won't be worth running the dependencies tests anyway. 
- - def test_deps_add_0(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A \ - "{drcorr2.alias},0" -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with self.subTest("Confirming dependency created from owner \ - perspective"): - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), drcorr2.alias, msg="Add dependency type 0 \ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 0, msg="Add dependency type 0 ERROR: \ - type according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 1, msg="Add dependency type 0 ERROR: dir \ - according to owner does not match") - with self.subTest("Confirming dependency from relative's perspective"): - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), self.drcorr1.alias, msg="Add dep type 0 \ - ERROR: owner's alias according to relative does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 0, msg="Add dependency type 0 ERROR: type \ - according to relative does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 0, msg="Add dependency type 0 ERROR: \ - dir according to relative does not match") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - - def test_deps_add_1(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "\ - {drcorr2.alias},1" -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with self.subTest("Confirming dependency created from \ - owner perspective"): - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), drcorr2.alias, msg="Add dependency type 1\ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 1, msg="Add dependency type 1 ERROR: type \ - according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 1, msg="Add dependency type 1 ERROR: dir \ - according to owner does not match") - with self.subTest("Confirming dependency from relative's perspective"): - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', - stdout=jsonopfile, 
stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), self.drcorr1.alias, msg="Add dep type 1 \ - ERROR: owner's alias according to relative does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 1, msg="Add dep type 1 ERROR: type according \ - to relative does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 0, msg="Add dependency type 1 ERROR: dir \ - according to relative does not match") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - - def test_deps_add_2(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A \ - "{self.drcorr1.alias},2" -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with self.subTest("Confirming dependency created from owner\ - perspective"): - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), drcorr2.alias, msg="Add dependency type 2 \ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 2, msg="Add dependency type 2 ERROR: type \ - according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 1, msg="Add dependency type 2 ERROR: dir \ - according to owner does not match") - with self.subTest("Confirming dependency from relative's perspective"): - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), self.drcorr1.alias, msg="Add dep type 2 \ - ERROR: owner's alias according to relative does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 2, msg="Add deptype 2 ERROR: type according \ - to relative does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 0, msg="Add dep type 2 ERROR: dir according \ - to relative does not match") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - - def test_deps_remove(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ - -J', shell=True)#1 - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -R \ - "{drcorr2.alias}" -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - 
self.assertEqual(deps["data"][0]["deps"][0], '[]', - msg="Remove dependency type 0 failed") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - - def test_deps_replace_single(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - drcorr3 = DataRecord.generate(True) - subprocess.run(f'{drcorr3.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ - -J', shell=True) #remove and add simultaneously #2 - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -R \ - "{drcorr2.alias}" -A "{drcorr3.alias},2" -J', stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), drcorr3.alias, msg="Replace dep ERROR: relative's\ - alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 2, msg="Replace dep ERROR: type according to \ - owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 1, msg="Replace depERROR: dir according to \ - owner does not match") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) - - def test_deps_replace_multi(self): - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - drcorr3 = DataRecord.generate(True) - subprocess.run(f'{drcorr3.as_input_str()}', shell=True) - drcorr4 = DataRecord.generate(True) - subprocess.run(f'{drcorr4.as_input_str()}', shell=True) - drcorr5 = DataRecord.generate(True) - subprocess.run(f'{drcorr5.as_input_str()}', shell=True) - drcorr6 = DataRecord.generate(True) - subprocess.run(f'{drcorr6.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0" \ - -A "{drcorr3.alias},1" -J', shell=True)#3 - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -C -A \ - "{drcorr4.alias},0" -A "{drcorr5.alias},1" -A \ - "{drcorr6.alias},2" -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["alias"])), drcorr4.alias, msg="Replace dep add type 0 \ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["type"])), 0, msg="Replace dependency add type 0 \ - ERROR: type according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 0]["dir"])), 1, msg="Replace dependency add type 0 \ - ERROR: dir according to owner does not match") - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 1]["alias"])), drcorr5.alias, msg="Replace dependency add type 1\ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - 
self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 1]["type"])), 1, msg="Replace dependency add type 1 \ - ERROR: type according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 1]["dir"])), 1, msg="Replace dependency add type 1 \ - ERROR: dir according to owner does not match") - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 2]["alias"])), drcorr6.alias, msg="Replace dependency add type 2\ - ERROR: relative's alias according to owner does not match") - with self.subTest("Confirming type"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 2]["type"])), 2, msg="Replace dependency add type 2 \ - ERROR: type according to owner does not match") - with self.subTest("Confirming direction"): - self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ - 2]["dir"])), 1, msg="Replace dependency add type 2 \ - ERROR: dir according to owner does not match") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr4.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr5.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr6.alias}', shell=True) - - def test_deps_clear(self): #multiple delete - drcorr2 = DataRecord.generate(True) - subprocess.run(f'{drcorr2.as_input_str()}', shell=True) - drcorr3 = DataRecord.generate(True) - subprocess.run(f'{drcorr3.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ - -A "{drcorr3.alias},1" -J', shell=True) - with open("jsonoutput.json", "w+") as jsonopfile: - subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -C -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("jsonoutput.json", "r") as jsonopfile: - deps = json.load(jsonopfile) - self.assertEqual(deps["data"][0]["deps"][0], '[]', - msg="Clear dependencies failed") - subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) - subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) -""" - -""" -class TestCollections(ut.TestCase): - - def setUp(self): - self.collcorr0 = Collection.generate(True) - self.collcorr1 = Collection.generate(True) - subprocess.run(f'{self.collcorr1.as_input_str()}', shell=True) - - def tearDown(self): - subprocess.run(f'./scripts/datafed coll-delete {self.collcorr1.alias}', shell=True) - files = ["colloutput.json", "collinput.txt", "outputfile.txt"] - for item in files: - if os.path.exists(item): - os.remove(item) - else: - pass - - def test_coll_delete(self): - with open("outputfile.txt", "w+") as opfile: - subprocess.run(f'./scripts/datafed coll-delete {self.collcorr1.alias}', - stdout=opfile, stderr=subprocess.STDOUT, shell=True) - with open('outputfile.txt', 'r') as opfile: - outs = opfile.read() - self.assertEqual("SUCCESS" in outs, True, msg="Collection-delete \ - of single record failed") - - def test_coll_create(self): - with open("collinput.txt", "w+") as ipfile: - ipfile.write(self.collcorr0.as_input_str()) #writes a stringified copy of the Collection object as dictionary - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'{self.collcorr0.as_input_str()} -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with 
open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - with self.subTest("Confirming alias"): - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["alias"])), - self.collcorr0.alias, msg="Returned alias does not match") - with self.subTest("Confirming title"): - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), - self.collcorr0.title, msg="Returned title does not match") - with self.subTest("Confirming desc"): - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["desc"])), - self.collcorr0.desc, msg="Returned desc does not match") - coll_id = op["coll"][0]["id"] - return coll_id - - def test_coll_create_incorrect(self): - collerr = Collection.generate(False) - with open("collinput.txt", "a+") as ipfile: - ipfile.write(collerr.as_input_str()) #writes a stringified copy of the Collection object as dictionary - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'{collerr.as_input_str()} -J', stdout=jsonopfile, - stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(op["status"], "ERROR", msg="Collection-create of \ - incorrect Collection unexpected pass") - - def test_coll_update(self): - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed cu "{self.collcorr1.alias}" -t "New Title" \ - -J', stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), - "New Title", msg="Returned title does not match") - - def test_coll_view(self): - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed cv {self.collcorr1.alias} -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - with self.subTest("Confirming title"): - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), - self.collcorr1.alias, msg="Returned alias does not match") - with self.subTest("Confirming desc"): - self.assertEqual(unescape_for_JSON(str(op["coll"][0]["desc"])), - self.collcorr1.desc, msg="Returned desc does not match") - - def test_coll_link(self): - collcorr2 = Collection.generate(True) - subprocess.run(f'{collcorr2.as_input_str()}', shell=True) - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" \ - "{self.collcorr1.alias}" -J', shell=True) - subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(unescape_for_JSON(str(op["item"][0]["alias"])), - collcorr2.alias, msg="Returned alias of child \ - Collection does not match") - - def test_coll_move(self): - collcorr2 = Collection.generate(True) - subprocess.run(f'{collcorr2.as_input_str()}', shell=True) - collcorr3 = Collection.generate(True) - subprocess.run(f'{collcorr3.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" "{self.collcorr1.alias}"\ - -J', shell=True) - subprocess.run(f'./scripts/datafed move "{collcorr2.alias}" "{collcorr3.alias}" -J',\ - shell=True) - with self.subTest("Confirming unlink from previous parent"): - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', - stdout=jsonopfile, 
stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(unescape_for_JSON(str(op["item"][0])), '[]', - msg="Unlink of child from parent Collection during \ - move command failed") - with self.subTest("Confirming link to new parent"): - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed ls "{collcorr3.alias}" -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(unescape_for_JSON(str(op["item"][0]["alias"])), - collcorr2.alias, msg="Returned alias of child Collection \ - in new parent during move command does not match") - - def test_coll_unlink(self): - collcorr2 = Collection.generate(True) - subprocess.run(f'{collcorr2.as_input_str()}', shell=True) - subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" "{self.collcorr1.alias}"\ - -J', shell=True) - with open("colloutput.json", 'w+') as jsonopfile: - subprocess.run(f'./scripts/datafed unlink "{collcorr2.alias}" \ - "{self.collcorr1.alias}" -J', shell=True) - subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', - stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) - with open("colloutput.json", 'r') as jsonopfile: - op = json.load(jsonopfile) - self.assertEqual(unescape_for_JSON(str(op["item"][0])), '[]', - msg="Unlink of child from parent Collection failed") - - - - -""" - -########################## -if __name__ == "__main__": - ut.main() - delete_testing_files() +# +# class TestDataBasicReturn(ut.TestCase): +# +# # TODO: using object return, generate, check, then delete data record +# def test_dr_create_and_delete(self): +# config = datafed.Config.API() +# datafed.CommandLib.init() +# self.dr = DataRecord.generate(True) +# create_reply = cmd.command(self.dr.as_py_input()) +# # Returns tuple object (protobuf msg, msg type) +# self.assertEqual(self.dr.alias, create_reply[0].data[0].alias, +# msg =("Alias of intial data record create does not match.")) +# try: +# del_reply = cmd.command('data delete {}'.format(create_reply[0].data[0].id)) +# self.assertEqual(del_reply[1], 'AckReply', msg =("Delete " +# "of initial data record failed.")) +# except AssertionError: +# print("Manual delete required") +# +# # TODO: same with collection +# +# #if this fails, do not continue +# class TestDataBasicText(ut.TestCase): +# +# # TODO: using object return, generate, check, then delete data record +# def test_dr_create_and_delete(self): +# config = datafed.Config.API() +# #datafed.CommandLib.init() +# self.dr = DataRecord.generate(True) +# details = [str(self.dr.alias), str(self.dr.title), +# str(self.dr.desc)] +# print(details) +# with open('datarecordsinput.txt', 'a+') as ipfile: +# ipfile.write(self.dr.as_text_input()) +# print(ipfile.read()) +# with open("outputfile.txt", 'a+') as opfile: +# subprocess.run(self.dr.as_text_input(), stdout=opfile, \ +# stderr=subprocess.STDOUT, shell=True) +# with open("outputfile.txt", 'r') as opfile: +# outs = opfile.read().split("\n") +# print(outs) +# words = [i.split(" ") for i in outs] +# flat_list = [] +# for sublist in words: +# for item in sublist: +# flat_list.append(unescape_for_JSON(item.strip())) +# print(flat_list) +# with self.subTest("Create"): +# self.assertIs(all(elem in flat_list for elem in details), True, \ +# msg="Data-create command unexpected failure") +# with self.subTest("Delete"): +# try: +# with open("outputfile.txt", "w+") as opfile: +# 
subprocess.run('./scripts/datafed data delete {}'.format(self.dr.alias), +# stdout=opfile, stderr=subprocess.STDOUT, shell=True) +# with open('outputfile.txt', 'r') as opfile: +# outs = opfile.read() +# self.assertIs("OK" in outs, True, msg="Data-delete of single \ +# record failed") +# except AssertionError: +# print("Manual delete required") +# +# +# class TestDataRecords_Text(ut.TestCase): +# """ +# def setUp(self): #happens before EVERY method in testcase +# self.drcorr0 = DataRecord.generate(True) +# self.drcorr1 = DataRecord.generate(True) +# subprocess.run(self.drcorr1.as_text_input(), shell=True) +# # MAKE THESE DR AS PART OF SETUP, NOT TEST??? NO WAY TO GURANTEE DR-CREATE +# # TEST WILL PASS? BUT NOT PASSING IS FINE EITHER WAY BECAUSE NOTHIN TO +# # DELETE I GUESS +# #Needs to be generated in setUp +# +# +# +# def tearDown(self): #Happens after EVERY method in test method +# subprocess.run('./scripts/datafed data delete {}'.format(self.drcorr1.alias), shell=True) +# files = ["jsonoutput.json", "drinput.txt", "outputfile.txt"] +# for item in files: +# if os.path.exists(item): +# os.remove(item) +# else: +# pass +# """ +# +# # NEED SETUP +# """ +# def test_dr_create_JSON(self): +# # Take randostring, put into commands, pass to sdms, and write output to file +# with open('drinput.txt', 'a+') as ipfile: +# ipfile.write(self.drcorr0.as_input_str()) +# # writes a stringified copy of the DataRecord object as dictionary +# with open("jsonoutput.json", 'w+') as jsonopfile: +# subprocess.run(f'{self.drcorr0.as_input_str()} -J',\ +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(op["data"][0]["alias"])), +# self.drcorr0.alias, msg="Returned alias does not match") +# with self.subTest("Confirming title"): +# self.assertEqual(unescape_for_JSON(str(op["data"][0]["title"])), +# self.drcorr0.title, msg="Returned title does not match") +# with self.subTest("Confirming desc"): +# self.assertEqual(unescape_for_JSON(op["data"][0]["desc"])), +# self.drcorr0.desc, msg="Returned desc does not match") +# """ +# +# def test_dr_create_incorrect(self): +# drerr = DataRecord.generate(fits_requirements=False) +# drerrinput = drerr.as_text_input() +# with open("datarecordsinput.txt", "w+") as ipfile: +# ipfile.write(drerrinput) +# with open("outputfile.txt", "w+") as opfile: +# subprocess.run( +# drerrinput, stdout=opfile, stderr=subprocess.STDOUT, shell=True +# ) +# with open("outputfile.txt", "r") as opfile: +# outs = opfile.read() +# print(outs) +# self.assertIs( +# "ID:" not in outs, +# True, +# msg="Data-create of \ +# incorrect data record unexpected pass. 
Manual delete required", +# ) +# +# """ +# def test_dr_create_incorrect_json(self): +# drerr = DataRecord.generate(False) +# drerrinput = drerr.as_input_str() +# with open('drinput.txt', 'a+') as ipfile: +# ipfile.write(drerr.as_input_str()) +# # writes a stringified copy of the DataRecord object as dictionary +# with open("jsonoutput.json", 'w+') as jsonopfile: +# subprocess.run(f'{drerrinput} -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(op["status"], "ERROR", msg="Data-create of \ +# incorrect data record unexpected pass") +# """ +# +# +# """ +# def test_dr_update(self): +# new_title = "New Title" +# with open("jsonoutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed du {self.drcorr1.alias} -t "{new_title}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(op["data"][0]["title"], new_title, msg=("Update " +# "of Data Record title unsuccessful")) +# """ +# """ +# def test_dr_put(self): +# with open("jsonoutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed put "{self.drcorr1.alias}" "{testdata}"\ +# --wait -J', stdout=jsonopfile, stderr=subprocess.STDOUT, +# shell=True) +# with open("jsonoutput.json", 'r') as jsonopfile: +# putop = json.load(jsonopfile) +# self.assertEqual(putop["status"], "SUCCEEDED", +# msg="Data-put transfer failed") +# # NB: Xfr will FAIL if Globus Connect is not running -- going +# # to have to find a way to make this work???? +# +# def test_dr_view(self): +# with open("jsonoutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed dv "{self.drcorr1.alias}" -D -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# with self.subTest("Confirming desc"): +# self.assertEqual(unescape_for_JSON(str(op["data"][0]["desc"]), +# self.drcorr1.desc, msg="Returned desc does not match") +# +# # ############################## +# # NOT making dependencies tests their own +# # sub/class because the setup should be the same. Integrative may be better +# # because if general DR unit tests fail, it won't be worth running the +# # dependencies tests anyway. 
+# +# def test_deps_add_0(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A \ +# "{drcorr2.alias},0" -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with self.subTest("Confirming dependency created from owner \ +# perspective"): +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), drcorr2.alias, msg="Add dependency type 0 \ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 0, msg="Add dependency type 0 ERROR: \ +# type according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 1, msg="Add dependency type 0 ERROR: dir \ +# according to owner does not match") +# with self.subTest("Confirming dependency from relative's perspective"): +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), self.drcorr1.alias, msg="Add dep type 0 \ +# ERROR: owner's alias according to relative does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 0, msg="Add dependency type 0 ERROR: type \ +# according to relative does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 0, msg="Add dependency type 0 ERROR: \ +# dir according to relative does not match") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# +# def test_deps_add_1(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "\ +# {drcorr2.alias},1" -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with self.subTest("Confirming dependency created from \ +# owner perspective"): +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), drcorr2.alias, msg="Add dependency type 1\ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 1, msg="Add dependency type 1 ERROR: type \ +# according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 1, msg="Add dependency type 1 ERROR: dir \ +# according to owner does not match") +# with self.subTest("Confirming dependency from relative's perspective"): +# with open("jsonoutput.json", "w+") as jsonopfile: +# 
subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), self.drcorr1.alias, msg="Add dep type 1 \ +# ERROR: owner's alias according to relative does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 1, msg="Add dep type 1 ERROR: type according \ +# to relative does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 0, msg="Add dependency type 1 ERROR: dir \ +# according to relative does not match") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# +# def test_deps_add_2(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A \ +# "{self.drcorr1.alias},2" -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with self.subTest("Confirming dependency created from owner\ +# perspective"): +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), drcorr2.alias, msg="Add dependency type 2 \ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 2, msg="Add dependency type 2 ERROR: type \ +# according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 1, msg="Add dependency type 2 ERROR: dir \ +# according to owner does not match") +# with self.subTest("Confirming dependency from relative's perspective"): +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed dv "{drcorr2.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), self.drcorr1.alias, msg="Add dep type 2 \ +# ERROR: owner's alias according to relative does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 2, msg="Add deptype 2 ERROR: type according \ +# to relative does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 0, msg="Add dep type 2 ERROR: dir according \ +# to relative does not match") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# +# def test_deps_remove(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ +# -J', shell=True)#1 +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -R \ +# "{drcorr2.alias}" -J', 
stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# self.assertEqual(deps["data"][0]["deps"][0], '[]', +# msg="Remove dependency type 0 failed") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# +# def test_deps_replace_single(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# drcorr3 = DataRecord.generate(True) +# subprocess.run(f'{drcorr3.as_input_str()}', shell=True) +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ +# -J', shell=True) #remove and add simultaneously #2 +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -R \ +# "{drcorr2.alias}" -A "{drcorr3.alias},2" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), drcorr3.alias, msg="Replace dep ERROR: relative's\ +# alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 2, msg="Replace dep ERROR: type according to \ +# owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 1, msg="Replace depERROR: dir according to \ +# owner does not match") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) +# +# def test_deps_replace_multi(self): +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# drcorr3 = DataRecord.generate(True) +# subprocess.run(f'{drcorr3.as_input_str()}', shell=True) +# drcorr4 = DataRecord.generate(True) +# subprocess.run(f'{drcorr4.as_input_str()}', shell=True) +# drcorr5 = DataRecord.generate(True) +# subprocess.run(f'{drcorr5.as_input_str()}', shell=True) +# drcorr6 = DataRecord.generate(True) +# subprocess.run(f'{drcorr6.as_input_str()}', shell=True) +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0" \ +# -A "{drcorr3.alias},1" -J', shell=True)#3 +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -C -A \ +# "{drcorr4.alias},0" -A "{drcorr5.alias},1" -A \ +# "{drcorr6.alias},2" -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["alias"])), drcorr4.alias, msg="Replace dep add type 0 \ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["type"])), 0, msg="Replace dependency add type 0 \ +# ERROR: type according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 0]["dir"])), 1, msg="Replace dependency add type 0 \ +# ERROR: dir according to owner does not match") +# with self.subTest("Confirming alias"): +# 
self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 1]["alias"])), drcorr5.alias, msg="Replace dependency add type 1\ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 1]["type"])), 1, msg="Replace dependency add type 1 \ +# ERROR: type according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 1]["dir"])), 1, msg="Replace dependency add type 1 \ +# ERROR: dir according to owner does not match") +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 2]["alias"])), drcorr6.alias, msg="Replace dependency add type 2\ +# ERROR: relative's alias according to owner does not match") +# with self.subTest("Confirming type"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 2]["type"])), 2, msg="Replace dependency add type 2 \ +# ERROR: type according to owner does not match") +# with self.subTest("Confirming direction"): +# self.assertEqual(unescape_for_JSON(str(deps["data"][0]["deps"][ +# 2]["dir"])), 1, msg="Replace dependency add type 2 \ +# ERROR: dir according to owner does not match") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr4.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr5.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr6.alias}', shell=True) +# +# def test_deps_clear(self): #multiple delete +# drcorr2 = DataRecord.generate(True) +# subprocess.run(f'{drcorr2.as_input_str()}', shell=True) +# drcorr3 = DataRecord.generate(True) +# subprocess.run(f'{drcorr3.as_input_str()}', shell=True) +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -A "{drcorr2.alias},0"\ +# -A "{drcorr3.alias},1" -J', shell=True) +# with open("jsonoutput.json", "w+") as jsonopfile: +# subprocess.run(f'./scripts/datafed du "{self.drcorr1.alias}" -C -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("jsonoutput.json", "r") as jsonopfile: +# deps = json.load(jsonopfile) +# self.assertEqual(deps["data"][0]["deps"][0], '[]', +# msg="Clear dependencies failed") +# subprocess.run(f'./scripts/datafed data-delete {drcorr2.alias}', shell=True) +# subprocess.run(f'./scripts/datafed data-delete {drcorr3.alias}', shell=True) +# """ +# +# """ +# class TestCollections(ut.TestCase): +# +# def setUp(self): +# self.collcorr0 = Collection.generate(True) +# self.collcorr1 = Collection.generate(True) +# subprocess.run(f'{self.collcorr1.as_input_str()}', shell=True) +# +# def tearDown(self): +# subprocess.run(f'./scripts/datafed coll-delete {self.collcorr1.alias}', shell=True) +# files = ["colloutput.json", "collinput.txt", "outputfile.txt"] +# for item in files: +# if os.path.exists(item): +# os.remove(item) +# else: +# pass +# +# def test_coll_delete(self): +# with open("outputfile.txt", "w+") as opfile: +# subprocess.run(f'./scripts/datafed coll-delete {self.collcorr1.alias}', +# stdout=opfile, stderr=subprocess.STDOUT, shell=True) +# with open('outputfile.txt', 'r') as opfile: +# outs = opfile.read() +# self.assertEqual("SUCCESS" in outs, True, msg="Collection-delete \ +# of single record failed") +# +# def test_coll_create(self): +# with open("collinput.txt", 
"w+") as ipfile: +# ipfile.write(self.collcorr0.as_input_str()) +# # writes a stringified copy of the Collection object as dictionary +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'{self.collcorr0.as_input_str()} -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# with self.subTest("Confirming alias"): +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["alias"])), +# self.collcorr0.alias, msg="Returned alias does not match") +# with self.subTest("Confirming title"): +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), +# self.collcorr0.title, msg="Returned title does not match") +# with self.subTest("Confirming desc"): +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["desc"])), +# self.collcorr0.desc, msg="Returned desc does not match") +# coll_id = op["coll"][0]["id"] +# return coll_id +# +# def test_coll_create_incorrect(self): +# collerr = Collection.generate(False) +# with open("collinput.txt", "a+") as ipfile: +# ipfile.write(collerr.as_input_str()) +# # writes a stringified copy of the Collection object as dictionary +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'{collerr.as_input_str()} -J', stdout=jsonopfile, +# stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(op["status"], "ERROR", msg="Collection-create of \ +# incorrect Collection unexpected pass") +# +# def test_coll_update(self): +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed cu "{self.collcorr1.alias}" -t "New Title" \ +# -J', stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), +# "New Title", msg="Returned title does not match") +# +# def test_coll_view(self): +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed cv {self.collcorr1.alias} -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# with self.subTest("Confirming title"): +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["title"])), +# self.collcorr1.alias, msg="Returned alias does not match") +# with self.subTest("Confirming desc"): +# self.assertEqual(unescape_for_JSON(str(op["coll"][0]["desc"])), +# self.collcorr1.desc, msg="Returned desc does not match") +# +# def test_coll_link(self): +# collcorr2 = Collection.generate(True) +# subprocess.run(f'{collcorr2.as_input_str()}', shell=True) +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" \ +# "{self.collcorr1.alias}" -J', shell=True) +# subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(unescape_for_JSON(str(op["item"][0]["alias"])), +# collcorr2.alias, msg="Returned alias of child \ +# Collection does not match") +# +# def test_coll_move(self): +# collcorr2 = Collection.generate(True) +# subprocess.run(f'{collcorr2.as_input_str()}', shell=True) +# collcorr3 = Collection.generate(True) +# subprocess.run(f'{collcorr3.as_input_str()}', shell=True) +# 
subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" "{self.collcorr1.alias}"\ +# -J', shell=True) +# subprocess.run(f'./scripts/datafed move "{collcorr2.alias}" "{collcorr3.alias}" -J',\ +# shell=True) +# with self.subTest("Confirming unlink from previous parent"): +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(unescape_for_JSON(str(op["item"][0])), '[]', +# msg="Unlink of child from parent Collection during \ +# move command failed") +# with self.subTest("Confirming link to new parent"): +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed ls "{collcorr3.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(unescape_for_JSON(str(op["item"][0]["alias"])), +# collcorr2.alias, msg="Returned alias of child Collection \ +# in new parent during move command does not match") +# +# def test_coll_unlink(self): +# collcorr2 = Collection.generate(True) +# subprocess.run(f'{collcorr2.as_input_str()}', shell=True) +# subprocess.run(f'./scripts/datafed link "{collcorr2.alias}" "{self.collcorr1.alias}"\ +# -J', shell=True) +# with open("colloutput.json", 'w+') as jsonopfile: +# subprocess.run(f'./scripts/datafed unlink "{collcorr2.alias}" \ +# "{self.collcorr1.alias}" -J', shell=True) +# subprocess.run(f'./scripts/datafed ls "{self.collcorr1.alias}" -J', +# stdout=jsonopfile, stderr=subprocess.STDOUT, shell=True) +# with open("colloutput.json", 'r') as jsonopfile: +# op = json.load(jsonopfile) +# self.assertEqual(unescape_for_JSON(str(op["item"][0])), '[]', +# msg="Unlink of child from parent Collection failed") +# +# +# +# +# """ +# +# ########################## +# if __name__ == "__main__": +# ut.main() +# delete_testing_files() diff --git a/python/datafed_pkg/test/Test_ObjectReturn.py b/python/datafed_pkg/test/Test_ObjectReturn.py index 6b43bc118..871dd3d07 100644 --- a/python/datafed_pkg/test/Test_ObjectReturn.py +++ b/python/datafed_pkg/test/Test_ObjectReturn.py @@ -4,8 +4,9 @@ def main(): - config = datafed.Config.API() # generate default configs - datafed.CommandLib.init() # Config module will try to find things and send to MessageLib init + datafed.Config.API() # generate default configs + # Config module will try to find things and send to MessageLib init + datafed.CommandLib.init() for i in range(10): returned = datafed.CommandLib.command("data get y4 -fp ../../../URL_gets") # returned1 = datafed.CommandLib.command('more 2') diff --git a/python/datafed_pkg/test/data_gen.py b/python/datafed_pkg/test/data_gen.py index 13c75a03b..82629e87c 100755 --- a/python/datafed_pkg/test/data_gen.py +++ b/python/datafed_pkg/test/data_gen.py @@ -2,7 +2,6 @@ # api.dataPut(name,"esnet#cern-diskpt1/data1/1M.dat") -import sys import argparse import random import datafed.CommandLib @@ -212,11 +211,11 @@ def selectRand(a, b, cnt): if do_del: if pub: - if api.collectionUpdate(alias, topic="", context=ctx)[0] == None: + if api.collectionUpdate(alias, topic="", context=ctx)[0] is None: print("Timeout on collectionUpdate, coll {}".format(i)) exit() - if api.collectionDelete(alias, context=ctx)[0] == None: + if api.collectionDelete(alias, context=ctx)[0] is None: print("Timeout on collectionDelete, coll {}".format(i)) 
exit() @@ -248,7 +247,7 @@ def selectRand(a, b, cnt): tags=_tags, context=ctx, )[0] - == None + is None ): print("Timeout on collectionCreate, coll {}".format(i)) exit() @@ -305,8 +304,8 @@ def selectRand(a, b, cnt): if num_link > 0 and num_link < len(aliases) - 1: links = selectRand(0, num_link, len(aliases) - 1) - for l in links: - deps.append(["der", aliases[l]]) + for link in links: + deps.append(["der", aliases[link]]) # Create record if ( @@ -322,13 +321,13 @@ def selectRand(a, b, cnt): repo_id=repo, context=ctx, )[0] - == None + is None ): print("Timeout on dataCreate, coll {}, rec {}".format(i, j)) exit() if up_file: - if api.dataPut(data_alias, up_file, context=ctx)[0] == None: + if api.dataPut(data_alias, up_file, context=ctx)[0] is None: print("Timeout on dataPut, coll {}, rec {}".format(i, j)) exit() @@ -336,7 +335,7 @@ def selectRand(a, b, cnt): sel = random.randint(0, len(topics) - 1) _topic = topics[sel] - if api.collectionUpdate(alias, topic=_topic, context=ctx)[0] == None: + if api.collectionUpdate(alias, topic=_topic, context=ctx)[0] is None: print("Timeout on collectionUpdate, coll {}".format(i)) exit() diff --git a/python/datafed_pkg/test/security.py b/python/datafed_pkg/test/security.py index 657e905d7..1ac258bb7 100755 --- a/python/datafed_pkg/test/security.py +++ b/python/datafed_pkg/test/security.py @@ -1,15 +1,8 @@ #!/usr/bin/env python3 -import sys -import argparse -import random -import datafed.CommandLib -import json -import time import getpass -import datafed.SDMS_Anon_pb2 as anon +import datafed.CommandLib import datafed.SDMS_Auth_pb2 as auth -import datafed.SDMS_pb2 as sdms opts = {} diff --git a/python/pyproto_add_msg_idx.py b/python/pyproto_add_msg_idx.py index e97af69bb..233f83b33 100755 --- a/python/pyproto_add_msg_idx.py +++ b/python/pyproto_add_msg_idx.py @@ -23,7 +23,7 @@ # print( "ID:", parts[idx+1] ) msg_type = int(parts[idx + 1]) << 8 break - except: + except BaseException: pass # msg_type = 0 diff --git a/scripts/pipeline_setup.sh b/scripts/ci_pipeline_setup.sh similarity index 77% rename from scripts/pipeline_setup.sh rename to scripts/ci_pipeline_setup.sh index 3ef912a0b..6084a77d1 100755 --- a/scripts/pipeline_setup.sh +++ b/scripts/ci_pipeline_setup.sh @@ -5,10 +5,10 @@ set -eu Help() { echo "$(basename $0) Will determine if a Open Stack VM exists if not it will" - echo " triger a GitLab pipeline to create the VM. It requires that you " + echo " will exit with an error code. It requires that you " echo "provide the Open Stack VM ID" echo - echo "Syntax: $(basename $0) [-h|i|s|c|g|a|n]" + echo "Syntax: $(basename $0) [-h|i|s|c|a|n]" echo "options:" echo "-h, --help Print this help message" echo "-i, --app-credential-id The application credentials id for" @@ -23,12 +23,12 @@ Help() echo " to check id or name is required." echo "-n, --compute-instance-name The name of the instance we are trying" echo " to check id or name is required.." - echo "-g, --gitlab-trigger-token The GitLab token for restarting the CI" - echo " pipeline to generate the VMs." echo "-a, --gitlab-api-token The GitLab API token for checking the" echo " status of a pipeline." 
} +GITLAB_PROJECT_ID="10830" + OS_APP_ID=$(printenv OS_APP_ID || true) if [ -z "$OS_APP_ID" ] then @@ -47,14 +47,6 @@ else local_OS_APP_SECRET="$OS_APP_SECRET" fi -GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN=$(printenv GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN || true) -if [ -z "$GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" ] -then - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN="" -else - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN="$GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" -fi - GITLAB_DATAFEDCI_REPO_API_TOKEN=$(printenv GITLAB_DATAFEDCI_REPO_API_TOKEN || true) if [ -z "$GITLAB_DATAFEDCI_REPO_API_TOKEN" ] then @@ -69,7 +61,7 @@ COMPUTE_INSTANCE_NAME="" COMPUTE_NAME_PROVIDED="FALSE" COMPUTE_ID_PROVIDED="FALSE" -VALID_ARGS=$(getopt -o hi:s:c:g:a:n: --long 'help',app-credential-id:,app-credential-secret:,compute-instance-id:,gitlab-trigger-token:,gitlab-api-token:,compute-instance-name: -- "$@") +VALID_ARGS=$(getopt -o hi:s:c:a:n: --long 'help',app-credential-id:,app-credential-secret:,compute-instance-id:,gitlab-api-token:,compute-instance-name: -- "$@") if [[ $? -ne 0 ]]; then exit 1; fi @@ -98,10 +90,6 @@ while [ : ]; do COMPUTE_NAME_PROVIDED="TRUE" shift 2 ;; - -g | --gitlab-trigger-token) - local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN=$2 - shift 2 - ;; -a | --gitlab-api-token) local_GITLAB_DATAFEDCI_REPO_API_TOKEN=$2 shift 2 @@ -136,16 +124,9 @@ then exit 1 fi -if [ -z "$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" ] -then - echo "The GitLab token for triggering the CI pipeline has not been defined it" - echo "is a required parameter." - exit 1 -fi - if [ -z "$local_GITLAB_DATAFEDCI_REPO_API_TOKEN" ] then - echo "The GitLab token for accessing the API of the datafedci repo is missing." + echo "The GitLab token for accessing the API of the DataFed ci repo is missing." echo "It is a required parameter." exit 1 fi @@ -173,7 +154,7 @@ then fi # Make sure jq is installed -jq_path=$(which jq) +jq_path=$(which jq || true) if [ -z "$jq_path" ] then echo "jq command not found exiting!" @@ -182,23 +163,27 @@ fi wait_for_running_infrastructure_pipelines_to_finish() { local GITLAB_REPO_API_TOKEN="$1" - local all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines?status=running" | jq '.[]') + local all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines?status=running" | jq '.[]') if [ -z "$all_other_pipelines" ] then echo "No other running infrastructure provisioning pipelines detected!" fi - + local count=0 while [ ! -z "$all_other_pipelines" ] do - echo "$count Other running infrastructure provisioning pipelines detected... waiting for them to complete." + echo "Attempt $count, Other running infrastructure provisioning pipelines detected... waiting for them to complete." 
+ echo + echo "Running Pipelines Are:" echo "$all_other_pipelines" | jq '.id' sleep 30s count=$(($count + 1)) - all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines?status=running" | jq '.[]') + all_other_pipelines=$(curl -s --header "PRIVATE-TOKEN: ${GITLAB_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines?status=running" | jq '.[]') done } +# Will search the open research cloud for instance that is running with the +# provided identity find_orc_instance_by_id() { local SANITIZED_TOKEN="$1" local SANITIZED_URL="$2" @@ -238,7 +223,6 @@ body=$(echo $data | sed 's/^.*{\"token/{\"token/' ) compute_url=$(echo "$body" | jq '.token.catalog[] | select(.name=="nova") |.endpoints[] | select(.interface=="public") | .url ') sanitize_compute_url=$(echo $compute_url | sed 's/\"//g') - header=$(echo "$data" | sed 's/{\"token.*//') subject_token=$(echo "$data" | grep "X-Subject-Token" | awk '{print $2}' ) @@ -271,42 +255,7 @@ pipeline_id="" if [ "$found_vm_id" == "FALSE" ] then echo "VM ID: $compute_id Name: $compute_name is Unhealthy, does not exist, triggering pipeline." - - #datafedci_repo_api_trigger_to_run_ci_pipeline - # Here we need to make a request to the code.ornl.gov at datafedci - gitlab_response=$(curl -s --retry 5 --request POST \ - --form token="$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" \ - --form ref="main" \ - "https://code.ornl.gov/api/v4/projects/10830/trigger/pipeline") - pipeline_id=$(echo "$gitlab_response" | jq '.id' ) - - MAX_COUNT=40 - count=0 - while [ "$found_vm_id" == "FALSE" ] - do - printf "$count Waiting for pipeline to start VM $compute_id..." - sleep 30s - # Run while loop and sleep until VM shows up or timeout is hit - compute_id="" - compute_name="" - found_vm_id="FALSE" - if [ "$COMPUTE_ID_PROVIDED" == "TRUE" ] - then - find_orc_instance_by_id "$sanitize_subject_token" "$sanitize_compute_url" "$COMPUTE_INSTANCE_ID" - fi - if [[ "$found_vm_id" == "FALSE" && "$COMPUTE_NAME_PROVIDED" == "TRUE" ]] - then - find_orc_instance_by_name "$sanitize_subject_token" "$sanitize_compute_url" "$COMPUTE_INSTANCE_NAME" - fi - - count=$(($count + 1)) - - if [ "$count" == "$MAX_COUNT" ] - then - echo "Exceeded time limit!" - exit 1 - fi - done + exit 1 fi ################################################################################ @@ -329,15 +278,7 @@ fi if [[ "$VM_IS_ACTIVE" == "FALSE" && -z "$pipeline_id" ]] then echo "VM ID: $compute_id Name: $compute_name is unhealthy triggering pipeline." - - #datafedci_repo_api_trigger_to_run_ci_pipeline - # Here we need to make a request to the code.ornl.gov at datafedci - gitlab_response=$(curl -s --retry 5 --request POST \ - --form token="$local_GITLAB_DATAFEDCI_REPO_TRIGGER_TOKEN" \ - --form ref="main" \ - "https://code.ornl.gov/api/v4/projects/10830/trigger/pipeline") - - pipeline_id=$(echo "$gitlab_response" | jq '.id' ) + exit 1 fi # If the pipeline is defined check the status of the VMs @@ -348,7 +289,7 @@ then while [ "$VM_IS_ACTIVE" == "FALSE" ] do - printf "$count Waiting for pipeline to start VM ... " + printf "Attempt $count, Waiting for pipeline to start VM ... 
" sleep 30s compute_instances=$(curl -s --retry 5 -H "X-Auth-Token: $sanitize_subject_token" "$sanitize_compute_url/servers/detail" | jq) INSTANCE_STATUS=$(echo "$compute_instances" | jq --arg compute_id "$compute_id" '.servers[] | select(.id==$compute_id) | .status ') @@ -389,9 +330,9 @@ then KEEP_RUNNING="TRUE" while [ "$KEEP_RUNNING" == "TRUE" ] do - pipeline_status=$(curl -s --header "PRIVATE-TOKEN: ${local_GITLAB_DATAFEDCI_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/10830/pipelines/$pipeline_id" | jq .status | sed 's/\"//g') + pipeline_status=$(curl -s --header "PRIVATE-TOKEN: ${local_GITLAB_DATAFEDCI_REPO_API_TOKEN}" "https://code.ornl.gov/api/v4/projects/${GITLAB_PROJECT_ID}/pipelines/$pipeline_id" | jq .status | sed 's/\"//g') - printf "$count Waiting for triggered infrastructure provisioning pipeline: ${pipeline_id} to complete ... " + printf "Attempt $count, Waiting for triggered infrastructure provisioning pipeline: ${pipeline_id} to complete ... " if [ "$pipeline_status" == "failed" ] then echo "Infrastructure triggered pipeline has failed unable to execute CI. STATUS: $pipeline_status" diff --git a/scripts/ci_purge_images.sh b/scripts/ci_purge_images.sh new file mode 100755 index 000000000..1b0923c8c --- /dev/null +++ b/scripts/ci_purge_images.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +# The purpose of this script is to prevent the number of images built on a +# VM from taking over to much storage, here we can set the number of GB, that +# we will allow to be stored on the VM, after which we will start deleting +# the oldest one + +# Max allowed size of all images in GB +THRESHOLD_IN_GB="15" + +get_size_of_all_images_in_GB() { + declare -g total_image_size_number="0" + docker_size_stats=$(docker system df --format "{{.Type}} {{.Size}}") + echo "docker_size_stats" + total_image_size=$(echo "${docker_size_stats}" | head -1 | awk '{print $2}' ) + echo "Image size is $total_image_size" + if [ ! 
-z "${total_image_size}" ] + then + if [ "${total_image_size: -2}" = "GB" ] + then + total_image_size_number="${total_image_size%??}" + total_image_size_number="${total_image_size%%.*}" + fi + fi +} + +purge_oldest_image() { + oldest_image_id=$(docker image list --format "{{.ID}}" | tail -n1) + docker image rm "$oldest_image_id" -f +} + +get_size_of_all_images_in_GB + +while [ "$total_image_size_number" -gt "$THRESHOLD_IN_GB" ] +do + purge_oldest_image + get_size_of_all_images_in_GB +done + diff --git a/scripts/clear_db.sh b/scripts/clear_db.sh index 696c66bcf..b4a5a20b4 100755 --- a/scripts/clear_db.sh +++ b/scripts/clear_db.sh @@ -22,8 +22,36 @@ else local_DATAFED_ZEROMQ_SYSTEM_SECRET=$(printenv DATAFED_ZEROMQ_SYSTEM_SECRET) fi +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST="localhost" +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + +if [ -z "${DATAFED_DATABASE_PORT}" ] +then + local_DATAFED_DATABASE_PORT="8529" +else + local_DATAFED_DATABASE_PORT=$(printenv DATAFED_DATABASE_PORT) +fi + # Delete database and API from arangodb if command -v arangosh &> /dev/null then - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._dropDatabase("sdms");' + exists=$(arangosh --server.endpoint "http+tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \ + --server.username "$local_DATABASE_USER" \ + --server.password "$local_DATAFED_DATABASE_PASSWORD" \ + --javascript.execute "db._databases().includes('$local_DATABASE_NAME')") + + if [ "$exists" = "true" ]; then + arangosh --server.endpoint + "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATAFED_DATABASE_PORT}" \ + --server.password "${local_DATAFED_DATABASE_PASSWORD}" \ + --server.username "${local_DATABASE_USER}" \ + --javascript.execute-string "db._dropDatabase('$local_DATABASE_NAME');" + else + echo "Database $local_DATABASE_NAME does not exist." + fi + fi diff --git a/scripts/dependency_install_functions.sh b/scripts/dependency_install_functions.sh index 5bd8c572f..ee38919ad 100644 --- a/scripts/dependency_install_functions.sh +++ b/scripts/dependency_install_functions.sh @@ -41,8 +41,14 @@ install_protobuf() { cd python python3 setup.py build python3 setup.py test - sudo python3 setup.py install - cd ../../ + python3 setup.py install --user + cd ../ + # Cleanup build file with root ownership + if [ -f build/install_manifest.txt ] + then + sudo rm build/install_manifest.txt + fi + cd ../ # Mark protobuf as installed touch ".protobuf_installed-${DATAFED_PROTOBUF_VERSION}" diff --git a/scripts/generate_certificate_refresh_script.sh b/scripts/generate_certificate_refresh_script.sh new file mode 100755 index 000000000..2ef521140 --- /dev/null +++ b/scripts/generate_certificate_refresh_script.sh @@ -0,0 +1,83 @@ +#!/bin/bash +# Cannot run with -u because we check for unbound variables +# and the script will exit prematurely if '-u' is set +set -ef -o pipefail + +SCRIPT=$(realpath "$0") +FILE_NAME=$(basename "${SCRIPT}") +SOURCE=$(dirname "$SCRIPT") +PROJECT_ROOT=$(realpath "${SOURCE}/..") +source "${PROJECT_ROOT}/config/datafed.sh" + +VERSION="1.0.1" +echo "$FILE_NAME $VERSION" + +ERROR_DETECTED=0 +if [ -z "$DATAFED_INSTALL_PATH" ] +then + echo "Error DATAFED_INSTALL_PATH is not defined in config/datafed.sh, this is" + echo " a required argument." 
+ ERROR_DETECTED=1 +fi + +if [ -z "$DATAFED_LEGO_EMAIL" ] +then + echo "Error DATAFED_LEGO_EMAIL is not defined in config/datafed.sh, this is" + echo " a required argument." + ERROR_DETECTED=1 +fi + +if [ -z "$DATAFED_DOMAIN" ] +then + echo "Error DATAFED_DOMAIN is not defined in config/datafed.sh, this is" + echo " a required argument." + ERROR_DETECTED=1 +fi + +if [ "$ERROR_DETECTED" == "1" ] +then + exit 1 +fi + +cat << OUTER_EOF > "$PROJECT_ROOT/scripts/admin_refresh_certs.sh" +#!/bin/bash + +# NOTE this script is generated by $SCRIPT, to recreate it change +# settings in datafed.sh and rerun $SCRIPT. +# +# You will still need to make sure this script is registered in cron +# sudo crontab -e +# 0 0 * */2 0 ${DATAFED_INSTALL_PATH}/scripts/admin_refresh_certs.sh +# This script will run every second month if configured in cron with the above +# settings. +# This script should be run with 755 permissions +# and root root +DOMAIN="${DATAFED_DOMAIN}" +systemctl stop datafed-ws.service +lego --accept-tos --email="${DATAFED_LEGO_EMAIL}" --domains="${DATAFED_DOMAIN}" --tls run +DIR_NAME=\$(date +%m-%Y) +mkdir -p "\${DIR_NAME}" +# Create a copy so we can always go back and check when the last time was +# that the job ran successfully +cp ".lego/certificates/${DATAFED_DOMAIN}.key" "\${DIR_NAME}" +cp ".lego/certificates/${DATAFED_DOMAIN}.crt" "\${DIR_NAME}" +mv ".lego/certificates/${DATAFED_DOMAIN}.key" ${DATAFED_INSTALL_PATH}/keys/ +mv ".lego/certificates/${DATAFED_DOMAIN}.crt" ${DATAFED_INSTALL_PATH}/keys/ +chmod 600 "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.key" +chmod 600 "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.crt" +chown root "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.crt" +chgrp root "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.crt" +chown root "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.key" +chgrp root "${DATAFED_INSTALL_PATH}/keys/${DATAFED_DOMAIN}.key" +systemctl start datafed-ws.service + +sendmail "$DATAFED_ADMIN_EMAIL" << EOF +To: $DATAFED_ADMIN_EMAIL +From: $DATAFED_SYSTEM_EMAIL +Subject: DataFed Certificate Update + +DataFed has updated the web certificates. +EOF +OUTER_EOF + +chmod 755 "$PROJECT_ROOT/scripts/admin_refresh_certs.sh" diff --git a/scripts/generate_datafed.sh b/scripts/generate_datafed.sh index ccb3138ef..4ba27a1c8 100755 --- a/scripts/generate_datafed.sh +++ b/scripts/generate_datafed.sh @@ -142,6 +142,13 @@ else local_DATAFED_CORE_ADDRESS_PORT_INTERNAL=$(printenv DATAFED_CORE_ADDRESS_PORT_INTERNAL) fi +if [ -z "${DATAFED_GOOGLE_ANALYTICS_TAG}" ] +then + local_DATAFED_GOOGLE_ANALYTICS_TAG="" +else + local_DATAFED_GOOGLE_ANALYTICS_TAG=$(printenv DATAFED_GOOGLE_ANALYTICS_TAG) +fi + if [ ! -d "$PATH_TO_CONFIG_DIR" ] then mkdir -p "$PATH_TO_CONFIG_DIR" @@ -175,21 +182,27 @@ export DATAFED_GLOBUS_APP_SECRET="$local_DATAFED_GLOBUS_APP_SECRET" # ************************************************ # i.e. 
7512 - ZeroMQ port export DATAFED_SERVER_PORT="$local_DATAFED_SERVER_PORT" + # ************************************************ # Env Variables for Authz, Web, Repo Server # ************************************************ -# If not set will resolve to datafed.ornl.gov -export DATAFED_DOMAIN="$local_DATAFED_DOMAIN" # DataFed Repository POSIX user account that DataFed users will be mapped too # from Globus, so the posix account all globus users will map too export DATAFED_GLOBUS_REPO_USER="" +# ****************************************************************** +# Env Variables for Authz, Web, Repo Server & administrative scripts +# ****************************************************************** +# If not set will resolve to datafed.ornl.gov +export DATAFED_DOMAIN="$local_DATAFED_DOMAIN" + # ************************************************ # Env Variables for Core Server # ************************************************ export DATAFED_DATABASE_PASSWORD="$local_DATAFED_DATABASE_PASSWORD" # The user account the datafed core application will run under export DATAFED_CORE_USER="" + # ************************************************ # Env Variables for Web Server # ************************************************ @@ -206,6 +219,22 @@ export DATAFED_WEB_CERT_PATH="$local_DATAFED_WEB_CERT_PATH" export DATAFED_WEB_USER="" # How the web server communicates with the core server, assumes an internal network export DATAFED_CORE_ADDRESS_PORT_INTERNAL="$local_DATAFED_CORE_ADDRESS_PORT_INTERNAL" +# The id for the associated Google Analytics tag, if left empty, Google Analytics will be disabled +# You can find your tag id by going to the stream details page and it is the field marked as "Measurement ID" +# It will be in the form of "G-XXXXXXXXXX" +export DATAFED_GOOGLE_ANALYTICS_TAG="$local_DATAFED_GOOGLE_ANALYTICS_TAG" + +# **************************************************************************** +# Env Variables for DataFed Core server administrative and operational scripts +# **************************************************************************** +# The admin should who should be receiving emails about the backups +export DATAFED_ADMIN_EMAIL="" +# DataFed system email is from the actual system not from a person, it is +# used to fill in the from field when sending emails out to admins or users. +export DATAFED_SYSTEM_EMAIL="" +# Where the database backups will be placed. +export DATAFED_DATABASE_BACKUP_PATH="" + # ************************************************ # Env Variables for Globus Connect Server # ************************************************ diff --git a/scripts/generate_datafed_backup_script.sh b/scripts/generate_datafed_backup_script.sh new file mode 100755 index 000000000..64ec9ca4b --- /dev/null +++ b/scripts/generate_datafed_backup_script.sh @@ -0,0 +1,89 @@ +#!/bin/bash + +# Cannot run with -u because we check for unbound variables +# and the script will exit prematurely if '-u' is set +set -ef -o pipefail + +SCRIPT=$(realpath "$0") +FILE_NAME=$(basename "${SCRIPT}") +SOURCE=$(dirname "$SCRIPT") +PROJECT_ROOT=$(realpath "${SOURCE}/..") +source "${PROJECT_ROOT}/config/datafed.sh" + +VERSION="1.0.0" +echo "$FILE_NAME $VERSION" + +ERROR_DETECTED=0 +# The admin should who should be receiving emails about the backups +if [ -z "$DATAFED_ADMIN_EMAIL" ] +then + echo "Error DATAFED_ADMIN_EMAIL is not defined, this is a required argument." 
+ ERROR_DETECTED=1 +fi + +# DataFed system email is from the actual system not from a person, it is +# used to fill in the from field when sending emails out to admins or users. +if [ -z "$DATAFED_SYSTEM_EMAIL" ] +then + echo "Error DATAFED_SYSTEM_EMAIL is not defined, this is a required argument" + ERROR_DETECTED=1 +fi + +# Where the database backups will be placed. +if [ -z "$DATAFED_DATABASE_BACKUP_PATH" ] +then + echo "Error DATAFED_DATABASE_BACKUP_PATH is not defined, this is a required argument" + ERROR_DETECTED=1 +fi + +if [ "$ERROR_DETECTED" == "1" ] +then + exit 1 +fi + +cat << OUTER_EOF > "$PROJECT_ROOT/scripts/admin_datafed_backup.sh" +#!/bin/bash + +# This script needs to be registered in the crontab +# 45 23 * * 0 ${DATAFED_INSTALL_PATH}/scripts/admin_datafed-backup.sh +# This will run at 11:45 pm every sunday night +# If need to send mail to SMTP machine + +# NOTE this script is generated by $SCRIPT, to recreate it change +# settings in datafed.sh and rerun $SCRIPT. +echo "DataFed - running DB backup" + +# Shutdown DataFed services +systemctl stop globus-gridftp-server.service +systemctl stop datafed-ws.service +systemctl stop datafed-repo.service +systemctl stop datafed-core.service +systemctl stop arangodb3.service + +backup_file=DataFed_DB_Backup_\$(date +"%Y_%m_%d").tar.gz + +# Tar contents of arangodb directory without full path +tar -C /var/lib/arangodb3 -cvzf \${backup_file} . + +# Move backup file to storage location +mv \${backup_file} ${DATAFED_DATABASE_BACKUP_PATH}/backups + +# Restart DataFed services +systemctl start arangodb3.service +systemctl start datafed-core.service +systemctl start globus-gridftp-server.service +systemctl start datafed-repo.service +systemctl start datafed-ws.service + +echo "DataFed - backup completed" + +sendmail "$DATAFED_ADMIN_EMAIL" << EOF +To: $DATAFED_ADMIN_EMAIL +From: $DATAFED_SYSTEM_EMAIL +Subject: DataFed Database Backup Notification + +DataFed has been backed up. A new backup exists at ${DATAFED_DATABASE_BACKUP_PATH}/backups. +EOF +OUTER_EOF + +chmod +x "$PROJECT_ROOT/scripts/admin_datafed_backup.sh" diff --git a/scripts/generate_ws_config.sh b/scripts/generate_ws_config.sh index 17d8a40ee..f14a00e4c 100755 --- a/scripts/generate_ws_config.sh +++ b/scripts/generate_ws_config.sh @@ -14,7 +14,7 @@ Help() { echo "$(basename $0) Will set up a configuration file for the core server" echo - echo "Syntax: $(basename $0) [-h|s|i|z|y|w|k]" + echo "Syntax: $(basename $0) [-h|s|i|z|y|w|k|t]" echo "options:" echo "-h, --help Print this help message." echo "-s, --globus-secret Globus App secret used by DataFed to authenticate" @@ -38,6 +38,7 @@ Help() echo "-y, --zeromq-system-secret ZeroMQ system secret" echo "-w, --web-cert-path Path to web server certificate file." echo "-k, --web-key-path Path to web server key file." 
+ echo "-t, --google-analytics-tag The tag associated with a Google Analytics stream" } # Set defaults use environment variables by default @@ -97,8 +98,15 @@ else local_DATAFED_CORE_ADDRESS_PORT_INTERNAL=$(printenv DATAFED_CORE_ADDRESS_PORT_INTERNAL) fi +if [ -z "${DATAFED_GOOGLE_ANALYTICS_TAG}" ] +then + local_DATAFED_GOOGLE_ANALYTICS_TAG="" +else + local_DATAFED_GOOGLE_ANALYTICS_TAG=$(printenv DATAFED_GOOGLE_ANALYTICS_TAG) +fi + -VALID_ARGS=$(getopt -o hs:i:z:y:w:k:c: --long 'help',globus-secret:,globus-id:,zeromq-session-secret:,zeromq-system-secret:,web-cert-path:,web-key-path:,core-address-port: -- "$@") +VALID_ARGS=$(getopt -o hs:i:z:y:w:k:c:t: --long 'help',globus-secret:,globus-id:,zeromq-session-secret:,zeromq-system-secret:,web-cert-path:,web-key-path:,core-address-port:,google-analytics-tag: -- "$@") if [[ $? -ne 0 ]]; then exit 1; fi @@ -145,6 +153,11 @@ while [ : ]; do local_DATAFED_CORE_ADDRESS_PORT_INTERNAL=$2 shift 2 ;; + -t | --google-analytics-tag) + echo "Processing 'DataFed Google Analytics tag' option. Input argument is '$2'" + local_DATAFED_GOOGLE_ANALYTICS_TAG=$2 + shift 2 + ;; --) shift; break ;; @@ -227,6 +240,10 @@ client_secret=${local_DATAFED_GLOBUS_APP_SECRET} # This is the address to talk with the core server which is listening on # port 7513, assuming internal network. server_address=tcp://${local_DATAFED_CORE_ADDRESS_PORT_INTERNAL} + +[operations] +# This is the tag associated with a Google Analytics installation that metrics will be sent to. +google_analytics_tag=${local_DATAFED_GOOGLE_ANALYTICS_TAG} EOF echo diff --git a/scripts/install_client_dependencies.sh b/scripts/install_client_dependencies.sh new file mode 100755 index 000000000..574e0457c --- /dev/null +++ b/scripts/install_client_dependencies.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# Exit on error +set -e + +SCRIPT=$(realpath "$0") +SOURCE=$(dirname "$SCRIPT") +PROJECT_ROOT=$(realpath "${SOURCE}/..") + +source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh" + +# This script will install all of the dependencies needed by DataFed 1.0 +sudo apt-get update +sudo dpkg --configure -a + +python3 -m pip install -r "${PROJECT_ROOT}/python/datafed_pkg/requirements.txt" + +install_protobuf +cd ~ + diff --git a/scripts/install_foxx.sh b/scripts/install_foxx.sh index ea1b0d4ce..84023f9e7 100755 --- a/scripts/install_foxx.sh +++ b/scripts/install_foxx.sh @@ -14,7 +14,7 @@ Help() { echo "$(basename $0) Will set up a configuration file for the core server" echo - echo "Syntax: $(basename $0) [-h|u|f|p|y]" + echo "Syntax: $(basename $0) [-h|u|f|p|i|y]" echo "options:" echo "-h, --help Print this help message." echo "-u, --database-user Database user, needed to log into the database." @@ -24,6 +24,10 @@ Help() echo " provided via the command line it can also be set" echo " using the enviromental variable" echo " DATAFED_DATABASE_PASSWORD." + echo "-i, --database-host The hostname or IP address of the " + echo " database, the env variable: " + echo " DATAFED_DATABASE_HOST can also be " + echo " used." echo "-y, --system-secret ZeroMQ system secret" echo echo "NOTE: Do not run this script with sudo!" 
@@ -99,6 +103,7 @@ semantic_version_compatible() { local_DATABASE_NAME="sdms" local_DATABASE_USER="root" +local_DATABASE_PORT="8529" if [ -z "${DATAFED_DATABASE_PASSWORD}" ] then @@ -121,13 +126,21 @@ else local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION) fi -VALID_ARGS=$(getopt -o hu:p:f:y: --long 'help',database-user:,database-password:,foxx-api-major-version:,zeromq-system-secret: -- "$@") + +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST="localhost" +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + + +VALID_ARGS=$(getopt -o hu:p:f:i:y: --long 'help',database-user:,database-password:,foxx-api-major-version:,database-host:,zeromq-system-secret: -- "$@") if [[ $? -ne 0 ]]; then exit 1; fi eval set -- "$VALID_ARGS" while [ : ]; do - echo "$1" case "$1" in -h | --help) Help @@ -148,6 +161,11 @@ while [ : ]; do local_FOXX_MAJOR_API_VERSION=$2 shift 2 ;; + -i | --database-host) + echo "Processing 'database host' option. Input argument is '$2'" + local_DATAFED_DATABASE_HOST=$2 + shift 2 + ;; -y | --zeromq-system-secret) echo "Processing 'DataFed ZeroMQ system secret' option. Input argument is '$2'" local_DATAFED_ZEROMQ_SYSTEM_SECRET=$2 @@ -186,17 +204,29 @@ fi # We are now going to initialize the DataFed database in Arango, but only if sdms database does # not exist -output=$(curl --dump - --user $local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD http://localhost:8529/_api/database/user) +output=$(curl --dump - \ + --user "$local_DATABASE_USER:$local_DATAFED_DATABASE_PASSWORD" + "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}/_api/database/user") if [[ "$output" =~ .*"sdms".* ]]; then echo "SDMS already exists do nothing" else echo "Creating SDMS" - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute ${PROJECT_ROOT}/core/database/foxx/db_create.js + arangosh --server.endpoint \ + "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password "${local_DATAFED_DATABASE_PASSWORD}" \ + --server.username "${local_DATABASE_USER}" \ + --javascript.execute "${PROJECT_ROOT}/core/database/foxx/db_create.js" # Give time for the database to be created sleep 2 - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' - arangosh --server.password ${local_DATAFED_DATABASE_PASSWORD} --server.username ${local_DATABASE_USER} --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password "${local_DATAFED_DATABASE_PASSWORD}" \ + --server.username "${local_DATABASE_USER}" \ + --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});' + arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + --server.password "${local_DATAFED_DATABASE_PASSWORD}" \ + --server.username 
"${local_DATABASE_USER}" \ + --javascript.execute-string "db._useDatabase(\"sdms\"); db.config.insert({ \"_key\": \"system\", \"_id\": \"config/system\", \"secret\": \"${local_DATAFED_ZEROMQ_SYSTEM_SECRET}\"}, {overwrite: true } );" fi # There are apparently 3 different ways to deploy Foxx microservices, @@ -207,7 +237,7 @@ fi # The web deployment requires manual interaction, and I could not figure out the # syntax for the REST http endpoints with curl so we are going to try the node module actual_version=$(node --version) -semantic_version_compatible $actual_version $DATAFED_NODE_VERSION +semantic_version_compatible "$actual_version" "$DATAFED_NODE_VERSION" compatible=$? if [ "$compatible" -eq "0" ] @@ -221,41 +251,64 @@ then export NVM_DIR="$HOME/.nvm" [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm - nvm install $DATAFED_NODE_VERSION - nvm use $DATAFED_NODE_VERSION + nvm install "$DATAFED_NODE_VERSION" + nvm use "$DATAFED_NODE_VERSION" # Install foxx service node module - $NVM_DIR/nvm-exec npm install --global foxx-cli --prefix ~/ + "$NVM_DIR/nvm-exec" npm install --global foxx-cli --prefix ~/ else # We are assuming that if the correct version of node is installed then the # correct version of npm is also installed npm install --global foxx-cli --prefix ~/ fi -PATH_TO_PASSWD_FILE=${SOURCE}/database_temp.password +FOXX_PREFIX="" +{ + # Determine if exists globally first + which foxx +} || { + FOXX_PREFIX="~/bin/" +} + +PATH_TO_PASSWD_FILE="${SOURCE}/database_temp.password" echo "Path to PASSWRD file ${PATH_TO_PASSWD_FILE} passwd is $local_DATAFED_DATABASE_PASSWORD" echo "$local_DATAFED_DATABASE_PASSWORD" > "${PATH_TO_PASSWD_FILE}" { # try # Check if database foxx services have already been installed - existing_services=$(foxx list -a -u $local_DATABASE_USER -p ${PATH_TO_PASSWD_FILE} --database $local_DATABASE_NAME) + existing_services=$("${FOXX_PREFIX}foxx" list \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -a -u "$local_DATABASE_USER" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "$local_DATABASE_NAME") FOUND_API=$(echo "$existing_services" | grep "/api/${local_FOXX_MAJOR_API_VERSION}") if [ -z "${FOUND_API}" ] then - foxx install -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + "${FOXX_PREFIX}foxx" install \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" \ + "${PROJECT_ROOT}/core/database/foxx/" else echo "DataFed Foxx Services have already been uploaded, replacing to ensure consisency" - foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx/ + "${FOXX_PREFIX}foxx" replace \ + --server "http://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \ + -u "${local_DATABASE_USER}" \ + -p "${PATH_TO_PASSWD_FILE}" \ + --database "${local_DATABASE_NAME}" \ + "/api/${local_FOXX_MAJOR_API_VERSION}" "${PROJECT_ROOT}/core/database/foxx/" echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx" fi - rm ${PATH_TO_PASSWD_FILE} + rm "${PATH_TO_PASSWD_FILE}" } || { # catch - rm ${PATH_TO_PASSWD_FILE} + rm "${PATH_TO_PASSWD_FILE}" } 
diff --git a/tests/end-to-end/setUp.py b/tests/end-to-end/setUp.py index 9ad99b5a2..504bd8102 100755 --- a/tests/end-to-end/setUp.py +++ b/tests/end-to-end/setUp.py @@ -1,10 +1,10 @@ #!/bin/python3 + import json import os -import subprocess import sys import time -import unittest +from datafed import version as df_ver path_of_file = os.path.abspath(__file__) current_folder = os.path.dirname(path_of_file) @@ -16,11 +16,11 @@ from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) -from datafed import version as df_ver print(df_ver) @@ -32,11 +32,6 @@ _df_api.loginByPassword(username, password) path_to_repo_form = os.environ.get("DATAFED_REPO_FORM_PATH") -# if path_to_repo_form is None: -# fail("DATAFED_REPO_FORM_PATH env variable is not defined") - -# if not path_to_repo_form.endswith(".json"): -# fail("repo create test requires that the repo form exist and be provided as a json file, the test uses the environment variable DATAFED_REPO_PATH to search for the repo form") _repo_form = {} with open(path_to_repo_form) as json_file: @@ -65,7 +60,7 @@ result = _df_api.repoList(list_all=True) count = count + 1 if count > 3: - fail("Setup failed with repo create") + raise Exception("Setup failed with repo create") repo_id = _repo_form["id"] @@ -88,10 +83,11 @@ while status < 3: if count > 2: print(task_result) - fail( - "Something went wrong task was unable to complete, attempt to create an allocation after 3 seconds failed, make sure all services are running." + raise Exception( + "Something went wrong task was unable to complete, attempt to " + "create an allocation after 3 seconds failed, make sure all " + "services are running." ) - break time.sleep(1) task_result = _df_api.taskView(task_id) status = task_result[0].task[0].status diff --git a/tests/end-to-end/setup.sh b/tests/end-to-end/setup.sh index 548c89c51..2e35dbef1 100755 --- a/tests/end-to-end/setup.sh +++ b/tests/end-to-end/setup.sh @@ -104,11 +104,19 @@ fi # Detect whether arangodb is running locally -ARANGODB_RUNNING=$(systemctl is-active --quiet arangodb3.service && echo "RUNNING") -if [ "$ARANGODB_RUNNING" != "RUNNING" ] +{ + ARANGODB_RUNNING=$(systemctl is-active --quiet arangodb3.service && echo "RUNNING") +} || { + echo "Arangodb service is not locally detected." 
+} + +if [ "${DATAFED_DATABASE_HOST}" == "localhost" ] || [ "${DATAFED_DATABASE_HOST}" == "127.0.0.1" ] then - echo "REQUIRED the arangodb service has not been detected to be running by systemctl" - exit 1 + if [ "$ARANGODB_RUNNING" != "RUNNING" ] + then + echo "REQUIRED the arangodb service has not been detected to be running by systemctl" + exit 1 + fi fi # First step is to clear the database @@ -117,31 +125,38 @@ ${PROJECT_ROOT}/scripts/clear_db.sh # Second install foxx ${PROJECT_ROOT}/scripts/install_foxx.sh + +if [ -z "${DATAFED_DATABASE_HOST}" ] +then + local_DATAFED_DATABASE_HOST=$(hostname -I | awk '{print $1}') +else + local_DATAFED_DATABASE_HOST=$(printenv DATAFED_DATABASE_HOST) +fi + #curl -X GET http://127.0.0.1:8529/_db/sdms/ -IP=$(hostname -I | awk '{print $1}') -echo "IP is $IP" +echo "IP is ${local_DATAFED_DATABASE_HOST}" echo "USER89 GLobud ID $DATAFED_USER89_GLOBUS_UUID" echo "Refresh is ${DATAFED_USER89_REFRESH_TOKEN}" # Chreate user datafed89 who is admin -HTTP_CODE=$( curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) +HTTP_CODE=$( curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$( curl -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) + response=$( curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed89&uuids=%5B\"${DATAFED_USER89_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER89_PASSWORD}&email=datafed89%40gmail.com&is_admin=true&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}" ) CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" exit 1 fi # Set globus tokens -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") + response=$(curl --fail-early -X GET 
"http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed89&access=${DATAFED_USER89_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER89_GLOBUS_REFRESH_TOKEN}&expires_in=1") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" @@ -149,24 +164,24 @@ then fi # Create user datafed99 who is not admin -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") + response=$(curl --fail-early -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/create?name=Data%20Fed&uid=datafed99&uuids=%5B\"${DATAFED_USER99_GLOBUS_UUID}\"%5D&password=${local_DATAFED_USER99_PASSWORD}&email=datafed99%40gmail.com&is_admin=false&secret=${DATAFED_ZEROMQ_SYSTEM_SECRET}") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" exit 1 fi # Set globus tokens -HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") +HTTP_CODE=$(curl -w "%{http_code}" -o /dev/null -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") echo "HTTP_CODE: ${HTTP_CODE}" FIRST_INT=${HTTP_CODE:0:1} if [ "${FIRST_INT}" -ne "2" ] then - response=$(curl --fail-early -X GET "http://${IP}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") + response=$(curl --fail-early -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/${local_FOXX_MAJOR_API_VERSION}/usr/token/set?client=u%2Fdatafed99&access=${DATAFED_USER99_GLOBUS_ACCESS_TOKEN}&refresh=${DATAFED_USER99_GLOBUS_REFRESH_TOKEN}&expires_in=1") CODE=$(echo $response | jq .code ) ERROR_MSG=$(echo $response | jq .errorMessage ) echo "$ERROR_MSG" @@ -177,7 +192,7 @@ exit 0 #source ${DATAFED_REPO_FORM_PATH} # Using the datafed89 client because it has admin rights to add the repo -#curl -X POST --header 'accept: application/json' --data-binary @- --dump - "http://${IP}:8529/_db/sdms/api/repo/create?client=u%2Fdatafed89" <<\ +#curl -X POST --header 'accept: 
application/json' --data-binary @- --dump - "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/create?client=u%2Fdatafed89" <<\ #EOF #{ # "id" : "$DATAFED_REPO_ID", @@ -196,7 +211,7 @@ exit 0 # ## Using the datafed89 client because it has the repo rights to create an allocation ## Creating an allocation for datafed89 -#curl -X GET "http://${IP}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed89&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" +#curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed89&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" # ## Creating an allocation for datafed99 -#curl -X GET "http://${IP}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed99&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" +#curl -X GET "http://${local_DATAFED_DATABASE_HOST}:8529/_db/sdms/api/repo/alloc/create?client=u%2Fdatafed89&subject=u%2Fdatafed99&repo=repo%2F${DATAFED_REPO_ID}&data_limit=1000000000&rec_limit=100" diff --git a/tests/end-to-end/test_api_alloc.py b/tests/end-to-end/test_api_alloc.py index 4844f35bd..d687649b3 100755 --- a/tests/end-to-end/test_api_alloc.py +++ b/tests/end-to-end/test_api_alloc.py @@ -1,7 +1,6 @@ #!/bin/python3 import json import os -import subprocess import sys import time import unittest @@ -26,7 +25,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -45,7 +45,7 @@ def setUp(self): try: result = self._df_api.loginByPassword(username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate @@ -59,7 +59,9 @@ def setUp(self): if not path_to_repo_form.endswith(".json"): self.fail( - "repo create test requires that the repo form exist and be provided as a json file, the test uses the environment variable DATAFED_REPO_PATH to search for the repo form" + "repo create test requires that the repo form exist and be " + "provided as a json file, the test uses the environment " + "variable DATAFED_REPO_PATH to search for the repo form" ) self._repo_form = {} @@ -96,7 +98,6 @@ def setUp(self): print(result) def test_repo_alloc_list_create_delete(self): - repo_id = self._repo_form["id"] if not repo_id.startswith("repo/"): repo_id = "repo/" + repo_id @@ -126,7 +127,9 @@ def test_repo_alloc_list_create_delete(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to create an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, attempt" + " to create an allocation after 3 seconds failed, make sure" + " all services are running." ) break time.sleep(1) @@ -157,7 +160,9 @@ def test_repo_alloc_list_create_delete(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to delete an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, attempt" + " to delete an allocation after 3 seconds failed, make " + "sure all services are running." 
) break time.sleep(1) diff --git a/tests/end-to-end/test_api_collection.py b/tests/end-to-end/test_api_collection.py index 7a87df498..e46e66b50 100755 --- a/tests/end-to-end/test_api_collection.py +++ b/tests/end-to-end/test_api_collection.py @@ -1,7 +1,6 @@ #!/bin/python3 import json import os -import subprocess import sys import time import unittest @@ -30,7 +29,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -49,7 +49,7 @@ def setUp(self): try: result = self._df_api.loginByPassword(self._username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate @@ -61,7 +61,9 @@ def setUp(self): if not path_to_repo_form.endswith(".json"): self.fail( - "repo create test requires that the repo form exist and be provided as a json file, the test uses the environment variable DATAFED_REPO_PATH to search for the repo form" + "repo create test requires that the repo form exist and be " + "provided as a json file, the test uses the environment " + "variable DATAFED_REPO_PATH to search for the repo form" ) self._repo_form = {} @@ -117,7 +119,9 @@ def setUp(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to create an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, attempt" + " to create an allocation after 3 seconds failed, make " + "sure all services are running." ) break time.sleep(1) @@ -126,7 +130,6 @@ def setUp(self): count = count + 1 def test_collection_create_delete(self): - # collectionItemsList in "root" of context list_response = self._df_api.collectionItemsList("root") self.assertEqual(list_response[0].total, 0) @@ -195,7 +198,9 @@ def test_collection_create_delete(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to delete a colleciton after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, " + "attempt to delete a colleciton after 3 seconds failed, " + "make sure all services are running." ) break time.sleep(1) @@ -224,7 +229,9 @@ def tearDown(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to delete an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, " + "attempt to delete an allocation after 3 seconds failed," + " make sure all services are running." 
) break time.sleep(1) diff --git a/tests/end-to-end/test_api_context.py b/tests/end-to-end/test_api_context.py index be40603b5..0cd44e1ec 100755 --- a/tests/end-to-end/test_api_context.py +++ b/tests/end-to-end/test_api_context.py @@ -1,10 +1,10 @@ #!/bin/python3 -import json +# import json import os -import subprocess import sys import unittest + # Depends on the provided tests first passing # user_login class TestDataFedPythonAPIContext(unittest.TestCase): @@ -25,7 +25,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -42,17 +43,16 @@ def setUp(self): count = 0 while True: try: - result = self._df_api.loginByPassword(self._username, password) + self._df_api.loginByPassword(self._username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate assert count < 3 def test_context(self): - - context = self._df_api.getContext() + self._df_api.getContext() self.assertEqual(self._df_api.getContext(), f"u/{self._username}") diff --git a/tests/end-to-end/test_api_endpoint.py b/tests/end-to-end/test_api_endpoint.py index a772254f7..8d06c4617 100755 --- a/tests/end-to-end/test_api_endpoint.py +++ b/tests/end-to-end/test_api_endpoint.py @@ -1,7 +1,6 @@ #!/bin/python3 -import json +# import json import os -import subprocess import sys import unittest @@ -26,7 +25,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -43,20 +43,23 @@ def setUp(self): count = 0 while True: try: - result = self._df_api.loginByPassword(username, password) + self._df_api.loginByPassword(username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate assert count < 3 def test_endpoint_set_and_default(self): - endpoint = os.environ.get("DATAFED_USER89_GLOBUS_UUID") if endpoint is None: self.fail( - "Cannot run end-to-end tests with Python CLI requires setting env variable DATAFED_REPO_ENDPOINT_UUID so that we know what to set the default endpoint to. This should be the same endpoint that the users have an allocation on... users datafed89 and datafed99" + "Cannot run end-to-end tests with Python CLI requires setting " + " env variable DATAFED_REPO_ENDPOINT_UUID so that we know what to" + " set the default endpoint to. This should be the same endpoint" + " that the users have an allocation on... 
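
Each of these tests authenticates with the same retry loop, and the diff narrows the bare `except:` to `except BaseException:`. That satisfies flake8 (E722, bare except) without changing behaviour, since a bare `except` already catches `BaseException`. A minimal sketch of the loop, with the credential lookup elided because it is not shown in this hunk:

```python
def login_with_retries(df_api, username, password, attempts=3):
    """Try to authenticate a few times before giving up, as the tests do."""
    count = 0
    while True:
        try:
            df_api.loginByPassword(username, password)
            break
        except BaseException:  # bare `except:` is flagged by flake8 (E722)
            pass
        count += 1
        # Give up after `attempts` tries, mirroring `assert count < 3`.
        assert count < attempts
```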
users datafed89 and" + " datafed99" ) if not self._df_api.endpointDefaultGet(): diff --git a/tests/end-to-end/test_api_record.py b/tests/end-to-end/test_api_record.py index a8ffde29e..853ed69a9 100755 --- a/tests/end-to-end/test_api_record.py +++ b/tests/end-to-end/test_api_record.py @@ -1,7 +1,6 @@ #!/bin/python3 import json import os -import subprocess import sys import time import unittest @@ -31,7 +30,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -50,7 +50,7 @@ def setUp(self): try: result = self._df_api.loginByPassword(self._username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate @@ -64,7 +64,9 @@ def setUp(self): if not path_to_repo_form.endswith(".json"): self.fail( - "repo create test requires that the repo form exist and be provided as a json file, the test uses the environment variable DATAFED_REPO_PATH to search for the repo form" + "repo create test requires that the repo form exist and be " + "provided as a json file, the test uses the environment " + "variable DATAFED_REPO_PATH to search for the repo form" ) self._repo_form = {} @@ -119,7 +121,9 @@ def setUp(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to create an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, attempt " + "to create an allocation after 3 seconds failed, make sure " + "all services are running." ) break time.sleep(1) @@ -159,7 +163,6 @@ def test_record_create_delete(self): put_task = self._df_api.dataPut(new_alias, esnet_uuid + "/1M.dat") - data_put_esnet_pass = False task_id = put_task[0].task.id task_result = self._df_api.taskView(task_id) @@ -215,7 +218,9 @@ def tearDown(self): if count > 2: print(task_result) self.fail( - "Something went wrong task was unable to complete, attempt to delete an allocation after 3 seconds failed, make sure all services are running." + "Something went wrong task was unable to complete, attempt" + " to delete an allocation after 3 seconds failed, make sure" + " all services are running." 
) break time.sleep(1) diff --git a/tests/end-to-end/test_api_repo.py b/tests/end-to-end/test_api_repo.py index 3cdbae17c..71858dd8e 100755 --- a/tests/end-to-end/test_api_repo.py +++ b/tests/end-to-end/test_api_repo.py @@ -1,10 +1,10 @@ #!/bin/python3 import json import os -import subprocess import sys import unittest + # Should only run after api login password test has been run class TestDataFedPythonAPIRepo(unittest.TestCase): def setUp(self): @@ -24,7 +24,8 @@ def setUp(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -43,7 +44,7 @@ def setUp(self): try: result = self._df_api.loginByPassword(username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate @@ -58,7 +59,9 @@ def setUp(self): if not path_to_repo_form.endswith(".json"): self.fail( - "repo create test requires that the repo form exist and be provided as a json file, the test uses the environment variable DATAFED_REPO_PATH to search for the repo form" + "repo create test requires that the repo form exist and be " + "provided as a json file, the test uses the environment " + "variable DATAFED_REPO_PATH to search for the repo form" ) self._repo_form = {} @@ -66,12 +69,10 @@ def setUp(self): self._repo_form = json.load(json_file) def test_repo_list(self): - result = self._df_api.repoList(list_all=True) self.assertEqual(len(result[0].repo), 0) def test_repo_create_delete(self): - result = self._df_api.repoCreate( repo_id=self._repo_form["id"], title=self._repo_form["title"], diff --git a/tests/end-to-end/test_api_user_login.py b/tests/end-to-end/test_api_user_login.py index ebb1d1873..c27152c84 100755 --- a/tests/end-to-end/test_api_user_login.py +++ b/tests/end-to-end/test_api_user_login.py @@ -1,7 +1,6 @@ #!/bin/python3 -import json +# import json import os -import subprocess import sys import unittest @@ -25,7 +24,8 @@ def test_login_with_password(self): from datafed.CommandLib import API except ImportError: print( - "datafed was not found, make sure you are running script with PYTHONPATH set to the location of the package in the datafed repo" + "datafed was not found, make sure you are running script with " + "PYTHONPATH set to the location of the package in the datafed repo" ) sys.exit(1) @@ -44,7 +44,7 @@ def test_login_with_password(self): try: df_api.loginByPassword(username, password) break - except: + except BaseException: pass count += 1 # Try three times to authenticate diff --git a/web/datafed-ws.js b/web/datafed-ws.js index 32e5a7120..7293e111d 100755 --- a/web/datafed-ws.js +++ b/web/datafed-ws.js @@ -66,7 +66,8 @@ var g_host, g_ver_api_major, g_ver_api_minor, g_ver_api_patch, - g_tls; + g_tls, + g_google_analytics; const nullfr = Buffer.from([]); @@ -294,7 +295,7 @@ app.get('/ui/welcome', (a_req, a_resp) => { const nonce = crypto.randomBytes(16).toString('base64'); a_resp.locals.nonce = nonce; a_resp.setHeader('Content-Security-Policy', `script-src 'nonce-${nonce}' auth.globus.org`); - a_resp.render('index',{nonce:a_resp.locals.nonce, theme:theme,version:g_version,test_mode:g_test}); + a_resp.render('index',{nonce:a_resp.locals.nonce, theme:theme,version:g_version,test_mode:g_test,...g_google_analytics}); } }); @@ -306,7 +307,7 @@ app.get('/ui/main', (a_req, a_resp) => { 
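
The repo test builds its `repoCreate` call from a JSON form located via the `DATAFED_REPO_PATH` environment variable. The sketch below assumes the form carries at least the `id` and `title` fields visible in the diff; the real test forwards more of the form than shown here, and it reports failures through `self.fail` rather than an exception.

```python
import json
import os


def load_repo_form():
    """Load the repo form the tests read from DATAFED_REPO_PATH."""
    path_to_repo_form = os.environ.get("DATAFED_REPO_PATH", "")
    if not path_to_repo_form.endswith(".json"):
        raise RuntimeError(
            "DATAFED_REPO_PATH must point at the repo form .json file"
        )
    with open(path_to_repo_form) as json_file:
        return json.load(json_file)


def create_repo(df_api, form):
    # Only the fields visible in this hunk are passed; the actual test
    # supplies additional keyword arguments from the form.
    return df_api.repoCreate(repo_id=form["id"], title=form["title"])
```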
const nonce = crypto.randomBytes(16).toString('base64'); a_resp.locals.nonce = nonce; a_resp.setHeader('Content-Security-Policy', `script-src 'nonce-${nonce}'`); - a_resp.render('main',{nonce:a_resp.locals.nonce,user_uid:a_req.session.uid,theme:theme,version:g_version,test_mode:g_test}); + a_resp.render('main',{nonce:a_resp.locals.nonce,user_uid:a_req.session.uid,theme:theme,version:g_version,test_mode:g_test,...g_google_analytics}); }else{ // datafed-user cookie not set, so clear datafed-id before redirect //a_resp.clearCookie( 'datafed-id' ); @@ -333,7 +334,7 @@ app.get('/ui/register', (a_req, a_resp) => { const nonce = crypto.randomBytes(16).toString('base64'); a_resp.locals.nonce = nonce; a_resp.setHeader('Content-Security-Policy', `script-src 'nonce-${nonce}' auth.globus.org`); - a_resp.render('register', {nonce:a_resp.locals.nonce, uid: a_req.session.uid, uname: clean, theme: theme, version: g_version, test_mode: g_test }); + a_resp.render('register', {nonce:a_resp.locals.nonce, uid: a_req.session.uid, uname: clean, theme: theme, version: g_version, test_mode: g_test, ...g_google_analytics }); } }); @@ -368,7 +369,7 @@ app.get('/ui/error', (a_req, a_resp) => { const nonce = crypto.randomBytes(16).toString('base64'); a_resp.locals.nonce = nonce; a_resp.setHeader('Content-Security-Policy', `script-src 'nonce-${nonce}'`); - a_resp.render('error',{nonce:a_resp.locals.nonce,theme:"light",version:g_version,test_mode:g_test}); + a_resp.render('error',{nonce:a_resp.locals.nonce,theme:"light",version:g_version,test_mode:g_test,...g_google_analytics}); }); /* This is the OAuth redirect URL after a user authenticates with Globus @@ -1824,6 +1825,10 @@ function loadSettings(){ if ( !g_extern_url ){ g_extern_url = "http"+(g_tls?'s':'')+"://" + g_host + ":" + g_port; } + + if ( config.operations ){ + g_google_analytics = { enableGoogleAnalytics: config.operations.google_analytics_tag !== '', googleAnalyticsTag: config.operations.google_analytics_tag }; + } }catch( e ){ logger.error(loadSettings.name, getCurrentLineNumber(), "Could not open/parse configuration file: " + process.argv[2] ); logger.error(loadSettings.name, getCurrentLineNumber(), e.message ); diff --git a/web/views/head.ect b/web/views/head.ect index 25d7c2f15..6017f3b6b 100644 --- a/web/views/head.ect +++ b/web/views/head.ect @@ -1,3 +1,14 @@ +<% if @enableGoogleAnalytics : %> + + + +<% end %>
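
The web changes merge an optional Google Analytics settings object into each page's render locals and guard the template block in `web/views/head.ect` on `enableGoogleAnalytics`. The web service itself is JavaScript, and the analytics snippet injected by the template is not shown in this diff; the sketch below only restates the enable/disable decision in Python for illustration, using the key names from the diff (`operations.google_analytics_tag`, `enableGoogleAnalytics`, `googleAnalyticsTag`).

```python
def google_analytics_locals(config):
    """Restate, in Python and for illustration only, the loadSettings() logic:
    analytics is enabled only when operations.google_analytics_tag is a
    non-empty string, and the tag is passed through to the templates."""
    operations = config.get("operations") or {}
    tag = operations.get("google_analytics_tag", "")
    return {"enableGoogleAnalytics": tag != "", "googleAnalyticsTag": tag}


# Each page handler merges these keys into its existing render locals,
# which is what the `...g_google_analytics` spread does in datafed-ws.js.
page_locals = {
    "theme": "light",
    "test_mode": False,
    **google_analytics_locals({"operations": {"google_analytics_tag": ""}}),
}
assert page_locals["enableGoogleAnalytics"] is False  # empty tag disables it
```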