Merge pull request #87 from citrusleaf/TOOLS-1516-support-latencies-cmd
Tools 1516 support latencies cmd
jdogmcsteezy authored Sep 29, 2020
2 parents 10f9bdf + e976814 commit 150230b
Showing 18 changed files with 18,456 additions and 2,908 deletions.
2,897 changes: 1,986 additions & 911 deletions lib/basiccontroller.py

Large diffs are not rendered by default.

692 changes: 533 additions & 159 deletions lib/client/node.py

Large diffs are not rendered by default.

1,846 changes: 1,288 additions & 558 deletions lib/collectinfocontroller.py

Large diffs are not rendered by default.

448 changes: 322 additions & 126 deletions lib/getcontroller.py

Large diffs are not rendered by default.

954 changes: 659 additions & 295 deletions lib/utils/common.py

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion lib/utils/constants.py
@@ -101,4 +101,5 @@ def __getitem__(self, name):


# server versions with critical changes
SERVER_NEW_HISTOGRAM_FIRST_VERSION = "4.2"
SERVER_NEW_LATENCIES_CMD_FIRST_VERSION = "5.1"
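
The new SERVER_NEW_LATENCIES_CMD_FIRST_VERSION constant presumably gates whether a node is queried with the new "latencies:" info command or falls back to the legacy "latency:" command. A minimal sketch of such a version gate; the helper name and node_build argument are hypothetical and not taken from this PR:

from distutils.version import LooseVersion

from lib.utils.constants import SERVER_NEW_LATENCIES_CMD_FIRST_VERSION


def use_latencies_cmd(node_build):
    # Hypothetical helper: servers 5.1 and newer expose "latencies:", while
    # older servers only support the legacy "latency:" info command.
    return LooseVersion(node_build) >= LooseVersion(
        SERVER_NEW_LATENCIES_CMD_FIRST_VERSION
    )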
102 changes: 75 additions & 27 deletions lib/utils/util.py
@@ -14,6 +14,7 @@

from __future__ import print_function
from future import standard_library

standard_library.install_aliases()
from builtins import filter
from builtins import str
@@ -52,8 +53,7 @@ def wrapper(func, *args, **kwargs):
# Store original stack trace/exception to be re-thrown later.
self.exc = e

self._worker = threading.Thread(target=wrapper,
args=args, kwargs=kwargs)
self._worker = threading.Thread(target=wrapper, args=args, kwargs=kwargs)

def start(self):
self._worker.start()
@@ -72,19 +72,18 @@ def shell_command(command):
command is a list of ['cmd','arg1','arg2',...]
"""
command = pipes.quote(" ".join(command))
command = ['bash', '-c', "'%s'" % (command)]
command = ["bash", "-c", "'%s'" % (command)]
try:
p = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

out, err = p.communicate()
except Exception:
return '', 'error'
return "", "error"
else:
return bytes_to_str(out), bytes_to_str(err)


def capture_stdout(func, line=''):
def capture_stdout(func, line=""):
"""
Redirecting the stdout to use the output elsewhere
"""
@@ -104,10 +103,12 @@ def capture_stdout(func, line=''):
def compile_likes(likes):
try:
# python2.7
likes = ["(" + like.translate(None, '\'"') + ")" for like in likes]
likes = ["(" + like.translate(None, "'\"") + ")" for like in likes]
except Exception:
# python3
likes = ["(" + like.translate(str.maketrans('','','\'"')) + ")" for like in likes]
likes = [
"(" + like.translate(str.maketrans("", "", "'\"")) + ")" for like in likes
]

likes = "|".join(likes)
likes = re.compile(likes)
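
For reference, compile_likes strips quotes from each pattern, wraps it in a capture group, and joins the groups into one alternation regex. A hypothetical usage sketch, assuming the function returns the compiled pattern (the tail of the hunk is collapsed); the pattern names are illustrative only:

likes = compile_likes(["read", "write"])

# Equivalent to re.compile("(read)|(write)"), so any key containing
# either substring matches.
bool(likes.search("batch-read-latency"))   # True
bool(likes.search("client-tsvc-error"))    # False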
@@ -161,7 +162,13 @@ def _fetch_line_clear_dict(line, arg, return_type, default, keys, d):
def get_arg_and_delete_from_mods(line, arg, return_type, default, modifiers, mods):
try:
val = _fetch_line_clear_dict(
line=line, arg=arg, return_type=return_type, default=default, keys=modifiers, d=mods)
line=line,
arg=arg,
return_type=return_type,
default=default,
keys=modifiers,
d=mods,
)
line.remove(arg)
if val:
line.remove(str(val))
@@ -182,6 +189,7 @@ def check_arg_and_delete_from_mods(line, arg, default, modifiers, mods):
val = default
return val


CMD_FILE_SINGLE_LINE_COMMENT_START = "//"
CMD_FILE_MULTI_LINE_COMMENT_START = "/*"
CMD_FILE_MULTI_LINE_COMMENT_END = "*/"
@@ -192,7 +200,7 @@ def parse_commands(file_or_queries, command_end_char=";", is_file=True):
try:
commented = False
if is_file:
lines = open(file_or_queries, 'r').readlines()
lines = open(file_or_queries, "r").readlines()
else:
lines = file_or_queries.split("\n")

Expand All @@ -212,9 +220,9 @@ def parse_commands(file_or_queries, command_end_char=";", is_file=True):
continue
try:
if line.endswith(command_end_char):
line = line.replace('\n', '')
line = line.replace("\n", "")
else:
line = line.replace('\n', ' ')
line = line.replace("\n", " ")
commands = commands + line
except Exception:
commands = line
@@ -232,8 +240,13 @@ def parse_queries(file, delimiter=";", is_file=True):


def set_value_in_dict(d, key, value):
if (d is None or not isinstance(d, dict) or not key or (not value and value != 0 and value != False)
or isinstance(value, Exception)):
if (
d is None
or not isinstance(d, dict)
or not key
or (not value and value != 0 and value != False)
or isinstance(value, Exception)
):
return

d[key] = value
@@ -248,7 +261,6 @@ def _cast(value, return_type=None):
if not return_type or value is None:
return value, True


try:
if return_type == bool and isinstance(value, future_basestring):
if value.lower() == "false":
@@ -265,6 +277,7 @@

return None, False


def get_value_from_dict(d, keys, default_value=None, return_type=None):
"""
Function takes dictionary and keys to find values inside dictionary.
@@ -311,7 +324,7 @@ def get_values_from_dict(d, re_keys, return_type=None):


def strip_string(search_str):
return search_str.strip().strip("\'\"")
return search_str.strip().strip("'\"")


def flip_keys(orig_data):
@@ -380,7 +393,8 @@ def restructure_sys_data(content, cmd):
if iid not in new_interrrupt_dict[itype]:
new_interrrupt_dict[itype][iid] = {}
new_interrrupt_dict[itype][iid].update(
copy.deepcopy(new_interrrupt))
copy.deepcopy(new_interrrupt)
)
content[n]["device_interrupts"] = new_interrrupt_dict
except Exception as e:
print(e)
@@ -421,7 +435,9 @@ def restructure_sys_data(content, cmd):
return content


def get_value_from_second_level_of_dict(data, keys, default_value=None, return_type=None):
def get_value_from_second_level_of_dict(
data, keys, default_value=None, return_type=None
):
"""
Function takes dictionary and subkeys to find values inside all keys of dictionary.
Returns dictionary containing key and value of input keys
@@ -435,10 +451,13 @@ def get_value_from_second_level_of_dict(data, keys, default_value=None, return_type=None):
if not data[_k] or isinstance(data[_k], Exception):
continue

res_dict[_k] = get_value_from_dict(data[_k], keys, default_value=default_value, return_type=return_type)
res_dict[_k] = get_value_from_dict(
data[_k], keys, default_value=default_value, return_type=return_type
)

return res_dict
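
get_value_from_second_level_of_dict returns one entry per top-level key of the input, looking the subkeys up inside each nested dict. A hypothetical per-node example, assuming get_value_from_dict accepts a tuple of candidate keys as its docstring suggests; the data is illustrative only:

data = {"node1": {"uptime": "100"}, "node2": {"uptime": "200"}}

get_value_from_second_level_of_dict(
    data, ("uptime",), default_value=0, return_type=int
)
# -> {"node1": 100, "node2": 200}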


def get_values_from_second_level_of_dict(data, re_keys, return_type=None):
"""
Function takes dictionary and regular expression subkeys to find values inside all keys of dictionary.
Expand All @@ -458,6 +477,25 @@ def get_values_from_second_level_of_dict(data, re_keys, return_type=None):
return res_dict


# Given a list of keys, returns the nested value in a dict.
def get_nested_value_from_dict(data, keys, default_value=None, return_type=None):
ref = data
for key in keys:
temp_ref = get_value_from_dict(ref, key)

if not temp_ref:
return default_value

ref = temp_ref

val, success = _cast(ref, return_type)

if success:
return val

return ref
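
get_nested_value_from_dict, new in this PR, walks the dict one key at a time, returns the default as soon as any level is missing, and casts the final value when return_type is given. A short hypothetical usage sketch; the data is illustrative and not taken from the PR's tests:

stats = {"histograms": {"read": {"count": "1234"}}}

get_nested_value_from_dict(
    stats, ["histograms", "read", "count"], default_value=0, return_type=int
)
# -> 1234

# A missing intermediate key short-circuits to the default.
get_nested_value_from_dict(
    stats, ["histograms", "write", "count"], default_value=0, return_type=int
)
# -> 0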


def add_dicts(d1, d2):
"""
Function takes two dictionaries and merges those to one dictionary by adding values for same key.
@@ -488,7 +526,7 @@ def pct_to_value(data, d_pct):
if _k not in d_pct:
continue

out_map[_k] = (float(data[_k])/100.0) * float(d_pct[_k])
out_map[_k] = (float(data[_k]) / 100.0) * float(d_pct[_k])

return out_map

@@ -511,27 +549,33 @@ def mbytes_to_bytes(data):
def find_delimiter_in(value):
"""Find a good delimiter to split the value by"""

for d in [';', ':', ',']:
for d in [";", ":", ","]:
if d in value:
return d

return ';'
return ";"


def convert_edition_to_shortform(edition):
"""Convert edition to shortform Enterprise or Community or N/E"""

if edition.lower() in ['enterprise', 'true', 'ee'] or 'enterprise' in edition.lower():
if (
edition.lower() in ["enterprise", "true", "ee"]
or "enterprise" in edition.lower()
):
return "Enterprise"

if edition.lower() in ['community', 'false', 'ce'] or 'community' in edition.lower():
if (
edition.lower() in ["community", "false", "ce"]
or "community" in edition.lower()
):
return "Community"

return "N/E"


def write_to_file(file, data):
f = open(str(file), 'a')
f = open(str(file), "a")
f.write(str(data))
return f.close()

@@ -570,6 +614,7 @@ def is_valid_ip_port(key):

return False


def _is_valid_ipv4_address(address):
try:
socket.inet_pton(socket.AF_INET, address)
Expand All @@ -578,19 +623,21 @@ def _is_valid_ipv4_address(address):
socket.inet_aton(address)
except socket.error:
return False
return address.count('.') == 3
return address.count(".") == 3
except socket.error: # not a valid address
return False

return True


def _is_valid_ipv6_address(address):
try:
socket.inet_pton(socket.AF_INET6, address)
except socket.error: # not a valid address
return False
return True


def is_str(data):
if data is None:
return False
Expand All @@ -612,6 +659,7 @@ def bytes_to_str(data):
# python2.7
return data


def str_to_bytes(data):
try:
# python3
8 changes: 6 additions & 2 deletions lib/view/view.py
@@ -995,7 +995,7 @@ def _update_latency_column_list(data, all_columns):
for column in data["columns"]:
if column[0] == '>':
c = int(column[1:-2])
all_columns.add((c,(column, "%%>%dMs"%c)))
all_columns.add((c,(column, "%%%s"%column)))

elif column[0:2] == "%>":
c = int(column[2:-2])
@@ -1017,7 +1017,11 @@ def _create_latency_row(data, ns=" "):
return rows

@staticmethod
def show_latency(latency, cluster, machine_wise_display=False, show_ns_details=False, like=None, timestamp="", **ignore):
def show_latency(latency, cluster, machine_wise_display=False, show_ns_details=False, like=None, timestamp="", message=None, **ignore):
if message is not None:
for line in message:
CliView.print_result(str(line))

prefixes = cluster.get_node_names()

if like:
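
The new message parameter on show_latency lets the controller print warnings above the latency table, for example when a mixed-version cluster has nodes that cannot serve the new "latencies:" command. A hypothetical call; only the signature comes from this diff, and the data and warning text are illustrative:

CliView.show_latency(
    latency_data,
    cluster,
    show_ns_details=True,
    message=["Warning: latencies are unavailable on nodes running versions < 5.1"],
)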
10 changes: 8 additions & 2 deletions run_tests.sh
@@ -14,12 +14,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Used for testing "show latencies -v" to get micro benchmarks
echo "Configuring additional write & read benchmarks for server 5.1+"
asinfo -v 'set-config:context=namespace;id=test;enable-benchmarks-write=true'
asinfo -v 'set-config:context=namespace;id=test;enable-benchmarks-read=true'

echo "Running unit test cases :"
unit2 discover -s test/unit -t .
# Forced to run with python3 because some tests were not running correctly
python3 -m unittest2 discover -s test/unit -t .

echo
echo "Running e2e test cases :"
unit2 discover -s test/e2e -t .
python3 -m unittest2 discover -s test/e2e -t .

echo
echo "Running asinfo test cases :"