Skip to content

Commit

Permalink
Merge branch 'master' of github.com:hyperledger/indy-node into pa-ec2…
Browse files Browse the repository at this point in the history
…-spot
  • Loading branch information
andkononykhin committed Nov 30, 2018
2 parents 5f1f199 + 345e0d2 commit b4c0632
Show file tree
Hide file tree
Showing 5 changed files with 149 additions and 73 deletions.
2 changes: 1 addition & 1 deletion indy_node/server/action_req_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def apply(self, req: Request, cons_time: int = None):
result = self._generate_action_result(req)
result[DATA] = self.info_tool.info
result[DATA].update(self.info_tool.memory_profiler)
result[DATA].update(self.info_tool.software_info)
result[DATA].update(self.info_tool._generate_software_info())
result[DATA].update(self.info_tool.extractions)
result[DATA].update(self.info_tool.node_disk_size)
else:
Expand Down
38 changes: 25 additions & 13 deletions indy_node/server/validator_info_tool.py
Original file line number Diff line number Diff line change
@@ -1,34 +1,46 @@
import importlib
import time
import os

from indy_node.__metadata__ import __version__ as node_pgk_version
from plenum.server.validator_info_tool import none_on_fail, \
ValidatorNodeInfoTool as PlenumValidatorNodeInfoTool
from plenum.common.constants import POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CONFIG_LEDGER_ID


class ValidatorNodeInfoTool(PlenumValidatorNodeInfoTool):

@property
def info(self):
info = super().info
ts_str = "{}".format(time.strftime(
"%A, %B %{0}d, %Y %{0}I:%M:%S %p %z".format('#' if os.name == 'nt' else '-'),
time.localtime(info["timestamp"])))
info.update({"Update time": ts_str})
if 'Node_info' in info:
if 'Metrics' in info['Node_info']:
info['Node_info']['Metrics']['transaction-count'].update(
config=self.__config_ledger_size
)
return info
std_ledgers = [POOL_LEDGER_ID, DOMAIN_LEDGER_ID, CONFIG_LEDGER_ID]
other_ledgers = {}
for idx, linfo in self._node.ledgerManager.ledgerRegistry.items():
if linfo.id in std_ledgers:
continue
other_ledgers[linfo.id] = linfo.ledger.size
info['Node_info']['Metrics']['transaction-count'].update(other_ledgers)

@property
@none_on_fail
def software_info(self):
info = super().software_info
if 'Software' in info:
info['Software'].update({'indy-node': self.__node_pkg_version})
return info

@property
@none_on_fail
def __config_ledger_size(self):
return self._node.configLedger.size
def _generate_software_info(self):
    """Extend plenum's software-version info with indy-node specific entries.

    Returns the dict produced by the parent class with the 'Software'
    section augmented by the indy-node package version, a 'sovrin'
    placeholder, and (best-effort) the version of the configured
    upgrade-entry package.
    """
    sfv = super()._generate_software_info()
    # Name-mangled accessor on this class — presumably a property defined
    # further down in the file; TODO confirm.
    sfv['Software'].update({'indy-node': self.__node_pkg_version})
    # Default placeholder; overwritten below only if the upgrade-entry
    # package happens to be 'sovrin' and is importable.
    sfv['Software'].update({'sovrin': "unknown"})
    try:
        # UPGRADE_ENTRY names a top-level package whose version should be
        # reported. Best-effort: the package may be missing or may not
        # expose __version__, in which case the entry is simply omitted.
        pkg = importlib.import_module(self._config.UPGRADE_ENTRY)
        sfv['Software'].update({self._config.UPGRADE_ENTRY: pkg.__version__})
    except Exception:
        pass

    return sfv

@property
@none_on_fail
Expand Down
66 changes: 54 additions & 12 deletions indy_node/test/validator_info/test_validator_info.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import pytest
import importlib

from indy_node.test.state_proof.helper import sdk_submit_operation_and_get_result
from plenum.common.constants import TARGET_NYM, RAW, NAME, VERSION, ORIGIN

# noinspection PyUnresolvedReferences
from plenum.common.ledger import Ledger
from plenum.test.primary_selection.test_primary_selector import FakeLedger
from plenum.test.validator_info.conftest import info, node # qa

from indy_common.constants import TXN_TYPE, DATA, GET_NYM, GET_ATTR, GET_SCHEMA, GET_CLAIM_DEF, REF, SIGNATURE_TYPE
Expand All @@ -25,19 +26,19 @@ def test_validator_info_file_metrics_count_ledger_field_valid(info):
assert info['Node_info']['Metrics']['transaction-count']['config'] == 0


@pytest.mark.skip(reason="info will not be included by default")
def test_validator_info_file_software_indy_node_valid(info):
assert info['Software']['indy-node'] == node_pgk_version
def test_validator_info_bls_key_field_valid(node, info):
assert info['Node_info']['BLS_key']


def test_validator_info_ha_fields_valid(node, info):
assert info['Node_info']['Node_ip']
assert info['Node_info']['Client_ip']
assert info['Node_info']['Node_port']
assert info['Node_info']['Client_port']

@pytest.mark.skip(reason="info will not be included by default")
def test_validator_info_file_software_sovrin_valid(info):
try:
pkg = importlib.import_module('sovrin')
except ImportError:
assert info['Software']['sovrin'] is None
else:
assert info['Software']['sovrin'] == pkg.__version__

def test_validator_info_file_software_indy_node_valid(info):
    """The reported indy-node version must match the installed package version."""
    reported = info['Software']['indy-node']
    assert reported == node_pgk_version


@pytest.fixture()
Expand Down Expand Up @@ -153,3 +154,44 @@ def read_wrapped(txn_type):

def reset_node_total_read_request_number(node):
    # Zero the node's read-request counter so each test observes only the
    # read requests it issues itself.
    node.total_read_request_number = 0


class FakeTree:
    """Minimal merkle-tree stand-in: exposes only a constant root hash."""

    _ROOT_HASH = '222222222222222222222222222'

    @property
    def root_hash(self):
        # Constant value is sufficient — callers only read it, never verify it.
        return self._ROOT_HASH

class FakeLedgerEx(FakeLedger):
    """FakeLedger variant that also stubs the attributes the info tool reads."""

    @property
    def size(self):
        # Fixed transaction count reported for this ledger.
        return 100

    @property
    def tree(self):
        # Fresh stand-in tree on each access; only its root_hash is ever read.
        return FakeTree()

    @property
    def uncommittedTxns(self):
        # No pending (uncommitted) transactions.
        return []

    @property
    def uncommittedRootHash(self):
        return '111111111111111111111111111111111'


def test_validator_info_file_metrics_count_all_ledgers_field_valid(node):
    # Register several non-standard ledgers and check that the
    # transaction-count metric reports an entry for each of them in
    # addition to the three standard ledgers (pool, domain, config).
    new_ids = [444, 555, 666, 777]
    for newid in new_ids:
        node.ledgerManager.addLedger(newid, FakeLedgerEx(newid, newid))
    info = node._info_tool.info
    has_cnt = len(info['Node_info']['Metrics']['transaction-count'])
    assert has_cnt == len(new_ids) + 3


def test_validator_info_update_date_field_valid(info):
assert "Update time" in info
import time
import datetime
from_str = time.mktime(datetime.datetime.strptime(info["Update time"],
"%A, %B %d, %Y %I:%M:%S %p %z").timetuple())
assert int(from_str) == info["timestamp"]
114 changes: 68 additions & 46 deletions scripts/validator-info
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,8 @@ class FloatUnknown(BaseUnknown):
class TimestampUnknown(BaseUnknown):
def _str(self):
return "{}".format(
datetime.datetime.fromtimestamp(self.val).strftime(
"%A, %B %{0}d, %Y %{0}I:%M:%S %p".format(
'#' if os.name == 'nt' else '-'))
)
time.strftime("%A, %B %{0}d, %Y %{0}I:%M:%S %p %z".format('#' if os.name == 'nt' else '-'),
time.localtime(self.val)))


class UptimeUnknown(BaseUnknown):
Expand All @@ -98,10 +96,14 @@ class StateUnknown(BaseUnknown):

class NodesListUnknown(BaseUnknown):
def __init__(self, val):
super().__init__([] if val is None else val)
super().__init__({} if val is None else {rn[0]: rn[1] for rn in val})

def _str(self):
return "\n".join("# {}".format(alias) for alias in self.val)
if self.val:
return "\n".join([" {}\t{}".format(pr_n, "({})".format(r_idx) if r_idx is not None else "")
for pr_n, r_idx in self.val.items()])
else:
return ""

def __iter__(self):
return iter(self.val)
Expand Down Expand Up @@ -257,12 +259,18 @@ class BindingStats(BaseUnknown):
], False)


class TransactionsStats(BaseStats):
shema = [
("config", BaseUnknown),
("ledger", BaseUnknown),
("pool", BaseUnknown)
]
class TransactionsStats(BaseUnknown):
def __init__(self, val):
super().__init__({} if val is None else val)

def _str(self):
if self.val:
return "\n".join([" Total {} Transactions: {}".format(ledger, cnt) for ledger, cnt in self.val.items()])
else:
return ""

def __iter__(self):
return iter(self.val)


class AverageStats(BaseStats):
Expand All @@ -285,8 +293,11 @@ class NodeStats(BaseStats):
("Name", BaseUnknown),
("did", BaseUnknown),
("verkey", BaseUnknown),
("Node_port", BindingStats),
("Client_port", BindingStats),
("BLS_key", BaseUnknown),
("Node_port", BaseUnknown),
("Client_port", BaseUnknown),
("Node_ip", BaseUnknown),
("Client_ip", BaseUnknown),
("Metrics", MetricsStats)
]

Expand Down Expand Up @@ -452,19 +463,17 @@ class ValidatorStats(BaseStats):
# will drop visibility of output
lines = [
"Validator {} is {}".format(self['Node_info']['Name'], self['state']),
"#Current time: {}".format(self['timestamp']),
"Update time: {}".format(self['timestamp']),
"Validator DID: {}".format(self['Node_info']['did']),
"Verification Key: {}".format(self['Node_info']['verkey']),
"Node Port: {}".format(self['Node_info']['Node_port']),
"Client Port: {}".format(self['Node_info']['Client_port']),
"BLS Key: {}".format(self['Node_info']['BLS_key']),
"Node HA: {}:{}".format(self['Node_info']['Node_ip'], self['Node_info']['Node_port']),
"Client HA: {}:{}".format(self['Node_info']['Client_ip'], self['Node_info']['Client_port']),
"Metrics:",
" Uptime: {}".format(self['Node_info']['Metrics']['uptime']),
"# Total Config Transactions: {}".format(
self['Node_info']['Metrics']['transaction-count']['config']),
" Total Ledger Transactions: {}".format(
self['Node_info']['Metrics']['transaction-count']['ledger']),
" Total Pool Transactions: {}".format(
self['Node_info']['Metrics']['transaction-count']['pool']),
" Uptime: {}".format(self['Node_info']['Metrics']['uptime'])
] + [
str(self['Node_info']['Metrics']['transaction-count'])
] + [
" Read Transactions/Seconds: {}".format(
self['Node_info']['Metrics']['average-per-second']['read-transactions']),
" Write Transactions/Seconds: {}".format(
Expand All @@ -473,28 +482,27 @@ class ValidatorStats(BaseStats):
self['Pool_info']['Reachable_nodes_count'],
self['Pool_info']['Total_nodes_count'])
] + [
"# {}".format(alias)
for alias in self['Pool_info']['Reachable_nodes']
str(self['Pool_info']['Reachable_nodes'])
] + [
"Unreachable Hosts: {}/{}".format(
self['Pool_info']['Unreachable_nodes_count'],
self['Pool_info']['Total_nodes_count']
)
] + [
"# {}".format(alias)
for alias in self['Pool_info']['Unreachable_nodes']
str(self['Pool_info']['Unreachable_nodes'])
] + [
"#Software Versions:"
"Software Versions:"
] + [
"# {}: {}".format(pkgName, self['software'][pkgName])
" {}: {}".format(pkgName, self['software'][pkgName])
for pkgName in self['software'].keys()
]

# skip lines with started with '#' if not verbose
# or remove '#' otherwise
# return "\n".join(lines)
return ("\n".join(
[l[(1 if l[0] == '#' else 0):]
for l in lines if self._verbose or l[0] != '#'])
for l in lines if self._verbose or (l and l[0] != '#')])
)


Expand All @@ -519,7 +527,7 @@ async def handle_client(client_reader, client_writer):
else:
logger.debug("Received data: {}".format(data))
stats = json.loads(data.decode())
print(json.dumps(stats, indent=2, cls=NewEncoder))
print(json.dumps(stats, indent=2, cls=NewEncoder, sort_keys=True))


def accept_client(client_reader, client_writer):
Expand Down Expand Up @@ -567,7 +575,7 @@ def nagios(vstats):
"{} {}_Unreachable_Validators unreachable_validators={} {} Unreachable Validators".format(
state,vstats['Node_info']['Name'],vstats['Pool_info']['Unreachable_nodes_count'],vstats['Node_info']['Name'])
]
return "\n".join(lines);
return "\n".join(lines)


def get_stats_from_file(stats, verbose, _json, _nagios):
Expand All @@ -576,7 +584,7 @@ def get_stats_from_file(stats, verbose, _json, _nagios):
vstats = ValidatorStats(stats, verbose)

if _json:
return json.dumps(vstats, indent=2, cls=NewEncoder)
return json.dumps(vstats, indent=2, cls=NewEncoder, sort_keys=True)
if _nagios:
return nagios(vstats)

Expand Down Expand Up @@ -617,7 +625,7 @@ def format_value(value):


def create_print_tree(stats: dict, indent=0, lines=[]):
for key, value in stats.items():
for key, value in sorted(stats.items(), key=lambda x: x[0]):
if isinstance(value, dict):
lines.append(make_indent(indent) + format_key(key))
create_print_tree(value, indent + 1, lines)
Expand Down Expand Up @@ -805,25 +813,36 @@ def main():
# loop.close()
# else:
all_paths = glob(os.path.join(args.basedir, "*_info.json"))
info_paths = []
additional_paths = []

files_by_node = dict()

for path in all_paths:
if path.find("additional") != -1:
additional_paths.append(path)
bn = os.path.basename(path)
if not bn:
continue
node_name = bn.split("_", maxsplit=1)[0]
if "additional" in bn:
files_by_node.setdefault(node_name, {}).update({"additional": path})
elif "version" in bn:
files_by_node.setdefault(node_name, {}).update({"version": path})
else:
info_paths.append(path)
if not info_paths:
files_by_node.setdefault(node_name, {}).update({"info": path})
if not files_by_node:
print('There are no info files in {}'.format(args.basedir))
return

if args.json:
out_json = compile_json_ouput(info_paths + additional_paths)
allf = []
for n, ff in files_by_node.items():
allf.extend([v for k, v in ff.items()])
out_json = compile_json_ouput(allf)
if out_json:
print(json.dumps(out_json))
print(json.dumps(out_json, sort_keys=True))
sys.exit(0)

for file_path in info_paths:
json_data = read_json(file_path)
for node in files_by_node:
inf_ver = [v for k, v in files_by_node[node].items() if k in ["info", "version"]]
json_data = compile_json_ouput(inf_ver)
if json_data:
if args.verbose:
print("{}".format(os.linesep).join(create_print_tree(json_data, lines=[])))
Expand All @@ -832,7 +851,10 @@ def main():

print('\n')
if args.verbose:
for file_path in additional_paths:
for node in files_by_node:
file_path = files_by_node[node].get("additional", "")
if not file_path:
continue
json_data = read_json(file_path)
if json_data:
print("{}".format(os.linesep).join(create_print_tree(json_data, lines=[])))
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
data_files=[(
(BASE_DIR, ['data/nssm_original.exe'])
)],
install_requires=['indy-plenum-dev==1.6.613',
install_requires=['indy-plenum-dev==1.6.617',
'python-dateutil',
'timeout-decorator==0.4.0'],
setup_requires=['pytest-runner'],
Expand Down

0 comments on commit b4c0632

Please sign in to comment.