Commit

Merge remote-tracking branch 'base/txn-refactroing' into indy-1319
ArtObr committed May 23, 2018
2 parents 6cf8952 + f644556 commit 430f25f
Showing 34 changed files with 1,055 additions and 204 deletions.
12 changes: 3 additions & 9 deletions README.md
@@ -9,7 +9,7 @@
* [How to Install a Test Network](#how-to-install-a-test-network)
* [How to Start Working with the Code](#how-to-start-working-with-the-code)
* [How to Start Indy Client CLI](#how-to-start-indy-client-cli)
* [Continues integration and delivery](#continues-integration-and-delivery)
* [Continuous integration and delivery](#continues-integration-and-delivery)
* [How to send a PR](#how-to-send-a-pr)
* [Docs and links](#docs-and-links)

@@ -129,9 +129,9 @@ Note: For Windows, we recommended using either [cmder](http://cmder.net/) or [co
indy
```

## Continues Integration and Delivery
## Continuous Integration and Delivery

Please have a look at [Continues integration/delivery](docs/ci-cd.md)
Please have a look at [Continuous integration/delivery](docs/ci-cd.md)

## How to send a PR

@@ -181,9 +181,3 @@ If you made changes in both indy-plenum and indy-node, you need to do the follow
- [Indy file folder structure guideline](docs/indy-file-structure-guideline.md)
- [Helper Scripts](docs/helper-scripts.md)
- [Pool Upgrade](docs/pool-upgrade.md)






1 change: 1 addition & 0 deletions build-scripts/ubuntu-1604/postinst_node
@@ -178,6 +178,7 @@ TEST_MODE=
HOLD_EXT=
EOF
fi
sed -ie '/HOLD_EXT/{ s/\\//g}' $GENERAL_CONFIG_DIR/node_control.conf

mv /usr/local/bin/upgrade_indy_node_ubuntu1604.sh /usr/local/bin/upgrade_indy_node
mv /usr/local/bin/upgrade_indy_node_ubuntu1604_test.sh /usr/local/bin/upgrade_indy_node_test
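
The added `sed` line cleans up the `HOLD_EXT` entry in the freshly written `node_control.conf`: on any line that mentions `HOLD_EXT`, it deletes every backslash. A rough Python sketch of that intended effect (the function name and the way the file is rewritten are illustrative, not part of the packaging scripts):

```
import re

def strip_hold_ext_backslashes(conf_path):
    # Same idea as: sed -ie '/HOLD_EXT/{ s/\\//g}' <conf_path>
    # i.e. on every line containing HOLD_EXT, drop all backslashes.
    with open(conf_path) as f:
        lines = f.readlines()
    cleaned = [re.sub(r"\\", "", line) if "HOLD_EXT" in line else line
               for line in lines]
    with open(conf_path, "w") as f:
        f.writelines(cleaned)
```
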
6 changes: 3 additions & 3 deletions ci/code-validation.dockerfile
@@ -14,6 +14,6 @@ RUN apt-get update -y && apt-get install -y \
RUN pip3 install -U \
'pip<10.0.0' \
setuptools \
pep8 \
pep8-naming \
flake8
pep8==1.7.1 \
pep8-naming==0.6.1 \
flake8==3.5.0
@@ -91,7 +91,7 @@ def migrate_storage(level_db_dir, rocks_db_dir, db_name, is_db_int_keys):

try:
for key, val in leveldb_storage.iterator():
rocksdb_storage.put(key, val)
rocksdb_storage.put(bytes(key), bytes(val))
except Exception:
logger.error(traceback.print_exc())
logger.error("Could not put key/value to RocksDB storage '{}'".format(db_name))
38 changes: 19 additions & 19 deletions data/migrations/deb/disabled_1_0_97_to_1_0_96.py
@@ -20,9 +20,9 @@
logger = getlogger()


def __migrate_ledger(data_directory,
old_ledger_file, new_ledger_file,
serializer: MappingSerializer = None):
def _migrate_ledger(data_directory,
old_ledger_file, new_ledger_file,
serializer: MappingSerializer = None):
"""
Test for the directory, open old and new ledger, migrate data, rename directories
"""
@@ -72,8 +72,8 @@ def __migrate_ledger(data_directory,
os.path.join(data_directory, old_ledger_file))


def __open_old_ledger(data_directory, old_ledger_file,
hash_store_name, serializer):
def _open_old_ledger(data_directory, old_ledger_file,
hash_store_name, serializer):
# open old Ledger with leveldb hash store (to re-init it)
old_txn_log_store = ChunkedFileStore(data_directory,
old_ledger_file,
@@ -121,31 +121,31 @@ def migrate_all_hash_stores(node_data_directory):

# open new Ledgers
fields = getTxnOrderedFields()
__open_old_ledger(node_data_directory, config.poolTransactionsFile,
'pool', serializer=JsonSerializer())
__open_old_ledger(node_data_directory, config.domainTransactionsFile,
'domain', serializer=CompactSerializer(fields=fields))
__open_old_ledger(node_data_directory, config.configTransactionsFile,
'config', serializer=JsonSerializer())
_open_old_ledger(node_data_directory, config.poolTransactionsFile,
'pool', serializer=JsonSerializer())
_open_old_ledger(node_data_directory, config.domainTransactionsFile,
'domain', serializer=CompactSerializer(fields=fields))
_open_old_ledger(node_data_directory, config.configTransactionsFile,
'config', serializer=JsonSerializer())


def migrate_all_ledgers_for_node(node_data_directory):
# using default ledger names
__migrate_ledger(node_data_directory,
config.poolTransactionsFile, config.poolTransactionsFile,
serializer=JsonSerializer())
__migrate_ledger(
_migrate_ledger(node_data_directory,
config.poolTransactionsFile, config.poolTransactionsFile,
serializer=JsonSerializer())
_migrate_ledger(
node_data_directory,
config.configTransactionsFile,
config.configTransactionsFile,
serializer=JsonSerializer())

# domain ledger uses custom CompactSerializer and old file name
fields = getTxnOrderedFields()
__migrate_ledger(node_data_directory,
config.domainTransactionsFile.replace(
'domain_', ''), config.domainTransactionsFile,
serializer=CompactSerializer(fields=fields))
_migrate_ledger(node_data_directory,
config.domainTransactionsFile.replace(
'domain_', ''), config.domainTransactionsFile,
serializer=CompactSerializer(fields=fields))


def migrate_all_states(node_data_directory):
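
Both migration modules rename their helpers from a double leading underscore (`__migrate_ledger`, `__open_old_ledger`, `__open_new_ledger`) to a single one. At module level the extra underscore buys nothing, since Python only mangles `__name` identifiers inside class bodies; a single leading underscore is the usual marker for an internal helper and is easier to reference from other modules and tests. A small illustration of that distinction (all names here are made up):

```
class Migrator:
    def __step(self):          # name-mangled to _Migrator__step
        return "class-private"

def __module_helper():         # not mangled; just an awkward name
    return "module-level"

def _module_helper():          # the conventional "internal" marker
    return "module-level"

m = Migrator()
print(m._Migrator__step())     # the mangled name is still reachable
print(_module_helper())
print(__module_helper())       # works too; the extra underscore gained nothing
```
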
34 changes: 17 additions & 17 deletions data/migrations/deb/helper_1_0_96_to_1_0_97.py
@@ -23,9 +23,9 @@
logger = getlogger()


def __migrate_ledger(data_directory,
old_ledger_file, new_ledger_file,
serializer: MappingSerializer = None):
def _migrate_ledger(data_directory,
old_ledger_file, new_ledger_file,
serializer: MappingSerializer = None):
"""
Test for the directory, open old and new ledger, migrate data, rename directories
"""
@@ -88,7 +88,7 @@ def __migrate_ledger(data_directory,
os.path.join(data_directory, new_ledger_file)))


def __open_new_ledger(data_directory, new_ledger_file, hash_store_name):
def _open_new_ledger(data_directory, new_ledger_file, hash_store_name):
# open new Ledger with leveldb hash store (to re-init it)
logger.info("Open new ledger folder: {}".format(
os.path.join(data_directory, new_ledger_file)))
@@ -128,30 +128,30 @@ def migrate_all_hash_stores(node_data_directory):
os.remove(old_merkle_leaves_config_bin)

# open new Ledgers
__open_new_ledger(node_data_directory, config.poolTransactionsFile, 'pool')
__open_new_ledger(node_data_directory,
config.domainTransactionsFile, 'domain')
__open_new_ledger(node_data_directory,
config.configTransactionsFile, 'config')
_open_new_ledger(node_data_directory, config.poolTransactionsFile, 'pool')
_open_new_ledger(node_data_directory,
config.domainTransactionsFile, 'domain')
_open_new_ledger(node_data_directory,
config.configTransactionsFile, 'config')


def migrate_all_ledgers_for_node(node_data_directory):
# using default ledger names
__migrate_ledger(node_data_directory,
config.poolTransactionsFile, config.poolTransactionsFile,
serializer=JsonSerializer())
__migrate_ledger(
_migrate_ledger(node_data_directory,
config.poolTransactionsFile, config.poolTransactionsFile,
serializer=JsonSerializer())
_migrate_ledger(
node_data_directory,
config.configTransactionsFile,
config.configTransactionsFile,
serializer=JsonSerializer())

# domain ledger uses custom CompactSerializer and old file name
fields = getTxnOrderedFields()
__migrate_ledger(node_data_directory,
config.domainTransactionsFile.replace(
'domain_', ''), config.domainTransactionsFile,
serializer=CompactSerializer(fields=fields))
_migrate_ledger(node_data_directory,
config.domainTransactionsFile.replace(
'domain_', ''), config.domainTransactionsFile,
serializer=CompactSerializer(fields=fields))


def migrate_all_states(node_data_directory):
2 changes: 1 addition & 1 deletion design/pool_restart_txn.md
@@ -18,7 +18,7 @@ To send POOL_RESTART, fill the field "action" with the value "start".
To cancel the scheduled restart, you should set the field "action" value "cancel".

### POOL_RESTART - restart now
To restart as early as possible, send message without the "datetime" field or put in it value "0" or the past date on this place.
To restart as early as possible, send the message without the "datetime" field, or set it to "0", an empty string (""), or a past date.
The restart is performed immediately and there is no guarantee of receiving an answer with Reply.

### POOL_RESTART - Reply
6 changes: 3 additions & 3 deletions docs/ci-cd.md
@@ -1,4 +1,4 @@
# Continues Integration / Delivery
# Continuous Integration / Delivery

#### Branches

@@ -10,7 +10,7 @@
- Each PR needs to be reviewed.
- PR can be merged only after all tests pass and code is reviewed.

## Continues Integration
## Continuous Integration

- for each PR we execute:
- static code validation
@@ -29,7 +29,7 @@
- Run validation on the root folder of the project: `flake8 .`


## Continues Delivery
## Continuous Delivery

- CD part of the pipeline is defined in `Jenkinsfile.cd` file.
- CD part is run on a private Jenkins server dealing with issuing and uploading new builds.
10 changes: 8 additions & 2 deletions docs/node-monitoring-tools-for-stewards.md
@@ -53,16 +53,22 @@ If you get a email on your [email protected] then `sendmail` is working.

### Install

`$ pip3 install indynotifieremail`
`# pip3 install indynotifieremail`

### Configuration

The spike detection and notification mechanisms are enabled by appending the following line to the `indy_config.py` configuration file:

`SpikeEventsEnabled=True`

The package depends on two environment variables:

- `INDY_NOTIFIER_EMAIL_RECIPIENTS` (required)
- `INDY_NOTIFIER_EMAIL_SENDER` (optional)

Add these variables to `/etc/indy/indy.env` environment file as you are required to set such system environment variables for indy-node service in form described below.
Add these variables to the `/etc/indy/indy.env` environment file; this is where system environment variables for the indy-node service are set, in the form described below.
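
Since `/etc/indy/indy.env` is a plain environment file (one `KEY=value` assignment per line, no `export`), the two variables end up as ordinary assignments. A purely illustrative sketch with placeholder values; the exact value format each variable expects is described below:

```
# /etc/indy/indy.env  (placeholder sketch, not a working configuration)
INDY_NOTIFIER_EMAIL_RECIPIENTS=<recipients, in the format described below>
INDY_NOTIFIER_EMAIL_SENDER=<sender address, optional>
```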

**INDY_NOTIFIER_EMAIL_RECIPIENTS**

2 changes: 1 addition & 1 deletion docs/requests.md
@@ -1333,7 +1333,7 @@ The command to restart all nodes at the time specified in field "datetime"(sent
- `datetime` (string):

Restart time in datetime format.
To restart as early as possible, send message without the "datetime" field or put in it value "0" or the past date on this place.
To restart as early as possible, send the message without the "datetime" field, or set it to "0", an empty string (""), or a past date.
The restart is performed immediately and there is no guarantee of receiving an answer with Reply.


28 changes: 28 additions & 0 deletions docs/transactions.md
@@ -733,3 +733,31 @@ Command to change Pool's configuration
}
}
```


## Action Transactions

#### POOL_RESTART
POOL_RESTART is the command (sent by a Trustee) to restart all nodes at the time specified in the "datetime" field.

- `datetime` (string):

Restart time in datetime format.
To restart as early as possible, send the message without the "datetime" field, or set it to "0", an empty string (""), or a past date.
The restart is performed immediately and there is no guarantee of receiving an answer with Reply.


- `action` (enum: `start` or `cancel`):

Starts or cancels the Restart.

**Example:**
```
{
"reqId": 98262,
"type": "118",
"identifier": "M9BJDuS24bqbJNvBRsoGg3",
"datetime": "2018-03-29T15:38:34.464106+00:00",
"action": "start"
}
```
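
For comparison, a cancellation reuses the same envelope with `action` set to `cancel`, and an immediate restart simply omits `datetime` (or sends `"0"` or `""`). A hypothetical request mirroring the example above (the `reqId` is made up; the DID is the one from that example):

```
{
    "reqId": 98263,
    "type": "118",
    "identifier": "M9BJDuS24bqbJNvBRsoGg3",
    "action": "cancel"
}
```
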
2 changes: 1 addition & 1 deletion getting-started.md
@@ -173,7 +173,7 @@ This is a friendly name for the connection that Alice has been invited to accept
DID: not yet assigned
```

**DID** (**distributed identifier**) is an opaque, unique sequences of bits, (like UUIDs or GUIDs) that get generated when a user tries to accept the connection request. That DID will be sent to Faber College, and used by Faber College to reference Alice in secure interactions.
**DID** (**Decentralized Identifier**) is an opaque, unique sequence of bits (like UUIDs or GUIDs) that gets generated when a user tries to accept the connection request. That DID will be sent to Faber College, and used by Faber College to reference Alice in secure interactions.
Each connection request on the Indy network establishes a **pairwise relationship** when accepted. A pairwise relationship is a unique relationship between two identity owners (e.g., Faber and Alice). The relationship between them is not shareable with others; it is unique to those two parties in that each pairwise relationship uses different DIDs. (In other circles you may see this defined as two sets of data working in conjunction with each other to perform a specific function, such as in a "public" key and a "private" key working together. This is _not_ how it is defined within the Indy code base.) Alice won’t use this DID with other relationships. By having independent pairwise relationships, Alice reduces the ability for others to correlate her activities across multiple interactions.

```
10 changes: 5 additions & 5 deletions indy_client/client/wallet/wallet.py
@@ -9,7 +9,7 @@
from ledger.util import F
from plenum.client.wallet import Wallet as PWallet
from plenum.common.did_method import DidMethods
from plenum.common.txn_util import get_seq_no
from plenum.common.txn_util import get_seq_no, get_reply_itentifier, get_reply_txntype, get_reply_nym
from plenum.common.util import randomString
from stp_core.common.log import getlogger
from plenum.common.constants import TXN_TYPE, TARGET_NYM, DATA, \
@@ -235,11 +235,11 @@ def handleIncomingReply(self, observer_name, reqId, frm, result,
replies
:return:
"""
preparedReq = self._prepared.get((result[IDENTIFIER], reqId))
preparedReq = self._prepared.get(get_reply_itentifier(result), reqId)
if not preparedReq:
raise RuntimeError('no matching prepared value for {},{}'.
format(result[IDENTIFIER], reqId))
typ = result.get(TXN_TYPE)
format(get_reply_itentifier(result), reqId))
typ = get_reply_txntype(result)
if typ and typ in self.replyHandler:
self.replyHandler[typ](result, preparedReq)
# else:
@@ -263,7 +263,7 @@ def _getAttrReply(self, result, preparedReq):
logger.debug("No attribute found")

def _nymReply(self, result, preparedReq):
target = result[TARGET_NYM]
target = get_reply_nym(result)
idy = self._trustAnchored.get(target)
if idy:
idy.seqNo = get_seq_no(result)
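
The wallet change swaps direct dictionary access (`result[IDENTIFIER]`, `result.get(TXN_TYPE)`, `result[TARGET_NYM]`) for accessor helpers imported from `plenum.common.txn_util`, so only one place has to know where those fields live after the transaction-format refactoring. A minimal sketch of the accessor idea (the helper and the nested layout below are illustrative, not plenum's actual reply structure):

```
def _reply_field(reply, field):
    # Tolerate both a flat reply and one wrapped in a "result" envelope,
    # so callers never hard-code the layout.
    result = reply.get("result", reply)
    return result.get(field)

# Callers stay format-agnostic:
reply = {"result": {"identifier": "someDID", "type": "1"}}
print(_reply_field(reply, "identifier"))   # someDID
print(_reply_field(reply, "type"))         # 1
```
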
3 changes: 1 addition & 2 deletions indy_client/test/agent/conftest.py
@@ -39,8 +39,7 @@
from indy_client.test.helper import createNym, TestClient

# noinspection PyUnresolvedReferences
from indy_node.test.conftest import nodeSet, updatedDomainTxnFile, \
genesisTxns
from indy_node.test.conftest import nodeSet, genesisTxns

# noinspection PyUnresolvedReferences
from plenum.test.conftest import poolTxnStewardData, poolTxnStewardNames
6 changes: 3 additions & 3 deletions indy_client/test/agent/test_general_use_case.py
@@ -13,11 +13,11 @@

# noinspection PyUnresolvedReferences
from indy_node.test.conftest import tdir, nodeSet, tconf, \
updatedPoolTxnData, updatedDomainTxnFile, txnPoolNodeSet, poolTxnData, \
dirName, tdirWithDomainTxns, tdirWithPoolTxns, \
updatedPoolTxnData, txnPoolNodeSet, poolTxnData, \
dirName, tdirWithPoolTxns, \
domainTxnOrderedFields, genesisTxns, stewardWallet, poolTxnStewardData, \
poolTxnStewardNames, trusteeWallet, trusteeData, poolTxnTrusteeNames, \
patchPluginManager, txnPoolNodesLooper, tdirWithPoolTxns, \
patchPluginManager, txnPoolNodesLooper, \
poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, testNodeClass, \
genesisTxns

1 change: 1 addition & 0 deletions indy_client/test/anon_creds/test_schema.py
@@ -9,6 +9,7 @@
from stp_core.common.log import getlogger

logger = getlogger()
whitelist = ['Consensus for ReqId:']


def test_submit_schema(submitted_schema, schema):
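
The functional change in this test module is the new module-level `whitelist`. In these test suites such a list is typically read by a log-checking fixture so that expected error lines (here, messages about consensus for a request id) do not fail the test. A generic sketch of the pattern, not plenum's actual fixture:

```
import logging

def unexpected_errors(records, whitelist):
    # Keep only ERROR-level records whose message is not covered by the
    # whitelist; a conftest fixture would fail the test if any remain.
    return [r for r in records
            if r.levelno >= logging.ERROR
            and not any(w in r.getMessage() for w in whitelist)]
```
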
