Commit
Feature: Adding real-time transaction processing. Processing company records and also the issuance of invoices (notas fiscais), including fetching the complete invoice off-chain via an RPC call.
FranciscoThiesen committed Apr 14, 2019
1 parent b0491de commit 52744e4
Showing 2 changed files with 192 additions and 3 deletions.
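At a high level, the two changed files fit together as sketched below. This is an editor's summary of the diff that follows, not code from the commit; every name refers to a function visible in the diff.

# Real-time path added in Mce/DataStore.py:
#
#   catch_up_mempool(height)
#       -> get_tx(rpc_tx_hash)                  # "getrawtransaction" + "decoderawtransaction"
#           -> sdec_transaction_handler(decoded_tx)
#               - on-chain stream item  : print the company record directly
#               - off-chain stream item : rpc("getstreamitem", stream_ref, item_txid)
#                                         fetches the full invoice (nota fiscal)
#
# Experimental console path added in Mce/abe.py:
#
#   exhibit_all_transactions(abe)               # lists the latest 10 transactions
#                                               # with their labels and confirmation counts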
70 changes: 67 additions & 3 deletions Mce/DataStore.py
@@ -1181,6 +1181,9 @@ def import_block(store, b, chain_ids=None, chain=None):

for pos in xrange(len(b['transactions'])):
tx = b['transactions'][pos]

# print("IMPRIMINDO TRANSACAO DE UM JEITO DIFERENTE")
# print("X = %s" % str(tx) )

if 'hash' not in tx:
if chain is None:
@@ -1308,6 +1311,27 @@ def import_block(store, b, chain_ids=None, chain=None):
# This is not an expected error, or our caller may have to
# rewind a block file. Let them deal with it.
raise

def transactions_pretty_print(tx):
    """Debug helper: print a parsed transaction field by field, rendering
    hashes and raw hex data as integers."""
    for k in tx.keys():
        if k == 'value_out':
            print(k, int(tx[k]))
        elif k in ('hash', '__data__'):
            print(k, int(tx[k], 16))
        elif k == 'txIn':
            for sub_k in tx[k].keys():
                if sub_k == 'prevout_hash':
                    print(sub_k, int(tx[k][sub_k], 16))
                else:
                    print(sub_k, tx[k][sub_k])
        else:
            print(k, tx[k])


# List the block's transactions in block_tx.
for tx_pos in xrange(len(b['transactions'])):
@@ -1317,7 +1341,9 @@ def import_block(store, b, chain_ids=None, chain=None):
(block_id, tx_id, tx_pos)
VALUES (?, ?, ?)""",
(block_id, tx['tx_id'], tx_pos))
store.log.info("block_tx %d %d", block_id, tx['tx_id'])
store.log.info("Testando!! block_tx %d %d", block_id, tx['tx_id'])
# transactions_pretty_print(tx)


if b['height'] is not None:
store._populate_block_txin(block_id)
@@ -2976,11 +3002,10 @@ def get_blockhash(height):

# Returns -1 on error, so we'll get 0 on empty chain
height = store.get_block_number(chain.id) + 1

def get_tx(rpc_tx_hash):
try:
rpc_tx_hex = rpc("getrawtransaction", rpc_tx_hash)

except util.JsonrpcException, e:
if e.code != -5 and e.code != -710: # -5 or -710: transaction not in index.
raise
@@ -2996,7 +3021,11 @@ def get_tx(rpc_tx_hash):
" see import-tx in abe.conf")
return None

decoded_tx = rpc("decoderawtransaction", rpc_tx_hex)
sdec_transaction_handler(decoded_tx)

rpc_tx = rpc_tx_hex.decode('hex')

tx_hash = rpc_tx_hash.decode('hex')[::-1]

computed_tx_hash = chain.transaction_hash(rpc_tx)
@@ -3006,7 +3035,41 @@ def get_tx(rpc_tx_hash):

tx = chain.parse_transaction(rpc_tx)
tx['hash'] = tx_hash

#print("tx after parsing = %s" % str(tx) )
# obj = deserialize.deserialize_Transaction(tx)

return tx

def sdec_transaction_handler(decoded_tx):
    """Inspect a decoded transaction for stream items: on-chain items carry
    company records, while off-chain items reference invoices (notas fiscais)
    whose full data must be fetched through an extra RPC call."""

    # rpc_tx_hex = rpc("getrawtransaction", rpc_tx_hash)
    # decoded_tx = rpc("decoderawtransaction", rpc_tx_hex)

    # Find out whether this specific transaction carries a stream item and
    # whether its data was published off-chain; if so, ask the node for it.
    try:
        transaction_item = decoded_tx['vout'][0]['items'][0]
    except Exception:
        # Not a stream-item transaction; nothing to do.
        return

    # Boolean flag that tells us whether the item's data lives off-chain.
    published_offchain = transaction_item['offchain']

    if not published_offchain:
        stream_name = transaction_item['name']
        company_info = transaction_item['data']
        print("COMPANY = %s" % str(company_info))

    else:
        region = transaction_item['name']
        stream_ref = transaction_item['streamref']
        item_txid = transaction_item['data']['txid']

        # RPC call needed to fetch the full off-chain item data.
        offchain_data = rpc("getstreamitem", stream_ref, item_txid)
        print("NOTA FISCAL (invoice) = %s" % str(offchain_data))

def first_new_block(height, next_hash):
"""Find the first new block."""
@@ -3048,6 +3111,7 @@ def catch_up_mempool(height):
height_chk = time.time() + 1

tx = get_tx(rpc_tx_hash)

if tx is None:
# NB: On new blocks, older mempool txs are often missing.
# This happens at other times too; just skip them and move on.
125 changes: 125 additions & 0 deletions Mce/abe.py
@@ -4347,6 +4347,128 @@ def fix_path_info(abe, env):
env['PATH_INFO'] = pi
return ret




### Experimental function for printing all transactions on the console in a coordinated manner
## NOTE -> This is a big monolithic function that does too many things; it has to be split into
## smaller, encapsulated pieces before merging into master (see the sketch after this comment block).
# Necessary steps:
# 1 - Get the chain name
# 2 - Get the chain connection
# 3 - Gather all transactions
# 4 - Filter transactions by type (maybe this can be postponed)
# 5 - Gather all useful information about the transactions (JSON-RPC calls will be required!)
# 6 - Print the obtained data for each transaction (some conversions/deserializations may be required first!)

# Observations:
# We may have to process the transactions block by block, but that is not a problem in itself.
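
# Editor's sketch (not part of this commit): one possible way to split the
# function below into smaller helpers, using only store/abe methods it already
# calls. The helper names are hypothetical, and get_rawmempool is assumed to
# return a txid -> info dict, which is what the sorting code below implies.

def _sdec_lookup_chain(abe):
    # Steps 1/2 - a single relevant chain with id = 1 is assumed.
    return abe.store.get_chain_by_id(1)

def _sdec_latest_transactions(abe, chain, limit=10):
    # Steps 3/4 - mempool entries first, topped up with recent confirmed txs.
    mempool = abe.store.get_rawmempool(chain)
    recent = abe.store.get_recent_transactions_as_json(chain, 5)
    latest = sorted(mempool.items(),
                    key=lambda tup: tup[1]['time'], reverse=True)[:limit]
    seen = set(txid for (txid, _) in latest)
    for tx in sorted(recent, key=lambda item: item['time'], reverse=True):
        if len(latest) >= limit:
            break
        if tx['txid'] not in seen:
            seen.add(tx['txid'])
            latest.append((tx['txid'], tx))
    return latest

def _sdec_log_transaction(abe, chain, txid, info):
    # Steps 5/6 - log the labels and confirmation count for one transaction.
    labels = []
    if abe.store.does_transaction_exist(txid):
        labels = abe.store.get_labels_for_tx(txid, chain) or []
    for label in labels:
        abe.log.info("Label = %s", label)
    abe.log.info("%s confirmations for transaction %s",
                 str(info.get('confirmations', 0)), str(txid))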

def exhibit_all_transactions(abe):
    chain = None

    try:
        chain = abe.store.get_chain_by_id(1)
        abe.log.info("Able to connect to chain named %s", str(chain.name))
    except Exception as e:
        abe.log.info("Unable to look up chain with id = 1")
        abe.log.warning(e)
        return 0

    ## abe.store.catch_up()

    # For now a single relevant chain is assumed,
    # but this can be adapted to multiple chains.

    # Get the relevant info about our main chain.
    params = abe.get_blockchainparams(chain)

    print("PARAMS = %s" % str(params))

    num_txs = abe.store.get_number_of_transactions(chain)
    print("TOTAL TRANSACTIONS = %d" % int(num_txs))

    num_addresses = abe.store.get_number_of_addresses(chain)
    print("Total addresses = %d" % int(num_addresses))

    connection_status = True

    ''' Deal with streams later
    try:
        num_streams = abe.store.get_number_of_streams(chain)
    except Exception as e:
        connection_status = False
        abe.log.warning(e)
        abe.log.info("Unable to get total_of_streams for MyChain")
        num_streams = -1
    '''

    # Build the latest 10 transactions of the chain: start from the mempool
    # and, if needed, top it up with the most recent confirmed transactions.
    try:
        mempool = abe.store.get_rawmempool(chain)
    except Exception as e:
        print("ERROR on get_rawmempool method -> %s" % str(e))
        mempool = {}

    try:
        recenttx = abe.store.get_recent_transactions_as_json(chain, 5)
    except Exception as e:
        abe.log.warning(e)
        print("ERROR on get_recent_transactions_as_json -> %s" % str(e))
        recenttx = []

    print("Letting the explorer continue")

    sorted_mempool = sorted(
        mempool.items(), key=lambda tup: tup[1]['time'], reverse=True)[:10]

    if len(sorted_mempool) < 10:
        sorted_recenttx = sorted(
            recenttx, key=lambda tx: tx['time'], reverse=True)
        existing_txids = [txid for (txid, value) in sorted_mempool]
        for tx in sorted_recenttx:
            if len(sorted_mempool) == 10:
                break
            if tx['txid'] not in existing_txids:
                existing_txids.append(tx['txid'])
                sorted_mempool.append((tx['txid'], tx))

    for (k, v) in sorted_mempool:
        txid = k
        abe.log.info("Processing transaction = %s", str(txid))
        if abe.store.does_transaction_exist(txid):
            labels = abe.store.get_labels_for_tx(txid, chain)
        else:
            labels = None
            json = None
            try:
                json = abe.store.get_rawtransaction_decoded(chain, txid)
            except Exception as e:
                abe.log.warning(e)
            if json is not None:
                scriptpubkeys = [vout['scriptPubKey']['hex']
                                 for vout in json['vout']]
                d = set()
                for hex in scriptpubkeys:
                    binscript = binascii.unhexlify(hex)
                    tmp = abe.store.get_labels_for_scriptpubkey(
                        chain, binscript)

                    d |= set(tmp)

                labels = list(d)

            if labels is None:
                labels = []

        for label in labels:
            abe.log.info("Label = %s", label)

        conf = v.get('confirmations', None)
        if conf is None or conf == 0:
            abe.log.info("Zero confirmations for transaction %s", str(txid))
        else:
            abe.log.info("%d confirmations for transaction %s", int(conf), str(txid))


def find_htdocs():
return os.path.join(os.path.split(__file__)[0], 'htdocs')
@@ -4672,6 +4794,9 @@ def main(argv):
return 1

store = make_store(args)
# abe = Abe(store, args) # not quite sure if this line is necessary at all...

# abe.exhibit_all_transactions()

return 0

