From 91c9f5596ada28a19a298a187bcfd8f72aabc867 Mon Sep 17 00:00:00 2001
From: Kevin van Liebergen
Date: Wed, 16 Mar 2022 16:07:20 +0100
Subject: [PATCH] added PEP8 style guide and --show parameter to optionally
 open the HTML graph in the browser

---
 orbit.py | 62 ++++++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 45 insertions(+), 17 deletions(-)

diff --git a/orbit.py b/orbit.py
index a488d15..5baed3e 100644
--- a/orbit.py
+++ b/orbit.py
@@ -19,11 +19,23 @@ from core.colors import green, white, red, info, run, end
 
 parse = argparse.ArgumentParser()
-parse.add_argument('-s', '--seeds', help='target blockchain address(es)', dest='seeds')
-parse.add_argument('-o', '--output', help='output file to save raw JSON data', dest='output')
-parse.add_argument('-d', '--depth', help='depth of crawling', dest='depth', type=int, default=3)
-parse.add_argument('-t', '--top', help='number of addresses to crawl from results', dest='top', type=int, default=20)
-parse.add_argument('-l', '--limit', help='maximum number of addresses to fetch from one address', dest='limit', type=int, default=100)
+parse.add_argument('-s', '--seeds', required=True,
+                   help='target blockchain address(es)', dest='seeds')
+parse.add_argument('-o', '--output', help='output file to save raw JSON data',
+                   dest='output')
+parse.add_argument('-d', '--depth', help='depth of crawling', dest='depth',
+                   type=int, default=3)
+parse.add_argument('-t', '--top',
+                   help='number of addresses to crawl from results',
+                   dest='top', type=int, default=20)
+parse.add_argument('-l', '--limit', dest='limit', type=int, default=100,
+                   help='maximum number of addresses to fetch from one address'
+                   )
+parse.add_argument('--show', dest='show', action='store_true',
+                   help='if enabled, open the graph in the browser'
+                   )
+
+
 args = parse.parse_args()
 
 top = args.top
@@ -31,6 +43,7 @@ depth = args.depth
 limit = args.limit
 output = args.output
+show = args.show
 
 print ('''%s
         __
@@ -48,15 +61,19 @@ getQuark()
 
+
 def crawl(addresses, processed, database, limit):
     threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=10)
-    futures = (threadpool.submit(getTransactions, address, processed, database, limit) for address in addresses)
+    futures = (threadpool.submit(getTransactions, address, processed, database,
+                                 limit) for address in addresses)
     for i, _ in enumerate(concurrent.futures.as_completed(futures)):
-        print('%s Progress: %i/%i    ' % (info, i + 1, len(addresses)), end='\r')
+        print('%s Progress: %i/%i    ' % (info, i + 1,
+                                          len(addresses)), end='\r')
+
 
 try:
     for i in range(depth):
-        print ('%s Crawling level %i' % (run, i + 1))
+        print('%s Crawling level %i' % (run, i + 1))
         database = ranker(database, top + 1)
         toBeProcessed = getNew(database, processed)
         print('%s %i addresses to crawl' % (info, len(toBeProcessed)))
@@ -66,7 +83,7 @@ def crawl(addresses, processed, database, limit):
 
 database = ranker(database, top)
 
-jsoned = {'edges':[],'nodes':[]}
+jsoned = {'edges': [], 'nodes': []}
 num = 1
 
 num = 0
@@ -79,7 +96,9 @@ def crawl(addresses, processed, database, limit):
         size = 20
     if node not in doneNodes:
         doneNodes.append(node)
-        jsoned['nodes'].append({'label': node, 'x': x, 'y': y, 'id':'id=' + node, 'size':size})
+        jsoned['nodes'].append({'label': node,
+                                'x': x, 'y': y, 'id': 'id=' + node,
+                                'size': size})
     for childNode in database[node]:
         uniqueSize = database[node][childNode]
         if uniqueSize > 20:
@@ -87,20 +106,29 @@ def crawl(addresses, processed, database, limit):
             x, y = genLocation()
         if childNode not in doneNodes:
             doneNodes.append(childNode)
-            jsoned['nodes'].append({'label': childNode, 'x': x, 'y': y, 'id':'id=' + childNode, 'size': uniqueSize})
+            jsoned['nodes'].append({'label': childNode, 'x': x, 'y': y,
+                                    'id': 'id=' + childNode, 'size': uniqueSize})
         if (node + ':' + childNode or childNode + ':' + node) not in doneEdges:
-            doneEdges.extend([(node + ':' + childNode), (childNode + ':' + node)])
-            jsoned['edges'].append({'source':'id=' + childNode, 'target':'id=' + node, 'id':num, "size":uniqueSize/3 if uniqueSize > 3 else uniqueSize})
+            doneEdges.extend([(node + ':' + childNode),
+                              (childNode + ':' + node)])
+            jsoned['edges'].append({'source': 'id=' + childNode,
+                                    'target': 'id=' + node,
+                                    'id': num,
+                                    'size': uniqueSize / 3 if uniqueSize > 3
+                                    else uniqueSize})
             num += 1
 
-print('%s Total wallets:%i' % (info, len(jsoned['nodes'])))
-print('%s Total connections:%i' % (info, len(jsoned['edges'])))
+print('%s Total wallets: %i' % (info, len(jsoned['nodes'])))
+print('%s Total connections: %i' % (info, len(jsoned['edges'])))
 
 render = json.dumps(jsoned).replace(' ', '').replace('\'', '"')
 
-prepareGraph('%s.json' % seeds[0], render)
-webbrowser.open('file://' + os.getcwd() + '/quark.html')
+if show:
+    prepareGraph('%s.json' % seeds[0], render)
+    webbrowser.open('file://' + os.getcwd() + '/quark.html')
 
+if not output:
+    output = seeds[0] + '.json'
 if output:
     data = exporter(output, jsoned)
     new = open(output, 'w+')
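
Quick sanity check of the new behavior, assuming the script is invoked as
python3 orbit.py (the seed below is only a placeholder address):

    # JSON export only; with -o omitted, output now defaults to <seed>.json
    # and no browser window is opened
    python3 orbit.py -s 1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa

    # same export, but --show additionally renders quark.html and opens it
    # in the default browser
    python3 orbit.py -s 1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa --show

Note that -s/--seeds is now required, so running without it exits with an
argparse error instead of crashing later on an empty seed list.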