Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Applied PEP 8 style fixes and added a '--show' flag to optionally open the exported HTML #8

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 45 additions & 17 deletions orbit.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,31 @@
from core.colors import green, white, red, info, run, end

# Command-line interface. Note: the diff view duplicated the old and new
# add_argument lines; registering the same option strings twice raises
# argparse.ArgumentError, so only the post-PR definitions are kept here.
parse = argparse.ArgumentParser()
# '--seeds' is required: the crawl has no starting point without it.
parse.add_argument('-s', '--seeds', required=True,
                   help='target blockchain address(es)', dest='seeds')
parse.add_argument('-o', '--output', help='output file to save raw JSON data',
                   dest='output')
parse.add_argument('-d', '--depth', help='depth of crawling', dest='depth',
                   type=int, default=3)
parse.add_argument('-t', '--top',
                   help='number of addresses to crawl from results',
                   dest='top', type=int, default=20)
parse.add_argument('-l', '--limit', dest='limit', type=int, default=100,
                   help='maximum number of addresses to fetch from one address'
                   )
# Boolean flag: defaults to False, set True when '--show' is present.
parse.add_argument('--show', dest='show', action='store_true',
                   help='If enabled show in browser'
                   )


args = parse.parse_args()

# Unpack parsed options into the module-level names used by the crawl below.
top = args.top
seeds = args.seeds
depth = args.depth
limit = args.limit
output = args.output
show = args.show

print ('''%s
__
Expand All @@ -48,15 +61,19 @@

getQuark()


def crawl(addresses, processed, database, limit):
    """Fetch transactions for every seed address concurrently.

    Submits one getTransactions job per address to a 10-worker thread
    pool and prints a same-line progress counter as jobs complete.
    `processed` and `database` are shared state mutated by the workers
    (presumably address -> transaction maps — confirm in getTransactions).
    """
    # 'with' joins the workers and releases the pool's threads even if a
    # job raises; the original executor was never shut down.
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as threadpool:
        futures = (threadpool.submit(getTransactions, address, processed,
                                     database, limit)
                   for address in addresses)
        for i, _ in enumerate(concurrent.futures.as_completed(futures)):
            # '\r' rewrites the same terminal line instead of scrolling.
            print('%s Progress: %i/%i ' % (info, i + 1,
                                           len(addresses)), end='\r')


try:
for i in range(depth):
print ('%s Crawling level %i' % (run, i + 1))
print('%s Crawling level %i' % (run, i + 1))
database = ranker(database, top + 1)
toBeProcessed = getNew(database, processed)
print('%s %i addresses to crawl' % (info, len(toBeProcessed)))
Expand All @@ -66,7 +83,7 @@ def crawl(addresses, processed, database, limit):

database = ranker(database, top)

jsoned = {'edges':[],'nodes':[]}
jsoned = {'edges': [], 'nodes': []}
num = 1

num = 0
Expand All @@ -79,28 +96,39 @@ def crawl(addresses, processed, database, limit):
size = 20
if node not in doneNodes:
doneNodes.append(node)
jsoned['nodes'].append({'label': node, 'x': x, 'y': y, 'id':'id=' + node, 'size':size})
jsoned['nodes'].append({'label': node,
'x': x, 'y': y, 'id': 'id=' + node,
'size': size})
for childNode in database[node]:
uniqueSize = database[node][childNode]
if uniqueSize > 20:
uniqueSize = 20
x, y = genLocation()
if childNode not in doneNodes:
doneNodes.append(childNode)
jsoned['nodes'].append({'label': childNode, 'x': x, 'y': y, 'id':'id=' + childNode, 'size': uniqueSize})
jsoned['nodes'].append({'label': childNode, 'x': x, 'y': y,
'id':'id=' + childNode, 'size': uniqueSize})
if (node + ':' + childNode or childNode + ':' + node) not in doneEdges:
doneEdges.extend([(node + ':' + childNode), (childNode + ':' + node)])
jsoned['edges'].append({'source':'id=' + childNode, 'target':'id=' + node, 'id':num, "size":uniqueSize/3 if uniqueSize > 3 else uniqueSize})
doneEdges.extend([(node + ':' + childNode),
(childNode + ': ' + node)])
jsoned['edges'].append({'source': 'id=' + childNode,
'target': 'id=' + node,
'id': num,
"size": uniqueSize/3 if uniqueSize > 3
else uniqueSize})
num += 1

# Summary of the crawl; jsoned holds the sigma.js-style graph built above.
print('%s Total wallets: %i' % (info, len(jsoned['nodes'])))
print('%s Total connections: %i' % (info, len(jsoned['edges'])))

# Compact the JSON and swap quotes so it can be embedded in the HTML template.
# NOTE(review): stripping ALL spaces also mangles any space inside labels.
render = json.dumps(jsoned).replace(' ', '').replace('\'', '"')

# Only build and open the HTML visualisation when '--show' was passed.
if show:
    prepareGraph('%s.json' % seeds[0], render)
    webbrowser.open('file://' + os.getcwd() + '/quark.html')

if not output:
output = seeds[0] + '.json'
if output:
data = exporter(output, jsoned)
new = open(output, 'w+')
Expand Down