Skip to content

Commit

Permalink
Merge pull request #23 from swdotcom/hash-values-in-json
Browse files Browse the repository at this point in the history
use hashed_values.json file
  • Loading branch information
bjacobson26 authored Sep 9, 2020
2 parents 9e2e50b + bdd5dc5 commit dea0552
Show file tree
Hide file tree
Showing 4 changed files with 49 additions and 39 deletions.
1 change: 0 additions & 1 deletion Software.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,6 @@ def on_modified_async(self, view):
# Initiates the plugin tasks once it's loaded into Sublime.
def plugin_loaded():
initializeUser()
fetch_user_hashed_values()
track_editor_action(
jwt=getJwt(),
entity='editor',
Expand Down
24 changes: 12 additions & 12 deletions lib/KpmManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,17 +23,17 @@ def post_json(json_data):
track_codetime_event(
jwt=jwt,
keystrokes=payload['keystrokes'],
lines_added= payload.get('document_change_info', {})['lines_added'],
lines_deleted= payload.get('document_change_info', {})['lines_deleted'],
characters_added= payload.get('document_change_info', {})['characters_added'],
characters_deleted= payload.get('document_change_info', {})['characters_deleted'],
single_deletes= payload.get('document_change_info', {})['single_deletes'],
multi_deletes= payload.get('document_change_info', {})['multi_deletes'],
single_adds= payload.get('document_change_info', {})['single_adds'],
multi_adds= payload.get('document_change_info', {})['multi_adds'],
auto_indents= payload.get('document_change_info', {})['auto_indents'],
replacements= payload.get('document_change_info', {})['replacements'],
is_net_change= payload.get('document_change_info', {})['is_net_change'],
lines_added=payload.get('document_change_info', {}).get('lines_added', 0),
lines_deleted=payload.get('document_change_info', {}).get('lines_deleted', 0),
characters_added=payload.get('document_change_info', {}).get('characters_added', 0),
characters_deleted=payload.get('document_change_info', {}).get('characters_deleted', 0),
single_deletes=payload.get('document_change_info', {}).get('single_deletes', 0),
multi_deletes=payload.get('document_change_info', {}).get('multi_deletes', 0),
single_adds=payload.get('document_change_info', {}).get('single_adds', 0),
multi_adds=payload.get('document_change_info', {}).get('multi_adds', 0),
auto_indents=payload.get('document_change_info', {}).get('auto_indents', 0),
replacements=payload.get('document_change_info', {}).get('replacements', 0),
is_net_change=payload.get('document_change_info', {}).get('is_net_change', False),
start_time=payload['local_start'],
end_time=payload['local_end'],
file_path=payload['file_path'],
Expand All @@ -48,7 +48,7 @@ def post_json(json_data):
plugin_name=payload['plugin_name'],
repo_identifier=payload['repo_identifier'],
repo_name=payload['repo_name'],
owner_id=payload['repo_owner_id'],
owner_id=payload.get('repo_owner_id', None),
git_branch=payload['git_branch'],
git_tag=payload['git_tag']
)
Expand Down
16 changes: 16 additions & 0 deletions lib/SoftwareUtil.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,22 @@ def getFileDataPayloadsAsJson(file):
return []
return payloads

def storeHashedValues(user_hashed_values):
    # Persist the user's hashed-values map to hashed_values.json.
    # A falsy map is a no-op (nothing is written, existing file untouched).
    target = getSoftwareHashedValuesFile()
    if not user_hashed_values:
        return
    try:
        with open(target, 'w') as out:
            json.dump(user_hashed_values, out, indent=4)
    except Exception as ex:
        log('Code time: Error writing hashed_values: %s' % ex)

def getHashedValues():
    # Load the hashed-values map from disk; fall back to an empty dict
    # when the file is missing, unreadable, or contains falsy data.
    data = getFileDataAsJson(getSoftwareHashedValuesFile())
    return data if data else {}

def getSoftwareHashedValuesFile():
    # Absolute path of hashed_values.json inside the software data directory.
    return os.path.join(getSoftwareDir(True), 'hashed_values.json')

def getSoftwareDataStoreFile():
    # Absolute path of data.json inside the software data directory.
    return os.path.join(getSoftwareDir(True), 'data.json')
Expand Down
47 changes: 21 additions & 26 deletions lib/TrackerManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from datetime import datetime
from .SoftwareHttp import *
from .blake2 import BLAKE2b
from .SoftwareUtil import *
# Add vendor directory to module search path
# This needs to be here to load the snowplow_tracker library
vendor_dir = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '..', 'vendor'))
Expand All @@ -12,7 +13,6 @@

cached_tracker = None
cached_hashed_values = {}
refresh_hashed_values = False
# swdc_tracker will initialize on the first use of it (editor activated event)
# and use a cached instance for every subsequent call
def swdc_tracker(use_cache = True):
Expand All @@ -36,24 +36,18 @@ def track_codetime_event(**kwargs):
event_json = codetime_payload(**kwargs)
context = build_context(**kwargs)
swdc_tracker().track_self_describing_event(event_json, context)
if(refresh_hashed_values):
fetch_user_hashed_values()

def track_editor_action(**kwargs):
    # Send an editor_action self-describing event to the snowplow tracker.
    # No-op when the tracker is disabled. The per-call refresh of hashed
    # values was removed in this commit (hash_value refreshes on jwt change),
    # and the previously-captured but unused return value is dropped.
    if tracker_enabled():
        event_json = editor_action_payload(**kwargs)
        context = build_context(**kwargs)
        swdc_tracker().track_self_describing_event(event_json, context)

def track_ui_interaction(**kwargs):
    # Send a ui_interaction self-describing event to the snowplow tracker.
    # No-op when the tracker is disabled. The stale post-send
    # refresh_hashed_values check (removed in this commit) is gone.
    if tracker_enabled():
        event_json = ui_interaction_payload(**kwargs)
        context = build_context(**kwargs)
        swdc_tracker().track_self_describing_event(event_json, context)

def build_context(**kwargs):
ctx = []
Expand Down Expand Up @@ -125,7 +119,7 @@ def auth_payload(**kwargs):
)

def file_payload(**kwargs):
hashed_name = hash_value(kwargs['file_name'], 'file_name', kwargs['jwt'])
hashed_name = hash_value(kwargs['file_name'].replace("\\", "/"), 'file_name', kwargs['jwt'])
hashed_path = hash_value(kwargs['file_path'], 'file_path', kwargs['jwt'])

return SelfDescribingJson(
Expand Down Expand Up @@ -191,36 +185,37 @@ def ui_element_payload(**kwargs):
}
)

def hash_value(val, data_type, jwt):
if data_type == 'file_name':
value = val.replace("\\", "/")
else:
value = val
# Last jwt for which the server-side hashed values were fetched.
latestJwt = None

def hash_value(value, data_type, jwt):
    """Hash *value* with BLAKE2b and register it once per data_type.

    Returns the 64-byte hex digest, or '' for a falsy value. On the first
    call with a new jwt the hashed-values cache is refreshed from the
    server. A digest not yet cached for this data_type is appended to the
    in-memory cache, persisted via storeHashedValues, and its plaintext is
    encrypted and uploaded with encrypt_and_save.
    """
    global latestJwt

    # Refresh the cache whenever the jwt changes (replaces the old
    # refresh_hashed_values flag mechanism).
    if jwt != latestJwt:
        latestJwt = jwt
        fetch_user_hashed_values()

    if not value:
        return ''

    hashed_value = BLAKE2b(value.encode(), 64).hexdigest()

    global cached_hashed_values
    if hashed_value not in cached_hashed_values.get(data_type, []):
        if cached_hashed_values.get(data_type, False):
            cached_hashed_values[data_type].append(hashed_value)
        else:
            cached_hashed_values[data_type] = [hashed_value]
        storeHashedValues(cached_hashed_values)

        encrypt_and_save(value, hashed_value, data_type, jwt)

    return hashed_value

def fetch_user_hashed_values():
    # Fetch the user's hashed values from the backend, replace the
    # in-memory cache, and persist them to hashed_values.json.
    # Errors are logged and swallowed (best-effort refresh).
    try:
        response = requestIt('GET', '/hashed_values', None, getJwt())
        user_hashed_values = json.loads(response.read().decode('utf-8'))

        global cached_hashed_values
        cached_hashed_values = user_hashed_values
        storeHashedValues(user_hashed_values)
    except Exception as ex:
        print("ERROR FETCHING HASHED VALUES")
        print(ex)
Expand All @@ -236,5 +231,5 @@ def encrypt_and_save(value, hashed_value, data_type, jwt):
if response and isResponseOk(response):
return True
else:
print("error POSTing to /user_encrypted_data for value: " + value)
print("error POSTing to /user_encrypted_data for value: " + hashed_value)
return False

0 comments on commit dea0552

Please sign in to comment.