database.py
import json
# from pathlib import Path
import datetime
import aiofiles
import os
class DataBase:
    """Slow but good placeholder for now."""

    def __init__(self, metadata, top, filename):
        return  # currently disabled because logging isn't a priority
        # self.metadata = metadata
        self.top = top
        # self.history = {key: [] for key in self.top.flat_meta(lambda node, path: ".".join(path))}
        # def get_id(node, path):
        #     return ".".join(path[1:])
        # self.data = {id: [] for id in self.top.flat_meta(get_id)}
        self.filename = "../logs/" + datetime.datetime.now().strftime('%Y-%m-%d_%H:%M_') + filename + ".log"
        print("making new log file", self.filename)
        os.makedirs("../logs", exist_ok=True)
        # possibly store data as JSON encoded - Python doesn't need to look at it
        # self.file = open(filename, "a")

    async def add_log_line(self, name, obj):
        return  # disabled along with __init__ until logging becomes a priority
        async with aiofiles.open(self.filename, mode='a') as f:
            # newline-delimited JSON so load_data below can read it back line by line
            await f.write(json.dumps({name: obj}) + '\n')
    # @staticmethod
    # def load_data(filename):
    #     data = []
    #     with open(filename, "r") as file:
    #         for line in file.readlines():
    #             data.append(json.loads(line))
    #     return data

    # def __del__(self):
    #     self.file.close()

    # def last_n(self, ids, last_n):
    #     out = {}
    #     for id in data["ids"]:
    #         out[id] = self.history["slate." + id][-last_n:]
    #     return out

    # def query(self, path, start, stop):
    #     path = path.split(".")
    #     assert path[0] == "slate"
    #     path.pop(0)
    #     return self.data[path][start:stop]

    # def get_path(self, index, path):
    #     path = path.split(".")
    #     assert path[0] == "slate"
    #     path.pop(0)
    #     node = self.local_data[index]
    #     for name in path:
    #         try:
    #             node = node[name]
    #         except KeyError:
    #             return "null"
    #     return node

    # def add_message(self, datapoint):
    #     self.local_data.append(datapoint)

    # def add_multiple(self, datapoints):
    #     for datapoint in datapoints:
    #         self.local_data.append(datapoint)
    #         # self.file.write(json.dumps(datapoint) + '\n')
    #     self.file.flush()
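

# A minimal usage sketch, not part of the original class: it shows how a caller
# might construct the logger and await add_log_line. The metadata/top/filename
# values and the "temperature" payload are hypothetical placeholders, and the
# early `return` statements above would need to be removed for anything to be
# written; as shipped, both calls are no-ops.
if __name__ == "__main__":
    import asyncio

    async def _demo():
        db = DataBase(metadata=None, top=None, filename="demo")
        await db.add_log_line("temperature", {"value": 21.5, "unit": "C"})

    asyncio.run(_demo())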