Skip to content

Commit

Permalink
🚀 Too many changes!
Browse files Browse the repository at this point in the history
- Added FileExcludeSearch file property: exclude the file from latest, random and search.

- Added replicator script: easy archives and restores for Iamages server.

- Added database upgrader: upgrade current database to latest format easily. (From format 1 -> 2).

- Slight update to ToS to comply with UberSpace house rules.

- Changed name of database maker script.

- Server and scripts now check for storage format version.

- Updated dependencies.
  • Loading branch information
sunsetsonwheels committed Feb 16, 2021
1 parent d0a907f commit d96377e
Show file tree
Hide file tree
Showing 9 changed files with 386 additions and 102 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
iamages_storage
iamages_replicated
__pycache__
.vscode
.DS_Store
servercfg.json
replicatorcfg.json
180 changes: 91 additions & 89 deletions Pipfile.lock

Large diffs are not rendered by default.

8 changes: 7 additions & 1 deletion iamagesdb_create.py → iamages_mkdb.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
__version__ = "master"
__version__ = "2.1.0"
__copyright__ = "© jkelol111 et al 2020-present"

import os
import json
import sqlite3

SUPPORTED_FORMAT = 2

print("[Make Iamages Database version '{0}'. {1}]".format(__version__, __copyright__))

print("0/3: Load the server configuration file.")
Expand All @@ -28,4 +30,8 @@
with open("iamagesdb.sql", "r") as sqlscript:
storedb_cursor.executescript(sqlscript.read())

server_config["files"]["storage"]["format"] = SUPPORTED_FORMAT

json.dump(server_config, open("servercfg.json", "w"))

print("Done!")
189 changes: 189 additions & 0 deletions iamages_replicator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
__version__ = "2.1.0"
__copyright__ = "© jkelol111 et al 2021-present"

import argparse
import os
import shutil
import sqlite3
import csv
import json
import hashlib
import datetime
import tempfile

print("[Iamages Storage Replicator version '{0}'. {1}]".format(__version__, __copyright__))

SUPPORTED_FORMAT = 2

print("0/?: Load the server configuration file.")
server_config = json.load(open("servercfg.json", "r"))

print("0/?: Load the replicator configuration file.")
replicator_config = json.load(open("replicatorcfg.json", "r"))

print("0/?: Load previous replicated archives list.")
archives = {}
archives_filepath = os.path.join(replicator_config["directory"], "archives.json")
if os.path.isfile(archives_filepath):
archives = json.load(open(archives_filepath, "r"))

# Command-line interface: one required command plus an optional archive name.
CMD_PARSER = argparse.ArgumentParser(
    description='Back up or restore a replicated Iamages database.'
)

# Positional arguments, registered data-driven so the CLI surface is
# visible at a glance.
_CMD_ARGUMENTS = (
    ('command', {
        'action': 'store',
        'help': 'The command to run (archive, restore, delete, list)'
    }),
    ('archive_name', {
        'action': 'store',
        'nargs': '?',
        'help': 'The name of the replicated archive (for use with restore and delete command).'
    }),
)

for _arg_name, _arg_options in _CMD_ARGUMENTS:
    CMD_PARSER.add_argument(_arg_name, **_arg_options)

CMD_PARSED = CMD_PARSER.parse_args()

def delete_archive(archive):
    """Delete a replicated archive from disk and drop it from the index.

    Removes the archive's .zip file, its .blake2b.txt hash file when one
    was saved, and persists the updated archives index.

    Raises:
        FileNotFoundError: if `archive` is not in the archives index.
    """
    if archive not in archives:
        raise FileNotFoundError("Archive {} not found!".format(archive))

    os.remove(os.path.join(replicator_config["directory"], archive + ".zip"))
    if archives[archive]["has_hash"]:
        os.remove(os.path.join(replicator_config["directory"], archive + ".blake2b.txt"))
    archives.pop(archive)
    # Persist immediately so a later crash cannot leave a stale index entry
    # pointing at an already-deleted file.
    with open(archives_filepath, "w") as archives_file:
        json.dump(archives, archives_file)

if CMD_PARSED.command == "archive":
if not server_config["files"]["storage"]["format"] == SUPPORTED_FORMAT:
print(f'Current storage format is not supported. (expected: {SUPPORTED_FORMAT}, got: {server_config["files"]["format"]})')
exit(1)
with tempfile.TemporaryDirectory() as tmp:
shutil.copytree(server_config["files"]["storage"]["directory"], tmp, dirs_exist_ok=True, ignore=shutil.ignore_patterns("replicated"))

conn = sqlite3.connect(os.path.join(tmp, "iamages.db"))
cur = conn.cursor()
with open(os.path.join(tmp, "Files.csv"), "w") as csv_files:
writer = csv.writer(csv_files)
writer.writerow(["FileID", "FileName", "FileDescription", "FileNSFW", "FilePrivate", "FileMime", "FileWidth", "FileHeight", "FileHash", "FileLink", "FileCreatedDate", "FileExcludeSearch"])
writer.writerows(cur.execute("SELECT * FROM Files").fetchall())
with open(os.path.join(tmp, "Files_Users.csv"), "w") as csv_files_users:
writer = csv.writer(csv_files_users)
writer.writerow(["FileID", "UserID"])
writer.writerows(cur.execute("SELECT * FROM Files_Users").fetchall())
with open(os.path.join(tmp, "Users.csv"), "w") as csv_users:
writer = csv.writer(csv_users)
writer.writerow(["UserID", "UserName", "UserPassword", "UserBiography", "UserCreatedDate"])
writer.writerows(cur.execute("SELECT * FROM Users").fetchall())
conn.close()

os.remove(os.path.join(tmp, "iamages.db"))

current_datetime = datetime.datetime.now()
substitute_datetimes = {
"year": current_datetime.strftime("%Y"),
"month": current_datetime.strftime("%m"),
"day": current_datetime.strftime("%d"),
"hour": current_datetime.strftime("%H"),
"minute": current_datetime.strftime("%M"),
"second": current_datetime.strftime("%S")
}

replicated_filename = replicator_config["naming"].format(**substitute_datetimes) + ".iamagesbak"
replicated_filepath = os.path.join(replicator_config["directory"], replicated_filename)
shutil.make_archive(replicated_filepath, "zip", tmp)

if replicator_config["additional_options"]["save_archive_hash"]:
with open(replicated_filepath + ".zip", "rb") as replicated_file:
with open(os.path.join(replicator_config["directory"], replicated_filename + ".blake2b.txt"), "w") as replicated_file_hash:
replicated_file_hash.write(hashlib.blake2b(replicated_file.read()).hexdigest())

if len(archives) >= replicator_config["saves"]:
delete_archive(list(enumerate(archives))[-1][1])

archives[replicated_filename] = {
"format": replicator_config["format"],
"created_date": current_datetime.strftime("%Y/%m/%d %H:%M:%S"),
"has_hash": replicator_config["additional_options"]["save_archive_hash"]
}

json.dump(archives, open(archives_filepath, "w"))
elif CMD_PARSED.command == "restore":
if not CMD_PARSED.archive_name:
print("Replicated name not provided. Exiting.")
exit(1)

replicated_info = archives[CMD_PARSED.archive_name]

if not replicated_info["format"] == SUPPORTED_FORMAT:
print(f'Replicated archive format is not supported. (expected: {SUPPORTED_FORMAT}, got: {replicated_info["format"]})')
exit(1)

replicated_filepath = os.path.join(os.getcwd(), replicator_config["directory"], CMD_PARSED.archive_name + ".zip")

with open(replicated_filepath, "rb") as replicated_archive:
with open(os.path.join(replicator_config["directory"], CMD_PARSED.archive_name.split(".zip")[0] + ".blake2b.txt"), "r") as replicated_archive_hash:
if not hashlib.blake2b(replicated_archive.read()).hexdigest() == replicated_archive_hash.read():
print("Replicated archive hash doesn't match saved hash in file. Exiting.")
exit(1)

with tempfile.TemporaryDirectory() as tmp:
shutil.unpack_archive(replicated_filepath, tmp)

conn = sqlite3.connect(os.path.join(tmp, "iamages.db"))
cur = conn.cursor()
cur.execute("PRAGMA journal_mode=WAL")

FILES_CSV_PATH = os.path.join(tmp, "Files.csv")
with open(FILES_CSV_PATH, "r") as csv_files:
cur.execute("CREATE TABLE Files (FileID INTEGER PRIMARY KEY, FileName TEXT, FileDescription TEXT, FileNSFW INTEGER, FilePrivate INTEGER, FileMime TEXT, FileWidth INTEGER, FileHeight INTEGER, FileHash TEXT, FileLink INTEGER, FileCreatedDate TEXT, FileExcludeSearch INTEGER)")
reader = csv.DictReader(csv_files)
for row in reader:
cur.execute("INSERT INTO Files (FileID, FileName, FileDescription, FileNSFW, FilePrivate, FileMime, FileWidth, FileHeight, FileHash, FileLink, FileCreatedDate, FileExcludeSearch) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", (
row["FileID"], row["FileName"], row["FileDescription"], row["FileNSFW"], row["FilePrivate"], row["FileMime"], row["FileWidth"], row["FileHeight"], row["FileHash"], row["FileLink"], row["FileCreatedDate"], row["FileExcludeSearch"]))
os.remove(FILES_CSV_PATH)

FILES_USERS_CSV_PATH = os.path.join(tmp, "Files_Users.csv")
with open(FILES_USERS_CSV_PATH, "r") as csv_files_users:
cur.execute("CREATE TABLE Files_Users (FileID INTEGER, UserID INTEGER)")
reader = csv.DictReader(csv_files_users)
for row in reader:
cur.execute("INSERT INTO Files_Users (FileID, UserID) VALUES (?, ?)", (
row["FileID"], row["UserID"]))
os.remove(FILES_USERS_CSV_PATH)

USERS_CSV_PATH = os.path.join(tmp, "Users.csv")
with open(USERS_CSV_PATH, "r") as csv_users:
cur.execute("CREATE TABLE Users (UserID INTEGER PRIMARY KEY, UserName TEXT, UserPassword TEXT, UserBiography TEXT, UserCreatedDate TEXT)")
reader = csv.DictReader(csv_users)
for row in reader:
cur.execute("INSERT INTO Users (UserID, UserName, UserPassword, UserBiography, UserCreatedDate) VALUES (?, ?, ?, ?, ?)", (
row["UserID"], row["UserName"], row["UserPassword"], row["UserBiography"], row["UserCreatedDate"]))
os.remove(USERS_CSV_PATH)

conn.commit()
conn.close()

shutil.rmtree(server_config["files"]["storage"]["directory"])
shutil.copytree(tmp, server_config["files"]["storage"]["directory"])
elif CMD_PARSED.command == "delete":
delete_archive(CMD_PARSED.archive_name)
elif CMD_PARSED.command == "list":
print("\nAvailable replicated archives:\n")
for save in archives:
archive_filepath = os.path.join(replicator_config["directory"], save + ".zip")
if os.path.isfile(archive_filepath):
print(f'- {save} ({archive_filepath})')
print(f' + Created date: {archives[save]["created_date"]}')
print(f' + Archive format: {archives[save]["format"]}')
if archives[save]["has_hash"]:
archive_hash_filepath = os.path.join(replicator_config["directory"], save + ".blake2b.txt")
if os.path.isfile(archive_hash_filepath):
print(f' + Archive hash: enabled ({archive_hash_filepath})')
else:
print(" + Archive hash: enabled (not found)")
else:
print(f' + Archive hash: disabled')
else:
print(f'- {save} (not found)')
print("")
37 changes: 31 additions & 6 deletions iamages_server.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__version__ = "2.0.0"
__version__ = "2.1.0"
__copyright__ = "© jkelol111 et al 2020-present"

import os
Expand Down Expand Up @@ -28,6 +28,12 @@

server_config = json.load(open("servercfg.json", "r"))

# Storage format this server version expects.
SUPPORTED_FORMAT = 2

# Refuse to start against a storage directory in an unexpected format.
if server_config["files"]["storage"]["format"] != SUPPORTED_FORMAT:
    # BUG FIX: the message previously read server_config["files"]["format"],
    # which raised KeyError instead of printing the actual format.
    print(f'Current storage format is not supported. (expected: {SUPPORTED_FORMAT}, got: {server_config["files"]["storage"]["format"]})')
    exit(1)

if not os.path.isdir(server_config["files"]["storage"]["directory"]):
os.makedirs(server_config["files"]["storage"]["directory"])

Expand Down Expand Up @@ -186,7 +192,7 @@ async def get(self, request):
response_body = {
"FileIDs": []
}
FileIDs = await iamagesdb.fetch_all("SELECT FileID FROM Files WHERE FilePrivate = 0 ORDER BY FileID DESC LIMIT 10")
FileIDs = await iamagesdb.fetch_all("SELECT FileID FROM Files WHERE FilePrivate = 0 AND FileExcludeSearch = 0 ORDER BY FileID DESC LIMIT 10")
for FileID in FileIDs:
response_body["FileIDs"].append(FileID[0])
return starlette.responses.JSONResponse(response_body)
Expand All @@ -200,7 +206,7 @@ async def get(self, request):
attempts = 0
while successful_FileID == 0 and attempts <= 3:
successful_FileID = random.randint(1, total_files)
actual_successful_FileID = await iamagesdb.fetch_one("SELECT FileID From Files WHERE FileID = :FileID AND FilePrivate = 0", {
actual_successful_FileID = await iamagesdb.fetch_one("SELECT FileID From Files WHERE FileID = :FileID AND FilePrivate = 0 AND FileExcludeSearch = 0", {
"FileID": successful_FileID
})
if not actual_successful_FileID:
Expand Down Expand Up @@ -238,6 +244,7 @@ async def put(self, request):
"FileHash": FileHash
})
default_query_FilePrivate = "UPDATE Files SET FilePrivate = :FilePrivate WHERE FileID = " + str(FileID)
default_query_FileExcludeSearch = "UPDATE Files SET FileExcludeSearch = :FileExcludeSearch WHERE FileID = " + str(FileID)
if duplicate_exists:
await iamagesdb.execute("UPDATE Files SET FileLink = :FileLink WHERE FileID = :FileID", {
"FileLink": duplicate_exists[0][0],
Expand All @@ -260,6 +267,14 @@ async def put(self, request):
await iamagesdb.execute(default_query_FilePrivate, {
"FilePrivate": False
})
if "FileExcludeSearch" in request_body:
await iamagesdb.execute(default_query_FileExcludeSearch, {
"FileExcludeSearch": request_body["FileExcludeSearch"]
})
else:
await iamagesdb.execute(default_query_FileExcludeSearch, {
"FileExcludeSearch": False
})
response_body["FileID"] = FileID
return starlette.responses.JSONResponse(response_body)
else:
Expand Down Expand Up @@ -304,6 +319,14 @@ async def put(self, request):
await iamagesdb.execute(default_query_FilePrivate, {
"FilePrivate": False
})
if "FileExcludeSearch" in request_body:
await iamagesdb.execute(default_query_FileExcludeSearch, {
"FileExcludeSearch": request_body["FileExcludeSearch"]
})
else:
await iamagesdb.execute(default_query_FileExcludeSearch, {
"FileExcludeSearch": False
})
else:
await SharedFunctions.delete_file(FileID)
return starlette.responses.Response(status_code=415)
Expand All @@ -330,7 +353,7 @@ async def patch(self, request):
if FileID == request_body["FileID"]:
basic_query = "UPDATE Files SET {0} = :value WHERE FileID = " + str(FileID)
for modification in request_body["Modifications"]:
if modification in ["FileDescription", "FileNSFW", "FilePrivate"]:
if modification in ["FileDescription", "FileNSFW", "FilePrivate", "FileExcludeSearch"]:
basic_query = basic_query.format(modification)
await iamagesdb.execute(basic_query, {
"value": request_body["Modifications"][modification]
Expand Down Expand Up @@ -358,7 +381,8 @@ async def get(self, request):
"FileMime": None,
"FileWidth": None,
"FileHeight": None,
"FileCreatedDate": None
"FileCreatedDate": None,
"FileExcludeSearch": None
}
async def set_response(FileInformation):
response_body["FileID"] = int(request.path_params["FileID"])
Expand All @@ -377,9 +401,10 @@ async def set_response(FileInformation):
response_body["FileWidth"] = FileInformation[4]
response_body["FileHeight"] = FileInformation[5]
response_body["FileCreatedDate"] = FileInformation[6]
response_body["FileExcludeSearch"] = bool(FileInformation[8])

FileID = int(request.path_params["FileID"])
FileInformation = await iamagesdb.fetch_one("SELECT FileDescription, FileNSFW, FilePrivate, FileMime, FileWidth, FileHeight, FileCreatedDate, FileLink FROM Files WHERE FileID = :FileID", {
FileInformation = await iamagesdb.fetch_one("SELECT FileDescription, FileNSFW, FilePrivate, FileMime, FileWidth, FileHeight, FileCreatedDate, FileLink, FileExcludeSearch FROM Files WHERE FileID = :FileID", {
"FileID": FileID
})
FilePrivate = bool(FileInformation[2])
Expand Down
54 changes: 54 additions & 0 deletions iamages_updb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
__version__ = "2.1.0"
__copyright__ = "© jkelol111 et al 2020-present"

import os
import json
import sqlite3

print("[Upgrade Iamages Database version '{0}'. {1}]".format(__version__, __copyright__))

BASE_FORMAT = 1
UPGRADED_FORMAT = 2

print("0/3: Load the server configuration file.")
server_config = json.load(open("servercfg.json", "r"))

if not server_config["files"]["storage"]["format"] == BASE_FORMAT:
print("This script doesn't upgrade from this database version! Exiting. (got: {}, expected: {})".format(server_config["files"]["storage"]["format"], BASE_FORMAT))
exit(1)

print("1/3: Analysing existing database.")
FILESDB_PATH = os.path.join(server_config["files"]["storage"]["directory"], "iamages.db")

if not os.path.isfile(FILESDB_PATH):
print("Database doesn't exist! Exiting.")
exit(1)

storedb_connection = sqlite3.connect(FILESDB_PATH)
storedb_cursor = storedb_connection.cursor()

files_table_columns = storedb_cursor.execute("PRAGMA table_info('Files')").fetchall()

FilesExcludeSearch_found = False

for files_table_column in files_table_columns:
if files_table_column[1] == "FileExcludeSearch":
FilesExcludeSearch_found = True
print("FilesExcludeSearch column found. No change required.")
break

if not FilesExcludeSearch_found:
print("2/3: Performing database upgrade.")
storedb_cursor.execute("ALTER TABLE Files ADD FileExcludeSearch INTEGER")
FileIDs = storedb_cursor.execute("SELECT FileID FROM Files").fetchall()
for FileID in FileIDs:
storedb_cursor.execute("UPDATE Files SET FileExcludeSearch = ? WHERE FileID = ?", (False, FileID[0]))

storedb_connection.commit()
storedb_connection.close()

print("3/3: Updating server configuration.")
server_config["files"]["storage"]["format"] = UPGRADED_FORMAT
json.dump(server_config, open("servercfg.json", "w"))

print("Done!")
Loading

0 comments on commit d96377e

Please sign in to comment.