From be7f9a9aff02ebb6e2f37bf6cb1da0f0e32b4104 Mon Sep 17 00:00:00 2001
From: Julius Kreutz
Date: Fri, 22 Sep 2023 04:09:03 +1000
Subject: [PATCH] Saving whatever I did

---
 ...c71dc79ef3e3e451157d26c002d20455ed36c.json |  15 ++
 ...d6e1877336b69292f00a5f59f081ced501c89.json |  17 --
 ...99a825c721a12b062e29bbfa42b221fe0367d.json |  14 ++
 ...5a8dcc583fbece88e8748c812e3681c347fb.json} |  23 +-
 ...76d0bd88fe7989d73e8b689f4274cc15c18f1.json |  48 ----
 ...af365cb8e4548a36c87ae265030d55e45cce1.json |  47 ----
 ...b039a9bd8f94eada1cd64b418b26ae5a00432.json |  17 --
 ...0f5e03a5f3d3ba5b1672f235cc0163de4b794.json |  14 ++
 ...7d5ff4fdbd2632d525b9ecabe63d1d6658fe7.json |  15 ++
 ...697e5b4b5f6f12dcd379323f9f7888e06270c.json |  48 ----
 ...6a4740e284a2ee28fe316663ea56cb1e9c67c.json |  19 ++
 ...0b366efd716bfa95a302fb6f8d83db53ad683.json |  17 --
 ...782f51d690785fe7354d27de980bfb47c5624.json |  47 ----
 ...cca10cb1344cfdb73404aac104fad444773a9.json |  17 --
 ...5896f8297e269735438bca387e0b89e4eac00.json |  48 ----
 ...1b7e0e39518f5373fa3fc08b400f05b49717e.json |  47 ----
 ...ecedc350e83b2d3691221f0042bc2894536d1.json |  17 --
 ...0eeacd928248d15293d8169b91290f4505e1.json} |   4 +-
 ...92d5ab1639b02e9fea32938bdeea8536d4184.json |  14 ++
 ...a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc.json |  47 ----
 ...807ec8ec8679ffea7e8e56829e05ebac89854.json |  20 ++
 ...fd39a526963c9e92c5b664f3075fbdd7eadba.json |  47 ----
 ...30ee6ac09324328bf284b086aaa4b3b4e3788.json |  48 ----
 ...9dcd95ad19ee9f2279f95651b26623d53d7cd.json |  15 ++
 ...7af94d3ff910a1e8957f1bcfd39bcd4345972.json |  47 ----
 ...fdee2c7b3f4696a247f9247149dff2fa20105.json |  48 ----
 ...2fca1fd27589dfde611fa740f3ad59ed76fc9.json |  26 +-
 ...e911b5122882d767fb7d48b1c6b16cfecb313.json |  47 ----
 ...e9fdf2ceb8bab62dbcadeea111a294ae65b77.json |  60 +++++
 ...f433a153c9134f0038f9dedef3d79ffab129a.json |  14 ++
 ...5969cdfc807ce2417563b703e9afc38d5599c.json |  17 --
 ...0e94258b923aa68d0d7b55973085fa101fd17.json |  48 ----
 ...3cb299a18216965539c2efa9fda8cdbc163ae.json |  48 ----
 ...d2eebeabb05e95738610de1afa6fd9a419ee3.json |  17 --
 ...5abea665748dcf7404a5e187667eae9be1779.json |  20 ++
 ...5de32c0a738527e60ac0e8f45a1613693306.json} |  30 ++-
 ...c9420f12f76be6e6450a1fa6ad2dd00decc22.json |  17 --
 ...5d69fac6982d30eb3254da8e5135bf3b54ae2.json |  60 +++++
 ...5310c51413b5aad22b494bfb582b3da82deca.json |  47 ----
 Cargo.toml                                    |   2 +
 .../20230907085011_users_references.sql       |  14 ++
 migrations/20230912034344_books.sql           |   3 +
 migrations/20230921163833_warps.sql           |  17 ++
 .../20230921172629_warps_primary_key.sql      |   3 +
 src/api/achievements/mod.rs                   |   4 +-
 src/api/books/id/comment/mod.rs               |  94 ++++++++
 src/api/books/id/image1/mod.rs                |  94 ++++++++
 src/api/books/id/image2/mod.rs                |  94 ++++++++
 src/api/books/id/mod.rs                       |  15 +-
 src/api/books/mod.rs                          |  19 +-
 src/api/free_jade_alert/mod.rs                |   2 +-
 .../{import => import_achievements}/mod.rs    |  12 +-
 src/api/import_books/mod.rs                   | 123 ++++++++++
 src/api/mod.rs                                |  20 +-
 src/api/pages/achievement_tracker/mod.rs      |  10 +-
 src/api/pages/book_tracker/mod.rs             | 193 +++++++++++++++
 src/api/pages/mod.rs                          |  12 +-
 src/api/pages/warp_tracker/mod.rs             |  16 ++
 src/api/pages/warp_tracker/uid/mod.rs         | 106 ++++++++
 src/api/scores/mod.rs                         |   4 +-
 src/api/sitemap/mod.rs                        | 117 +++++++++
 src/api/warps/mod.rs                          | 210 ++++++++--------
 src/api/warps/uid/mod.rs                      | 227 +++--------------
 src/database/achievements.rs                  |  18 ++
 src/database/books.rs                         |  69 +++++-
 src/database/mod.rs                           |  18 +-
 src/database/sessions.rs                      |  18 ++
 src/database/users_achievements.rs            |   2 +-
 src/database/warp_departure_characters.rs     |  98 --------
 src/database/warp_departure_light_cones.rs    |  98 --------
 src/database/warp_lc_characters.rs            |  98 --------
 src/database/warp_lc_light_cones.rs           |  98 --------
 src/database/warp_special_characters.rs       |  98 --------
 src/database/warp_special_light_cones.rs      |  98 --------
 src/database/warp_standard_characters.rs      |  98 --------
 src/database/warp_standard_light_cones.rs     |  98 --------
 src/database/warps.rs                         | 137 +++++++++
 src/main.rs                                   |   2 +
 src/pg_session_store.rs                       |   4 +
 src/update/achievements_percent.rs            |  15 +-
 src/update/books_percent.rs                   |  15 +-
 src/update/dimbreath.rs                       |  11 +
 82 files changed, 1717 insertions(+), 1998 deletions(-)
 create mode 100644 .sqlx/query-0332a46d40aa6b5c2b62b389852c71dc79ef3e3e451157d26c002d20455ed36c.json
 delete mode 100644 .sqlx/query-0c348b98012d850136a5736b7aad6e1877336b69292f00a5f59f081ced501c89.json
 create mode 100644 .sqlx/query-13c5141ef17c13ed74375f5a4e199a825c721a12b062e29bbfa42b221fe0367d.json
 rename .sqlx/{query-ecea2a6e436e6072a3b218926806392e2d3c177ea3b7ffa2dc0e921e0698c4ee.json => query-2f3e2e499cf8cfd82e41f1d64f835a8dcc583fbece88e8748c812e3681c347fb.json} (50%)
 delete mode 100644 .sqlx/query-321a5e28338dd19f2e8b811dcbb76d0bd88fe7989d73e8b689f4274cc15c18f1.json
 delete mode 100644 .sqlx/query-358c407545f08857d7fd3573c76af365cb8e4548a36c87ae265030d55e45cce1.json
 delete mode 100644 .sqlx/query-3c1c9a69f562b651b692400501db039a9bd8f94eada1cd64b418b26ae5a00432.json
 create mode 100644 .sqlx/query-3df551c52abda62417d55a680f40f5e03a5f3d3ba5b1672f235cc0163de4b794.json
 create mode 100644 .sqlx/query-3f9b36c66e4b05708ec7de32a817d5ff4fdbd2632d525b9ecabe63d1d6658fe7.json
 delete mode 100644 .sqlx/query-3fc1bfc5eec0663cb418ecf1aab697e5b4b5f6f12dcd379323f9f7888e06270c.json
 create mode 100644 .sqlx/query-400a696954c28d46593ac0211cf6a4740e284a2ee28fe316663ea56cb1e9c67c.json
 delete mode 100644 .sqlx/query-435f481453aeed9f1a85a37e8f20b366efd716bfa95a302fb6f8d83db53ad683.json
 delete mode 100644 .sqlx/query-4a9d9afd70c99ae211d68a4c72b782f51d690785fe7354d27de980bfb47c5624.json
 delete mode 100644 .sqlx/query-4e4d06702f6f88001b03f19911bcca10cb1344cfdb73404aac104fad444773a9.json
 delete mode 100644 .sqlx/query-5617cec35a9f4787019dc474d345896f8297e269735438bca387e0b89e4eac00.json
 delete mode 100644 .sqlx/query-59b39a6aed5db16f2fcd91c28711b7e0e39518f5373fa3fc08b400f05b49717e.json
 delete mode 100644 .sqlx/query-5b1e2c8a01c03f1b0d5c966cc4cecedc350e83b2d3691221f0042bc2894536d1.json
 rename .sqlx/{query-9f3d8d898ef71f0717021cd5724d82cf12c8675babf9eec9c8b785002011fb3f.json => query-601fcee853b8bb3f4b340be90ded0eeacd928248d15293d8169b91290f4505e1.json} (65%)
 create mode 100644 .sqlx/query-63be1f694e817e6ebd3ff4bd5ea92d5ab1639b02e9fea32938bdeea8536d4184.json
 delete mode 100644 .sqlx/query-6486eb9aaf6f4cd5a5efb7462a6a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc.json
 create mode 100644 .sqlx/query-66ed9fb3be98ee285372e71233b807ec8ec8679ffea7e8e56829e05ebac89854.json
 delete mode 100644 .sqlx/query-824dae72aa5ec31fcc58cd27e7efd39a526963c9e92c5b664f3075fbdd7eadba.json
 delete mode 100644 .sqlx/query-837d309b4882f2d9fb7673cbe5530ee6ac09324328bf284b086aaa4b3b4e3788.json
 create mode 100644 .sqlx/query-85fc1fdf2224f7594e5e6247e9f9dcd95ad19ee9f2279f95651b26623d53d7cd.json
 delete mode 100644 .sqlx/query-8b8eb93b6d709348a1e64b49f587af94d3ff910a1e8957f1bcfd39bcd4345972.json
 delete mode 100644 .sqlx/query-92d35f71d6160fc1509b5b2aa79fdee2c7b3f4696a247f9247149dff2fa20105.json
 delete mode 100644 .sqlx/query-a6a614c93e338b3e4acfb8c949ae911b5122882d767fb7d48b1c6b16cfecb313.json
 create mode 100644 .sqlx/query-a76946f329d620a3ec7f85ec341e9fdf2ceb8bab62dbcadeea111a294ae65b77.json
 create mode 100644 .sqlx/query-b4dcdeace3f76113834d0bb2b03f433a153c9134f0038f9dedef3d79ffab129a.json
 delete mode 100644 .sqlx/query-bd034e6f3b7a52700f58d4a7f495969cdfc807ce2417563b703e9afc38d5599c.json
 delete mode 100644 .sqlx/query-bd86a7435e9a119062d87d537b90e94258b923aa68d0d7b55973085fa101fd17.json
 delete mode 100644 .sqlx/query-bf3bd841cc3db181d4e4602f0403cb299a18216965539c2efa9fda8cdbc163ae.json
 delete mode 100644 .sqlx/query-c6b359088f23140b6b552a970e6d2eebeabb05e95738610de1afa6fd9a419ee3.json
 create mode 100644 .sqlx/query-da5e9ce18fe703c120cdab639725abea665748dcf7404a5e187667eae9be1779.json
 rename .sqlx/{query-eebce2d863e45079e7a2aefa5f39c92b17964163ad46128dfb21a28fac3b1623.json => query-e17cc6da24815887b8469cda77965de32c0a738527e60ac0e8f45a1613693306.json} (81%)
 delete mode 100644 .sqlx/query-e7b0f7e736692bfced818fea852c9420f12f76be6e6450a1fa6ad2dd00decc22.json
 create mode 100644 .sqlx/query-e9befc3e05f2e0efb257ac7d7ca5d69fac6982d30eb3254da8e5135bf3b54ae2.json
 delete mode 100644 .sqlx/query-ea7158526eac5fac6de2de7518c5310c51413b5aad22b494bfb582b3da82deca.json
 create mode 100644 migrations/20230907085011_users_references.sql
 create mode 100644 migrations/20230912034344_books.sql
 create mode 100644 migrations/20230921163833_warps.sql
 create mode 100644 migrations/20230921172629_warps_primary_key.sql
 create mode 100644 src/api/books/id/comment/mod.rs
 create mode 100644 src/api/books/id/image1/mod.rs
 create mode 100644 src/api/books/id/image2/mod.rs
 rename src/api/{import => import_achievements}/mod.rs (94%)
 create mode 100644 src/api/import_books/mod.rs
 create mode 100644 src/api/pages/book_tracker/mod.rs
 create mode 100644 src/api/pages/warp_tracker/mod.rs
 create mode 100644 src/api/pages/warp_tracker/uid/mod.rs
 create mode 100644 src/api/sitemap/mod.rs
 delete mode 100644 src/database/warp_departure_characters.rs
 delete mode 100644 src/database/warp_departure_light_cones.rs
 delete mode 100644 src/database/warp_lc_characters.rs
 delete mode 100644 src/database/warp_lc_light_cones.rs
 delete mode 100644 src/database/warp_special_characters.rs
 delete mode 100644 src/database/warp_special_light_cones.rs
 delete mode 100644 src/database/warp_standard_characters.rs
 delete mode 100644 src/database/warp_standard_light_cones.rs
 create mode 100644 src/database/warps.rs

diff --git a/.sqlx/query-0332a46d40aa6b5c2b62b389852c71dc79ef3e3e451157d26c002d20455ed36c.json b/.sqlx/query-0332a46d40aa6b5c2b62b389852c71dc79ef3e3e451157d26c002d20455ed36c.json
new file mode 100644
index 00000000..f24e7fb8
--- /dev/null
+++ b/.sqlx/query-0332a46d40aa6b5c2b62b389852c71dc79ef3e3e451157d26c002d20455ed36c.json
@@ -0,0 +1,15 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET image1 = $2 WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "0332a46d40aa6b5c2b62b389852c71dc79ef3e3e451157d26c002d20455ed36c"
+}
diff --git a/.sqlx/query-0c348b98012d850136a5736b7aad6e1877336b69292f00a5f59f081ced501c89.json b/.sqlx/query-0c348b98012d850136a5736b7aad6e1877336b69292f00a5f59f081ced501c89.json
deleted file mode 100644
index 8b9a2027..00000000
--- a/.sqlx/query-0c348b98012d850136a5736b7aad6e1877336b69292f00a5f59f081ced501c89.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_departure_light_cones(id, uid, light_cone, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n light_cone = EXCLUDED.light_cone,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "0c348b98012d850136a5736b7aad6e1877336b69292f00a5f59f081ced501c89"
-}
diff --git a/.sqlx/query-13c5141ef17c13ed74375f5a4e199a825c721a12b062e29bbfa42b221fe0367d.json b/.sqlx/query-13c5141ef17c13ed74375f5a4e199a825c721a12b062e29bbfa42b221fe0367d.json
new file mode 100644
index 00000000..41acfc9f
--- /dev/null
+++ b/.sqlx/query-13c5141ef17c13ed74375f5a4e199a825c721a12b062e29bbfa42b221fe0367d.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET comment = NULL WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "13c5141ef17c13ed74375f5a4e199a825c721a12b062e29bbfa42b221fe0367d"
+}
diff --git a/.sqlx/query-ecea2a6e436e6072a3b218926806392e2d3c177ea3b7ffa2dc0e921e0698c4ee.json b/.sqlx/query-2f3e2e499cf8cfd82e41f1d64f835a8dcc583fbece88e8748c812e3681c347fb.json
similarity index 50%
rename from .sqlx/query-ecea2a6e436e6072a3b218926806392e2d3c177ea3b7ffa2dc0e921e0698c4ee.json
rename to .sqlx/query-2f3e2e499cf8cfd82e41f1d64f835a8dcc583fbece88e8748c812e3681c347fb.json
index ed881648..8eca1439 100644
--- a/.sqlx/query-ecea2a6e436e6072a3b218926806392e2d3c177ea3b7ffa2dc0e921e0698c4ee.json
+++ b/.sqlx/query-2f3e2e499cf8cfd82e41f1d64f835a8dcc583fbece88e8748c812e3681c347fb.json
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_departure_characters.*,\n characters_text.name\n FROM\n warp_departure_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n WHERE\n warp_departure_characters.id = $1\n AND\n uid = $2\n ",
+  "query": "\n SELECT\n warps.*,\n COALESCE(characters_text.name, light_cones_text.name) AS name\n FROM\n warps\n LEFT JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $2\n LEFT JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n id\n ",
   "describe": {
     "columns": [
       {
@@ -15,23 +15,32 @@
       },
       {
         "ordinal": 2,
+        "name": "gacha_type",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
         "name": "character",
         "type_info": "Int4"
       },
       {
-        "ordinal": 3,
+        "ordinal": 4,
+        "name": "light_cone",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 5,
         "name": "timestamp",
         "type_info": "Timestamp"
       },
       {
-        "ordinal": 4,
+        "ordinal": 6,
         "name": "name",
         "type_info": "Text"
       }
     ],
     "parameters": {
       "Left": [
-        "Int8",
         "Int8",
         "Text"
       ]
@@ -40,9 +49,11 @@
       false,
       false,
       false,
+      true,
+      true,
       false,
-      false
+      null
     ]
   },
-  "hash": "ecea2a6e436e6072a3b218926806392e2d3c177ea3b7ffa2dc0e921e0698c4ee"
+  "hash": "2f3e2e499cf8cfd82e41f1d64f835a8dcc583fbece88e8748c812e3681c347fb"
 }
diff --git a/.sqlx/query-321a5e28338dd19f2e8b811dcbb76d0bd88fe7989d73e8b689f4274cc15c18f1.json b/.sqlx/query-321a5e28338dd19f2e8b811dcbb76d0bd88fe7989d73e8b689f4274cc15c18f1.json
deleted file mode 100644
index 742b4a0f..00000000
--- a/.sqlx/query-321a5e28338dd19f2e8b811dcbb76d0bd88fe7989d73e8b689f4274cc15c18f1.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_special_characters.*,\n characters_text.name\n FROM\n warp_special_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n WHERE\n warp_special_characters.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "321a5e28338dd19f2e8b811dcbb76d0bd88fe7989d73e8b689f4274cc15c18f1"
-}
diff --git a/.sqlx/query-358c407545f08857d7fd3573c76af365cb8e4548a36c87ae265030d55e45cce1.json b/.sqlx/query-358c407545f08857d7fd3573c76af365cb8e4548a36c87ae265030d55e45cce1.json
deleted file mode 100644
index 98ebb7df..00000000
--- a/.sqlx/query-358c407545f08857d7fd3573c76af365cb8e4548a36c87ae265030d55e45cce1.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_departure_light_cones.*,\n light_cones_text.name\n FROM\n warp_departure_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "358c407545f08857d7fd3573c76af365cb8e4548a36c87ae265030d55e45cce1"
-}
diff --git a/.sqlx/query-3c1c9a69f562b651b692400501db039a9bd8f94eada1cd64b418b26ae5a00432.json b/.sqlx/query-3c1c9a69f562b651b692400501db039a9bd8f94eada1cd64b418b26ae5a00432.json
deleted file mode 100644
index 2ab039f5..00000000
--- a/.sqlx/query-3c1c9a69f562b651b692400501db039a9bd8f94eada1cd64b418b26ae5a00432.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_special_characters(id, uid, character, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n character = EXCLUDED.character,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "3c1c9a69f562b651b692400501db039a9bd8f94eada1cd64b418b26ae5a00432"
-}
diff --git a/.sqlx/query-3df551c52abda62417d55a680f40f5e03a5f3d3ba5b1672f235cc0163de4b794.json b/.sqlx/query-3df551c52abda62417d55a680f40f5e03a5f3d3ba5b1672f235cc0163de4b794.json
new file mode 100644
index 00000000..667b862f
--- /dev/null
+++ b/.sqlx/query-3df551c52abda62417d55a680f40f5e03a5f3d3ba5b1672f235cc0163de4b794.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET image2 = NULL WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "3df551c52abda62417d55a680f40f5e03a5f3d3ba5b1672f235cc0163de4b794"
+}
diff --git a/.sqlx/query-3f9b36c66e4b05708ec7de32a817d5ff4fdbd2632d525b9ecabe63d1d6658fe7.json b/.sqlx/query-3f9b36c66e4b05708ec7de32a817d5ff4fdbd2632d525b9ecabe63d1d6658fe7.json
new file mode 100644
index 00000000..59c0699e
--- /dev/null
+++ b/.sqlx/query-3f9b36c66e4b05708ec7de32a817d5ff4fdbd2632d525b9ecabe63d1d6658fe7.json
@@ -0,0 +1,15 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET comment = $2 WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "3f9b36c66e4b05708ec7de32a817d5ff4fdbd2632d525b9ecabe63d1d6658fe7"
+}
diff --git a/.sqlx/query-3fc1bfc5eec0663cb418ecf1aab697e5b4b5f6f12dcd379323f9f7888e06270c.json b/.sqlx/query-3fc1bfc5eec0663cb418ecf1aab697e5b4b5f6f12dcd379323f9f7888e06270c.json
deleted file mode 100644
index 3d973637..00000000
--- a/.sqlx/query-3fc1bfc5eec0663cb418ecf1aab697e5b4b5f6f12dcd379323f9f7888e06270c.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_lc_characters.*,\n characters_text.name\n FROM\n warp_lc_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n WHERE\n warp_lc_characters.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "3fc1bfc5eec0663cb418ecf1aab697e5b4b5f6f12dcd379323f9f7888e06270c"
-}
diff --git a/.sqlx/query-400a696954c28d46593ac0211cf6a4740e284a2ee28fe316663ea56cb1e9c67c.json b/.sqlx/query-400a696954c28d46593ac0211cf6a4740e284a2ee28fe316663ea56cb1e9c67c.json
new file mode 100644
index 00000000..3ccae69d
--- /dev/null
+++ b/.sqlx/query-400a696954c28d46593ac0211cf6a4740e284a2ee28fe316663ea56cb1e9c67c.json
@@ -0,0 +1,19 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n INSERT INTO\n warps(id, uid, gacha_type, character, light_cone, timestamp)\n VALUES\n ($1, $2, $3, $4, $5, $6)\n ON CONFLICT\n DO NOTHING\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Int8",
+        "Text",
+        "Int4",
+        "Int4",
+        "Timestamp"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "400a696954c28d46593ac0211cf6a4740e284a2ee28fe316663ea56cb1e9c67c"
+}
diff --git a/.sqlx/query-435f481453aeed9f1a85a37e8f20b366efd716bfa95a302fb6f8d83db53ad683.json b/.sqlx/query-435f481453aeed9f1a85a37e8f20b366efd716bfa95a302fb6f8d83db53ad683.json
deleted file mode 100644
index 2d9fa7ba..00000000
--- a/.sqlx/query-435f481453aeed9f1a85a37e8f20b366efd716bfa95a302fb6f8d83db53ad683.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_departure_characters(id, uid, character, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n character = EXCLUDED.character,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "435f481453aeed9f1a85a37e8f20b366efd716bfa95a302fb6f8d83db53ad683"
-}
diff --git a/.sqlx/query-4a9d9afd70c99ae211d68a4c72b782f51d690785fe7354d27de980bfb47c5624.json b/.sqlx/query-4a9d9afd70c99ae211d68a4c72b782f51d690785fe7354d27de980bfb47c5624.json
deleted file mode 100644
index 5e17ca89..00000000
--- a/.sqlx/query-4a9d9afd70c99ae211d68a4c72b782f51d690785fe7354d27de980bfb47c5624.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_departure_characters.*,\n characters_text.name\n FROM\n warp_departure_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "4a9d9afd70c99ae211d68a4c72b782f51d690785fe7354d27de980bfb47c5624"
-}
diff --git a/.sqlx/query-4e4d06702f6f88001b03f19911bcca10cb1344cfdb73404aac104fad444773a9.json b/.sqlx/query-4e4d06702f6f88001b03f19911bcca10cb1344cfdb73404aac104fad444773a9.json
deleted file mode 100644
index a35d1aa4..00000000
--- a/.sqlx/query-4e4d06702f6f88001b03f19911bcca10cb1344cfdb73404aac104fad444773a9.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_special_light_cones(id, uid, light_cone, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n light_cone = EXCLUDED.light_cone,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "4e4d06702f6f88001b03f19911bcca10cb1344cfdb73404aac104fad444773a9"
-}
diff --git a/.sqlx/query-5617cec35a9f4787019dc474d345896f8297e269735438bca387e0b89e4eac00.json b/.sqlx/query-5617cec35a9f4787019dc474d345896f8297e269735438bca387e0b89e4eac00.json
deleted file mode 100644
index 852f8fff..00000000
--- a/.sqlx/query-5617cec35a9f4787019dc474d345896f8297e269735438bca387e0b89e4eac00.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_special_light_cones.*,\n light_cones_text.name\n FROM\n warp_special_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n warp_special_light_cones.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "5617cec35a9f4787019dc474d345896f8297e269735438bca387e0b89e4eac00"
-}
diff --git a/.sqlx/query-59b39a6aed5db16f2fcd91c28711b7e0e39518f5373fa3fc08b400f05b49717e.json b/.sqlx/query-59b39a6aed5db16f2fcd91c28711b7e0e39518f5373fa3fc08b400f05b49717e.json
deleted file mode 100644
index 3e6908fb..00000000
--- a/.sqlx/query-59b39a6aed5db16f2fcd91c28711b7e0e39518f5373fa3fc08b400f05b49717e.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_lc_light_cones.*,\n light_cones_text.name\n FROM\n warp_lc_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "59b39a6aed5db16f2fcd91c28711b7e0e39518f5373fa3fc08b400f05b49717e"
-}
diff --git a/.sqlx/query-5b1e2c8a01c03f1b0d5c966cc4cecedc350e83b2d3691221f0042bc2894536d1.json b/.sqlx/query-5b1e2c8a01c03f1b0d5c966cc4cecedc350e83b2d3691221f0042bc2894536d1.json
deleted file mode 100644
index aede2eef..00000000
--- a/.sqlx/query-5b1e2c8a01c03f1b0d5c966cc4cecedc350e83b2d3691221f0042bc2894536d1.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_lc_characters(id, uid, character, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n character = EXCLUDED.character,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "5b1e2c8a01c03f1b0d5c966cc4cecedc350e83b2d3691221f0042bc2894536d1"
-}
diff --git a/.sqlx/query-9f3d8d898ef71f0717021cd5724d82cf12c8675babf9eec9c8b785002011fb3f.json b/.sqlx/query-601fcee853b8bb3f4b340be90ded0eeacd928248d15293d8169b91290f4505e1.json
similarity index 65%
rename from .sqlx/query-9f3d8d898ef71f0717021cd5724d82cf12c8675babf9eec9c8b785002011fb3f.json
rename to .sqlx/query-601fcee853b8bb3f4b340be90ded0eeacd928248d15293d8169b91290f4505e1.json
index 0d5bd2b8..e9325af4 100644
--- a/.sqlx/query-9f3d8d898ef71f0717021cd5724d82cf12c8675babf9eec9c8b785002011fb3f.json
+++ b/.sqlx/query-601fcee853b8bb3f4b340be90ded0eeacd928248d15293d8169b91290f4505e1.json
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "INSERT INTO users_achievements(username, id) VALUES($1, $2)",
+  "query": "INSERT INTO users_achievements(username, id) VALUES($1, $2) ON CONFLICT(username, id) DO NOTHING",
   "describe": {
     "columns": [],
     "parameters": {
@@ -11,5 +11,5 @@
     },
     "nullable": []
   },
-  "hash": "9f3d8d898ef71f0717021cd5724d82cf12c8675babf9eec9c8b785002011fb3f"
+  "hash": "601fcee853b8bb3f4b340be90ded0eeacd928248d15293d8169b91290f4505e1"
 }
diff --git a/.sqlx/query-63be1f694e817e6ebd3ff4bd5ea92d5ab1639b02e9fea32938bdeea8536d4184.json b/.sqlx/query-63be1f694e817e6ebd3ff4bd5ea92d5ab1639b02e9fea32938bdeea8536d4184.json
new file mode 100644
index 00000000..5fdbbc90
--- /dev/null
+++ b/.sqlx/query-63be1f694e817e6ebd3ff4bd5ea92d5ab1639b02e9fea32938bdeea8536d4184.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET image1 = NULL WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "63be1f694e817e6ebd3ff4bd5ea92d5ab1639b02e9fea32938bdeea8536d4184"
+}
diff --git a/.sqlx/query-6486eb9aaf6f4cd5a5efb7462a6a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc.json b/.sqlx/query-6486eb9aaf6f4cd5a5efb7462a6a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc.json
deleted file mode 100644
index ed1b1c1a..00000000
--- a/.sqlx/query-6486eb9aaf6f4cd5a5efb7462a6a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_special_characters.*,\n characters_text.name\n FROM\n warp_special_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "6486eb9aaf6f4cd5a5efb7462a6a6f7ca1fe05eb0c33c2bcdc2aadb6b5aba1dc"
-}
diff --git a/.sqlx/query-66ed9fb3be98ee285372e71233b807ec8ec8679ffea7e8e56829e05ebac89854.json b/.sqlx/query-66ed9fb3be98ee285372e71233b807ec8ec8679ffea7e8e56829e05ebac89854.json
new file mode 100644
index 00000000..8fcd5f1c
--- /dev/null
+++ b/.sqlx/query-66ed9fb3be98ee285372e71233b807ec8ec8679ffea7e8e56829e05ebac89854.json
@@ -0,0 +1,20 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT\n id\n FROM\n achievements\n WHERE NOT\n (hidden AND impossible)\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Int8"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "66ed9fb3be98ee285372e71233b807ec8ec8679ffea7e8e56829e05ebac89854"
+}
diff --git a/.sqlx/query-824dae72aa5ec31fcc58cd27e7efd39a526963c9e92c5b664f3075fbdd7eadba.json b/.sqlx/query-824dae72aa5ec31fcc58cd27e7efd39a526963c9e92c5b664f3075fbdd7eadba.json
deleted file mode 100644
index 656e90ae..00000000
--- a/.sqlx/query-824dae72aa5ec31fcc58cd27e7efd39a526963c9e92c5b664f3075fbdd7eadba.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_special_light_cones.*,\n light_cones_text.name\n FROM\n warp_special_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "824dae72aa5ec31fcc58cd27e7efd39a526963c9e92c5b664f3075fbdd7eadba"
-}
diff --git a/.sqlx/query-837d309b4882f2d9fb7673cbe5530ee6ac09324328bf284b086aaa4b3b4e3788.json b/.sqlx/query-837d309b4882f2d9fb7673cbe5530ee6ac09324328bf284b086aaa4b3b4e3788.json
deleted file mode 100644
index 8e3db09b..00000000
--- a/.sqlx/query-837d309b4882f2d9fb7673cbe5530ee6ac09324328bf284b086aaa4b3b4e3788.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_standard_light_cones.*,\n light_cones_text.name\n FROM\n warp_standard_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n warp_standard_light_cones.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "837d309b4882f2d9fb7673cbe5530ee6ac09324328bf284b086aaa4b3b4e3788"
-}
diff --git a/.sqlx/query-85fc1fdf2224f7594e5e6247e9f9dcd95ad19ee9f2279f95651b26623d53d7cd.json b/.sqlx/query-85fc1fdf2224f7594e5e6247e9f9dcd95ad19ee9f2279f95651b26623d53d7cd.json
new file mode 100644
index 00000000..ee307edc
--- /dev/null
+++ b/.sqlx/query-85fc1fdf2224f7594e5e6247e9f9dcd95ad19ee9f2279f95651b26623d53d7cd.json
@@ -0,0 +1,15 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "UPDATE books SET image2 = $2 WHERE id = $1",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "85fc1fdf2224f7594e5e6247e9f9dcd95ad19ee9f2279f95651b26623d53d7cd"
+}
diff --git a/.sqlx/query-8b8eb93b6d709348a1e64b49f587af94d3ff910a1e8957f1bcfd39bcd4345972.json b/.sqlx/query-8b8eb93b6d709348a1e64b49f587af94d3ff910a1e8957f1bcfd39bcd4345972.json
deleted file mode 100644
index 97d3495b..00000000
--- a/.sqlx/query-8b8eb93b6d709348a1e64b49f587af94d3ff910a1e8957f1bcfd39bcd4345972.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_lc_characters.*,\n characters_text.name\n FROM\n warp_lc_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "8b8eb93b6d709348a1e64b49f587af94d3ff910a1e8957f1bcfd39bcd4345972"
-}
diff --git a/.sqlx/query-92d35f71d6160fc1509b5b2aa79fdee2c7b3f4696a247f9247149dff2fa20105.json b/.sqlx/query-92d35f71d6160fc1509b5b2aa79fdee2c7b3f4696a247f9247149dff2fa20105.json
deleted file mode 100644
index 26b3f9d2..00000000
--- a/.sqlx/query-92d35f71d6160fc1509b5b2aa79fdee2c7b3f4696a247f9247149dff2fa20105.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_departure_light_cones.*,\n light_cones_text.name\n FROM\n warp_departure_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n warp_departure_light_cones.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "92d35f71d6160fc1509b5b2aa79fdee2c7b3f4696a247f9247149dff2fa20105"
-}
diff --git a/.sqlx/query-98e515308ca58d71532fbf62a182fca1fd27589dfde611fa740f3ad59ed76fc9.json b/.sqlx/query-98e515308ca58d71532fbf62a182fca1fd27589dfde611fa740f3ad59ed76fc9.json
index 30910b2e..5ffca661 100644
--- a/.sqlx/query-98e515308ca58d71532fbf62a182fca1fd27589dfde611fa740f3ad59ed76fc9.json
+++ b/.sqlx/query-98e515308ca58d71532fbf62a182fca1fd27589dfde611fa740f3ad59ed76fc9.json
@@ -20,26 +20,41 @@
       },
       {
         "ordinal": 3,
-        "name": "name",
+        "name": "comment",
         "type_info": "Text"
       },
       {
         "ordinal": 4,
+        "name": "image1",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 5,
+        "name": "image2",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 6,
+        "name": "name",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 7,
         "name": "percent",
         "type_info": "Float8"
       },
       {
-        "ordinal": 5,
+        "ordinal": 8,
         "name": "series_world",
         "type_info": "Int4"
      },
       {
-        "ordinal": 6,
+        "ordinal": 9,
         "name": "series_name",
         "type_info": "Text"
       },
       {
-        "ordinal": 7,
+        "ordinal": 10,
         "name": "series_world_name",
         "type_info": "Text"
       }
     ],
@@ -54,6 +69,9 @@
       false,
       false,
       false,
+      true,
+      true,
+      true,
       false,
       false,
       false,
diff --git a/.sqlx/query-a6a614c93e338b3e4acfb8c949ae911b5122882d767fb7d48b1c6b16cfecb313.json b/.sqlx/query-a6a614c93e338b3e4acfb8c949ae911b5122882d767fb7d48b1c6b16cfecb313.json
deleted file mode 100644
index 64bdb925..00000000
--- a/.sqlx/query-a6a614c93e338b3e4acfb8c949ae911b5122882d767fb7d48b1c6b16cfecb313.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_standard_light_cones.*,\n light_cones_text.name\n FROM\n warp_standard_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "a6a614c93e338b3e4acfb8c949ae911b5122882d767fb7d48b1c6b16cfecb313"
-}
diff --git a/.sqlx/query-a76946f329d620a3ec7f85ec341e9fdf2ceb8bab62dbcadeea111a294ae65b77.json b/.sqlx/query-a76946f329d620a3ec7f85ec341e9fdf2ceb8bab62dbcadeea111a294ae65b77.json
new file mode 100644
index 00000000..bb0c0114
--- /dev/null
+++ b/.sqlx/query-a76946f329d620a3ec7f85ec341e9fdf2ceb8bab62dbcadeea111a294ae65b77.json
@@ -0,0 +1,60 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT\n warps.*,\n COALESCE(characters_text.name, light_cones_text.name) AS name\n FROM\n warps\n LEFT JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n LEFT JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n warps.id = $1\n AND\n gacha_type = $2\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 1,
+        "name": "uid",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 2,
+        "name": "gacha_type",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "character",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 4,
+        "name": "light_cone",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 5,
+        "name": "timestamp",
+        "type_info": "Timestamp"
+      },
+      {
+        "ordinal": 6,
+        "name": "name",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      true,
+      true,
+      false,
+      null
+    ]
+  },
+  "hash": "a76946f329d620a3ec7f85ec341e9fdf2ceb8bab62dbcadeea111a294ae65b77"
+}
diff --git a/.sqlx/query-b4dcdeace3f76113834d0bb2b03f433a153c9134f0038f9dedef3d79ffab129a.json b/.sqlx/query-b4dcdeace3f76113834d0bb2b03f433a153c9134f0038f9dedef3d79ffab129a.json
new file mode 100644
index 00000000..ff58e889
--- /dev/null
+++ b/.sqlx/query-b4dcdeace3f76113834d0bb2b03f433a153c9134f0038f9dedef3d79ffab129a.json
@@ -0,0 +1,14 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n DELETE FROM\n sessions\n WHERE\n uuid\n IN\n (SELECT uuid FROM sessions WHERE username = $1 ORDER BY expiry DESC OFFSET 9)\n ",
+  "describe": {
+    "columns": [],
+    "parameters": {
+      "Left": [
+        "Text"
+      ]
+    },
+    "nullable": []
+  },
+  "hash": "b4dcdeace3f76113834d0bb2b03f433a153c9134f0038f9dedef3d79ffab129a"
+}
diff --git a/.sqlx/query-bd034e6f3b7a52700f58d4a7f495969cdfc807ce2417563b703e9afc38d5599c.json b/.sqlx/query-bd034e6f3b7a52700f58d4a7f495969cdfc807ce2417563b703e9afc38d5599c.json
deleted file mode 100644
index 50ef5770..00000000
--- a/.sqlx/query-bd034e6f3b7a52700f58d4a7f495969cdfc807ce2417563b703e9afc38d5599c.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_standard_characters(id, uid, character, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n character = EXCLUDED.character,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "bd034e6f3b7a52700f58d4a7f495969cdfc807ce2417563b703e9afc38d5599c"
-}
diff --git a/.sqlx/query-bd86a7435e9a119062d87d537b90e94258b923aa68d0d7b55973085fa101fd17.json b/.sqlx/query-bd86a7435e9a119062d87d537b90e94258b923aa68d0d7b55973085fa101fd17.json
deleted file mode 100644
index 9cd8e6c8..00000000
--- a/.sqlx/query-bd86a7435e9a119062d87d537b90e94258b923aa68d0d7b55973085fa101fd17.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_lc_light_cones.*,\n light_cones_text.name\n FROM\n warp_lc_light_cones\n INNER JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n warp_lc_light_cones.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "light_cone",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "bd86a7435e9a119062d87d537b90e94258b923aa68d0d7b55973085fa101fd17"
-}
diff --git a/.sqlx/query-bf3bd841cc3db181d4e4602f0403cb299a18216965539c2efa9fda8cdbc163ae.json b/.sqlx/query-bf3bd841cc3db181d4e4602f0403cb299a18216965539c2efa9fda8cdbc163ae.json
deleted file mode 100644
index d744d01f..00000000
--- a/.sqlx/query-bf3bd841cc3db181d4e4602f0403cb299a18216965539c2efa9fda8cdbc163ae.json
+++ /dev/null
@@ -1,48 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_standard_characters.*,\n characters_text.name\n FROM\n warp_standard_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n WHERE\n warp_standard_characters.id = $1\n AND\n uid = $2\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "bf3bd841cc3db181d4e4602f0403cb299a18216965539c2efa9fda8cdbc163ae"
-}
diff --git a/.sqlx/query-c6b359088f23140b6b552a970e6d2eebeabb05e95738610de1afa6fd9a419ee3.json b/.sqlx/query-c6b359088f23140b6b552a970e6d2eebeabb05e95738610de1afa6fd9a419ee3.json
deleted file mode 100644
index eb5c04f8..00000000
--- a/.sqlx/query-c6b359088f23140b6b552a970e6d2eebeabb05e95738610de1afa6fd9a419ee3.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_lc_light_cones(id, uid, light_cone, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n light_cone = EXCLUDED.light_cone,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "c6b359088f23140b6b552a970e6d2eebeabb05e95738610de1afa6fd9a419ee3"
-}
diff --git a/.sqlx/query-da5e9ce18fe703c120cdab639725abea665748dcf7404a5e187667eae9be1779.json b/.sqlx/query-da5e9ce18fe703c120cdab639725abea665748dcf7404a5e187667eae9be1779.json
new file mode 100644
index 00000000..8b0c702b
--- /dev/null
+++ b/.sqlx/query-da5e9ce18fe703c120cdab639725abea665748dcf7404a5e187667eae9be1779.json
@@ -0,0 +1,20 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT\n id\n FROM\n books\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Int8"
+      }
+    ],
+    "parameters": {
+      "Left": []
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "da5e9ce18fe703c120cdab639725abea665748dcf7404a5e187667eae9be1779"
+}
diff --git a/.sqlx/query-eebce2d863e45079e7a2aefa5f39c92b17964163ad46128dfb21a28fac3b1623.json b/.sqlx/query-e17cc6da24815887b8469cda77965de32c0a738527e60ac0e8f45a1613693306.json
similarity index 81%
rename from .sqlx/query-eebce2d863e45079e7a2aefa5f39c92b17964163ad46128dfb21a28fac3b1623.json
rename to .sqlx/query-e17cc6da24815887b8469cda77965de32c0a738527e60ac0e8f45a1613693306.json
index 9380129d..3a7c67fd 100644
--- a/.sqlx/query-eebce2d863e45079e7a2aefa5f39c92b17964163ad46128dfb21a28fac3b1623.json
+++ b/.sqlx/query-e17cc6da24815887b8469cda77965de32c0a738527e60ac0e8f45a1613693306.json
@@ -1,6 +1,6 @@
 {
   "db_name": "PostgreSQL",
-  "query": "\n SELECT\n books.*,\n books_text.name,\n percent,\n book_series.world series_world,\n book_series_text.name series_name,\n book_series_worlds_text.name series_world_name\n FROM\n books\n NATURAL INNER JOIN\n books_percent\n INNER JOIN\n books_text\n ON\n books.id = books_text.id AND books_text.language = $1\n INNER JOIN\n book_series\n ON\n series = book_series.id\n INNER JOIN\n book_series_text\n ON\n series = book_series_text.id AND book_series_text.language = $1\n INNER JOIN\n book_series_worlds_text\n ON\n book_series.world = book_series_worlds_text.id AND book_series_worlds_text.language = $1\n ORDER BY\n id\n ",
+  "query": "\n SELECT\n books.*,\n books_text.name,\n percent,\n book_series.world series_world,\n book_series_text.name series_name,\n book_series_worlds_text.name series_world_name\n FROM\n books\n NATURAL INNER JOIN\n books_percent\n INNER JOIN\n books_text\n ON\n books.id = books_text.id AND books_text.language = $1\n INNER JOIN\n book_series\n ON\n series = book_series.id\n INNER JOIN\n book_series_text\n ON\n series = book_series_text.id AND book_series_text.language = $1\n INNER JOIN\n book_series_worlds_text\n ON\n book_series.world = book_series_worlds_text.id AND book_series_worlds_text.language = $1\n ORDER BY\n world, series, id\n ",
   "describe": {
     "columns": [
       {
@@ -20,26 +20,41 @@
       },
       {
         "ordinal": 3,
-        "name": "name",
+        "name": "comment",
         "type_info": "Text"
       },
       {
         "ordinal": 4,
+        "name": "image1",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 5,
+        "name": "image2",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 6,
+        "name": "name",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 7,
         "name": "percent",
         "type_info": "Float8"
       },
       {
-        "ordinal": 5,
+        "ordinal": 8,
         "name": "series_world",
         "type_info": "Int4"
       },
       {
-        "ordinal": 6,
+        "ordinal": 9,
         "name": "series_name",
         "type_info": "Text"
       },
       {
-        "ordinal": 7,
+        "ordinal": 10,
         "name": "series_world_name",
         "type_info": "Text"
       }
     ],
@@ -53,6 +68,9 @@
       false,
       false,
       false,
+      true,
+      true,
+      true,
       false,
       false,
       false,
@@ -60,5 +78,5 @@
       false
     ]
   },
-  "hash": "eebce2d863e45079e7a2aefa5f39c92b17964163ad46128dfb21a28fac3b1623"
+  "hash": "e17cc6da24815887b8469cda77965de32c0a738527e60ac0e8f45a1613693306"
 }
diff --git a/.sqlx/query-e7b0f7e736692bfced818fea852c9420f12f76be6e6450a1fa6ad2dd00decc22.json b/.sqlx/query-e7b0f7e736692bfced818fea852c9420f12f76be6e6450a1fa6ad2dd00decc22.json
deleted file mode 100644
index 030b8aaa..00000000
--- a/.sqlx/query-e7b0f7e736692bfced818fea852c9420f12f76be6e6450a1fa6ad2dd00decc22.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO\n warp_standard_light_cones(id, uid, light_cone, timestamp)\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT\n (id, uid)\n DO UPDATE SET\n light_cone = EXCLUDED.light_cone,\n timestamp = EXCLUDED.timestamp\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Int8",
-        "Int4",
-        "Timestamp"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "e7b0f7e736692bfced818fea852c9420f12f76be6e6450a1fa6ad2dd00decc22"
-}
diff --git a/.sqlx/query-e9befc3e05f2e0efb257ac7d7ca5d69fac6982d30eb3254da8e5135bf3b54ae2.json b/.sqlx/query-e9befc3e05f2e0efb257ac7d7ca5d69fac6982d30eb3254da8e5135bf3b54ae2.json
new file mode 100644
index 00000000..6ec2d33a
--- /dev/null
+++ b/.sqlx/query-e9befc3e05f2e0efb257ac7d7ca5d69fac6982d30eb3254da8e5135bf3b54ae2.json
@@ -0,0 +1,60 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "\n SELECT\n warps.*,\n COALESCE(characters_text.name, light_cones_text.name) AS name\n FROM\n warps\n LEFT JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $3\n LEFT JOIN\n light_cones_text\n ON\n light_cones_text.id = light_cone AND light_cones_text.language = $3\n WHERE\n uid = $1\n AND\n gacha_type = $2\n ORDER BY\n id\n ",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "id",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 1,
+        "name": "uid",
+        "type_info": "Int8"
+      },
+      {
+        "ordinal": 2,
+        "name": "gacha_type",
+        "type_info": "Text"
+      },
+      {
+        "ordinal": 3,
+        "name": "character",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 4,
+        "name": "light_cone",
+        "type_info": "Int4"
+      },
+      {
+        "ordinal": 5,
+        "name": "timestamp",
+        "type_info": "Timestamp"
+      },
+      {
+        "ordinal": 6,
+        "name": "name",
+        "type_info": "Text"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int8",
+        "Text",
+        "Text"
+      ]
+    },
+    "nullable": [
+      false,
+      false,
+      false,
+      true,
+      true,
+      false,
+      null
+    ]
+  },
+  "hash": "e9befc3e05f2e0efb257ac7d7ca5d69fac6982d30eb3254da8e5135bf3b54ae2"
+}
diff --git a/.sqlx/query-ea7158526eac5fac6de2de7518c5310c51413b5aad22b494bfb582b3da82deca.json b/.sqlx/query-ea7158526eac5fac6de2de7518c5310c51413b5aad22b494bfb582b3da82deca.json
deleted file mode 100644
index df9ae030..00000000
--- a/.sqlx/query-ea7158526eac5fac6de2de7518c5310c51413b5aad22b494bfb582b3da82deca.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n SELECT\n warp_standard_characters.*,\n characters_text.name\n FROM\n warp_standard_characters\n INNER JOIN\n characters_text\n ON\n characters_text.id = character AND characters_text.language = $2\n WHERE\n uid = $1\n ORDER BY\n timestamp\n ",
-  "describe": {
-    "columns": [
-      {
-        "ordinal": 0,
-        "name": "id",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 1,
-        "name": "uid",
-        "type_info": "Int8"
-      },
-      {
-        "ordinal": 2,
-        "name": "character",
-        "type_info": "Int4"
-      },
-      {
-        "ordinal": 3,
-        "name": "timestamp",
-        "type_info": "Timestamp"
-      },
-      {
-        "ordinal": 4,
-        "name": "name",
-        "type_info": "Text"
-      }
-    ],
-    "parameters": {
-      "Left": [
-        "Int8",
-        "Text"
-      ]
-    },
-    "nullable": [
-      false,
-      false,
-      false,
-      false,
-      false
-    ]
-  },
-  "hash": "ea7158526eac5fac6de2de7518c5310c51413b5aad22b494bfb582b3da82deca"
-}
diff --git a/Cargo.toml b/Cargo.toml
index 71bd4be9..493be71c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,6 +12,7 @@ actix-web = "4.3.1"
 anyhow = "1.0.75"
 async-rwlock = "1.3.0"
 async-trait = "0.1.73"
+calamine = "0.22.0"
 chrono = { version = "0.4.26", features = ["serde"] }
 csv = "1.2.2"
 dotenv = "0.15.0"
@@ -21,6 +22,7 @@ image = "0.24.7"
 indexmap = "2.0.0"
 lettre = { version = "0.10.4", features = ["tokio1-native-tls"] }
 log = "0.4.20"
+quick-xml = { version = "0.30.0", features = ["serialize"] }
 rand = "0.8.5"
 regex = "1.9.3"
 reqwest = { version = "0.11.18", features = ["json"] }
diff --git a/migrations/20230907085011_users_references.sql b/migrations/20230907085011_users_references.sql
new file mode 100644
index 00000000..13af8b1b
--- /dev/null
+++ b/migrations/20230907085011_users_references.sql
@@ -0,0 +1,14 @@
+ALTER TABLE admins DROP CONSTRAINT admins_username_fkey;
+ALTER TABLE admins ADD CONSTRAINT admins_username_fkey FOREIGN KEY (username) REFERENCES users ON DELETE CASCADE ON UPDATE CASCADE;
+
+ALTER TABLE sessions DROP CONSTRAINT sessions_new_username_fkey;
+ALTER TABLE sessions ADD CONSTRAINT sessions_username_fkey FOREIGN KEY (username) REFERENCES users ON DELETE CASCADE ON UPDATE CASCADE;
+
+ALTER TABLE users_achievements DROP CONSTRAINT completed_username_fkey;
+ALTER TABLE users_achievements ADD CONSTRAINT users_achievements_username_fkey FOREIGN KEY (username) REFERENCES users ON DELETE CASCADE ON UPDATE CASCADE;
+
+ALTER TABLE users_books DROP CONSTRAINT users_books_username_fkey;
+ALTER TABLE users_books ADD CONSTRAINT users_books_username_fkey FOREIGN KEY (username) REFERENCES users ON DELETE CASCADE ON UPDATE CASCADE;
+
+ALTER TABLE verifications DROP CONSTRAINT verifications_username_fkey;
+ALTER TABLE verifications ADD CONSTRAINT verifications_username_fkey FOREIGN KEY (username) REFERENCES users ON DELETE CASCADE ON UPDATE CASCADE;
diff --git a/migrations/20230912034344_books.sql b/migrations/20230912034344_books.sql
new file mode 100644
index 00000000..4f2e9a77
--- /dev/null
+++ b/migrations/20230912034344_books.sql
@@ -0,0 +1,3 @@
+ALTER TABLE books ADD COLUMN comment TEXT;
+ALTER TABLE books ADD COLUMN image1 TEXT;
+ALTER TABLE books ADD COLUMN image2 TEXT;
diff --git a/migrations/20230921163833_warps.sql b/migrations/20230921163833_warps.sql
new file mode 100644
index 00000000..323374bc
--- /dev/null
+++ b/migrations/20230921163833_warps.sql
@@ -0,0 +1,17 @@
+DROP TABLE warp_standard_characters;
+DROP TABLE warp_standard_light_cones;
+DROP TABLE warp_departure_characters;
+DROP TABLE warp_departure_light_cones;
+DROP TABLE warp_special_characters;
+DROP TABLE warp_special_light_cones;
+DROP TABLE warp_lc_characters;
+DROP TABLE warp_lc_light_cones;
+
+CREATE TABLE IF NOT EXISTS warps (
+    id INT8 NOT NULL PRIMARY KEY,
+    uid INT8 NOT NULL REFERENCES mihomo ON DELETE CASCADE,
+    gacha_type TEXT NOT NULL,
+    character INT4 REFERENCES characters ON DELETE CASCADE,
+    light_cone INT4 REFERENCES light_cones ON DELETE CASCADE,
+    timestamp TIMESTAMP NOT NULL
+);
\ No newline at end of file
diff --git a/migrations/20230921172629_warps_primary_key.sql b/migrations/20230921172629_warps_primary_key.sql
new file mode 100644
index 00000000..5c8683c8
--- /dev/null
+++ b/migrations/20230921172629_warps_primary_key.sql
@@ -0,0 +1,3 @@
+ALTER TABLE warps DROP CONSTRAINT warps_pkey;
+
+ALTER TABLE warps ADD PRIMARY KEY (id, gacha_type);
\ No newline at end of file
diff --git a/src/api/achievements/mod.rs b/src/api/achievements/mod.rs
index 80d275dc..e32e2d9a 100644
--- a/src/api/achievements/mod.rs
+++ b/src/api/achievements/mod.rs
@@ -26,8 +26,8 @@ use super::Language;
 struct ApiDoc;
 
 #[derive(Display, EnumString, Serialize, Deserialize, ToSchema)]
-#[strum(serialize_all = "lowercase")]
-#[serde(rename_all = "lowercase")]
+#[strum(serialize_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
 enum Difficulty {
     Easy,
     Medium,
diff --git a/src/api/books/id/comment/mod.rs b/src/api/books/id/comment/mod.rs
new file mode 100644
index 00000000..d57abbea
--- /dev/null
+++ b/src/api/books/id/comment/mod.rs
@@ -0,0 +1,94 @@
+use actix_session::Session;
+use actix_web::{delete, put, web, HttpResponse, Responder};
+use serde::Deserialize;
+use sqlx::PgPool;
+use utoipa::{OpenApi, ToSchema};
+
+use crate::{api::ApiResult, database};
+
+#[derive(OpenApi)]
+#[openapi(
+    tags((name = "books/{id}/comment")),
+    paths(put_book_comment, delete_book_comment),
+    components(schemas(CommentUpdate))
+)]
+struct ApiDoc;
+
+#[derive(Deserialize, ToSchema)]
+struct CommentUpdate {
+    comment: String,
+}
+
+pub fn openapi() -> utoipa::openapi::OpenApi {
+    ApiDoc::openapi()
+}
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(put_book_comment).service(delete_book_comment);
+}
+
+#[utoipa::path(
+    tag = "books/{id}/comment",
+    put,
+    path = "/api/books/{id}/comment",
+    request_body = CommentUpdate,
+    responses(
+        (status = 200, description = "Updated comment"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[put("/api/books/{id}/comment")]
+async fn put_book_comment(
+    session: Session,
+    id: web::Path<i64>,
+    comment_update: web::Json<CommentUpdate>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::update_book_comment(*id, &comment_update.comment, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
+
+#[utoipa::path(
+    tag = "books/{id}/comment",
+    delete,
+    path = "/api/books/{id}/comment",
+    responses(
+        (status = 200, description = "Deleted comment"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[delete("/api/books/{id}/comment")]
+async fn delete_book_comment(
+    session: Session,
+    id: web::Path<i64>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::delete_book_comment(*id, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
diff --git a/src/api/books/id/image1/mod.rs b/src/api/books/id/image1/mod.rs
new file mode 100644
index 00000000..de001189
--- /dev/null
+++ b/src/api/books/id/image1/mod.rs
@@ -0,0 +1,94 @@
+use actix_session::Session;
+use actix_web::{delete, put, web, HttpResponse, Responder};
+use serde::Deserialize;
+use sqlx::PgPool;
+use utoipa::{OpenApi, ToSchema};
+
+use crate::{api::ApiResult, database};
+
+#[derive(OpenApi)]
+#[openapi(
+    tags((name = "books/{id}/image1")),
+    paths(put_book_image1, delete_book_image1),
+    components(schemas(Image1Update))
+)]
+struct ApiDoc;
+
+#[derive(Deserialize, ToSchema)]
+struct Image1Update {
+    image1: String,
+}
+
+pub fn openapi() -> utoipa::openapi::OpenApi {
+    ApiDoc::openapi()
+}
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(put_book_image1).service(delete_book_image1);
+}
+
+#[utoipa::path(
+    tag = "books/{id}/image1",
+    put,
+    path = "/api/books/{id}/image1",
+    request_body = Image1Update,
+    responses(
+        (status = 200, description = "Updated image1"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[put("/api/books/{id}/image1")]
+async fn put_book_image1(
+    session: Session,
+    id: web::Path<i64>,
+    image1_update: web::Json<Image1Update>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::update_book_image1(*id, &image1_update.image1, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
+
+#[utoipa::path(
+    tag = "books/{id}/image1",
+    delete,
+    path = "/api/books/{id}/image1",
+    responses(
+        (status = 200, description = "Deleted image1"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[delete("/api/books/{id}/image1")]
+async fn delete_book_image1(
+    session: Session,
+    id: web::Path<i64>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::delete_book_image1(*id, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
diff --git a/src/api/books/id/image2/mod.rs b/src/api/books/id/image2/mod.rs
new file mode 100644
index 00000000..e801c1f5
--- /dev/null
+++ b/src/api/books/id/image2/mod.rs
@@ -0,0 +1,94 @@
+use actix_session::Session;
+use actix_web::{delete, put, web, HttpResponse, Responder};
+use serde::Deserialize;
+use sqlx::PgPool;
+use utoipa::{OpenApi, ToSchema};
+
+use crate::{api::ApiResult, database};
+
+#[derive(OpenApi)]
+#[openapi(
+    tags((name = "books/{id}/image2")),
+    paths(put_book_image2, delete_book_image2),
+    components(schemas(Image2Update))
+)]
+struct ApiDoc;
+
+#[derive(Deserialize, ToSchema)]
+struct Image2Update {
+    image2: String,
+}
+
+pub fn openapi() -> utoipa::openapi::OpenApi {
+    ApiDoc::openapi()
+}
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(put_book_image2).service(delete_book_image2);
+}
+
+#[utoipa::path(
+    tag = "books/{id}/image2",
+    put,
+    path = "/api/books/{id}/image2",
+    request_body = Image2Update,
+    responses(
+        (status = 200, description = "Updated image2"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[put("/api/books/{id}/image2")]
+async fn put_book_image2(
+    session: Session,
+    id: web::Path<i64>,
+    image2_update: web::Json<Image2Update>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::update_book_image2(*id, &image2_update.image2, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
+
+#[utoipa::path(
+    tag = "books/{id}/image2",
+    delete,
+    path = "/api/books/{id}/image2",
+    responses(
+        (status = 200, description = "Deleted image2"),
+        (status = 403, description = "Not an admin"),
+    ),
+    security(("admin" = []))
+)]
+#[delete("/api/books/{id}/image2")]
+async fn delete_book_image2(
+    session: Session,
+    id: web::Path<i64>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    database::delete_book_image2(*id, &pool).await?;
+
+    Ok(HttpResponse::Ok().finish())
+}
diff --git a/src/api/books/id/mod.rs b/src/api/books/id/mod.rs
index 2bb19a2e..844be4ec 100644
--- a/src/api/books/id/mod.rs
+++ b/src/api/books/id/mod.rs
@@ -1,3 +1,7 @@
+mod comment;
+mod image1;
+mod image2;
+
 use actix_web::{get, web, HttpResponse, Responder};
 use sqlx::PgPool;
 use utoipa::OpenApi;
@@ -15,11 +19,18 @@ use crate::{
 struct ApiDoc;
 
 pub fn openapi() -> utoipa::openapi::OpenApi {
-    ApiDoc::openapi()
+    let mut openapi = ApiDoc::openapi();
+    openapi.merge(comment::openapi());
+    openapi.merge(image1::openapi());
+    openapi.merge(image2::openapi());
+    openapi
 }
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(get_book);
+    cfg.service(get_book)
+        .configure(comment::configure)
+        .configure(image1::configure)
+        .configure(image2::configure);
 }
 
 #[utoipa::path(
diff --git a/src/api/books/mod.rs b/src/api/books/mod.rs
index 22462ddb..9b3887b1 100644
--- a/src/api/books/mod.rs
+++ b/src/api/books/mod.rs
@@ -26,8 +26,8 @@ use super::Language;
 struct ApiDoc;
 
 #[derive(Display, EnumString, Serialize, Deserialize, ToSchema)]
-#[strum(serialize_all = "lowercase")]
-#[serde(rename_all = "lowercase")]
+#[strum(serialize_all = "snake_case")]
+#[serde(rename_all = "snake_case")]
 enum Difficulty {
     Easy,
     Medium,
@@ -43,11 +43,24 @@ struct Book {
     series_world_name: String,
     series_inside: i32,
     name: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    comment: Option<String>,
+    images: Vec<String>,
     percent: f64,
 }
 
 impl From<database::DbBook> for Book {
     fn from(db_book: database::DbBook) -> Self {
+        let mut images = Vec::new();
+
+        if let Some(image) = db_book.image1 {
+            images.push(image);
+        }
+
+        if let Some(image) = db_book.image2 {
+            images.push(image);
+        }
+
         Book {
             id: db_book.id,
             series: db_book.series,
@@ -56,6 +69,8 @@ impl From<database::DbBook> for Book {
             series_world_name: db_book.series_world_name,
             series_inside: db_book.series_inside,
             name: db_book.name,
+            comment: db_book.comment,
+            images,
             percent: db_book.percent,
         }
     }
diff --git a/src/api/free_jade_alert/mod.rs b/src/api/free_jade_alert/mod.rs
index 37f295a0..52287d6a 100644
--- a/src/api/free_jade_alert/mod.rs
+++ b/src/api/free_jade_alert/mod.rs
@@ -48,7 +48,7 @@ struct Reward {
 }
 
 #[derive(Deserialize, ToSchema)]
-#[serde(rename_all = "lowercase")]
+#[serde(rename_all = "snake_case")]
 enum RewardType {
     Jade,
     Credit,
diff --git a/src/api/import/mod.rs b/src/api/import_achievements/mod.rs
similarity index 94%
rename from src/api/import/mod.rs
rename to src/api/import_achievements/mod.rs
index 564935c6..39165d20 100644
--- a/src/api/import/mod.rs
+++ b/src/api/import_achievements/mod.rs
@@ -11,8 +11,8 @@ use crate::{api::ApiResult, database};
 
 #[derive(OpenApi)]
 #[openapi(
-    tags((name = "import")),
-    paths(import),
+    tags((name = "import-achievements")),
+    paths(import_achievements),
     components(schemas(
         File,
     ))
@@ -24,7 +24,7 @@ pub fn openapi() -> utoipa::openapi::OpenApi {
 }
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(import);
+    cfg.service(import_achievements);
 }
 
 #[derive(MultipartForm, ToSchema)]
@@ -56,7 +56,7 @@ struct Achievement {
 #[utoipa::path(
     tag = "pinned",
     post,
-    path = "/api/import",
+    path = "/api/import-achievements",
     request_body(content = File, content_type = "multipart/form-data"),
     responses(
         (status = 200, description = "Successfully imported"),
@@ -64,8 +64,8 @@ struct Achievement {
         (status = 403, description = "Not an admin")
     )
 )]
-#[post("/api/import")]
-async fn import(
+#[post("/api/import-achievements")]
+async fn import_achievements(
     session: Session,
     file: MultipartForm<File>,
     pool: web::Data<PgPool>,
diff --git a/src/api/import_books/mod.rs b/src/api/import_books/mod.rs
new file mode 100644
index 00000000..1d8af37b
--- /dev/null
+++ b/src/api/import_books/mod.rs
@@ -0,0 +1,123 @@
+use std::collections::HashMap;
+
+use actix_multipart::form::{tempfile::TempFile, MultipartForm};
+use actix_session::Session;
+use actix_web::{post, web, HttpResponse, Responder};
+use calamine::{DataType, Reader};
+use sqlx::PgPool;
+use utoipa::{OpenApi, ToSchema};
+
+use crate::{api::ApiResult, database};
+
+#[derive(OpenApi)]
+#[openapi(
+    tags((name = "import-books")),
+    paths(import_books),
+    components(schemas(
+        File,
+    ))
+)]
+struct ApiDoc;
+
+pub fn openapi() -> utoipa::openapi::OpenApi {
+    ApiDoc::openapi()
+}
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(import_books);
+}
+
+#[derive(MultipartForm, ToSchema)]
+struct File {
+    #[schema(value_type = String, format = Binary)]
+    file: TempFile,
+}
+
+#[utoipa::path(
+    tag = "pinned",
+    post,
+    path = "/api/import-books",
+    request_body(content = File, content_type = "multipart/form-data"),
+    responses(
+        (status = 200, description = "Successfully imported"),
+        (status = 400, description = "Not logged in"),
+        (status = 403, description = "Not an admin")
+    )
+)]
+#[post("/api/import-books")]
+async fn import_books(
+    session: Session,
+    file: MultipartForm<File>,
+    pool: web::Data<PgPool>,
+) -> ApiResult<impl Responder> {
+    let Ok(Some(username)) = session.get::<String>("username") else {
+        return Ok(HttpResponse::BadRequest().finish());
+    };
+
+    if database::get_admin_by_username(&username, &pool)
+        .await
+        .is_err()
+    {
+        return Ok(HttpResponse::Forbidden().finish());
+    }
+
+    let mut workbook = calamine::open_workbook_auto(&file.file.file)?;
+
+    let mut images =
HashMap::new(); + if let Some(Ok(r)) = workbook.worksheet_formula("Bookshelf") { + let (sy, _) = r.start().unwrap_or_default(); + + let height = r.height(); + + for y in 0..height { + let image1 = r.get((y, 0)).unwrap().clone(); + let image2 = r.get((y, 6)).unwrap().clone(); + + if !image1.is_empty() && !image2.is_empty() { + let image1 = image1[7..image1.len() - 2].to_string(); + let image2 = image2[7..image2.len() - 2].to_string(); + + images.insert(sy as usize + y, (image1, image2)); + } + } + } + + for (y, row) in workbook + .worksheet_range("Bookshelf") + .unwrap()? + .rows() + .enumerate() + .skip(1) + .filter(|(_, data)| data[7] != DataType::Empty) + { + let ids = match &row[7] { + &DataType::Int(id) => vec![id], + &DataType::Float(id) => vec![id as i64], + DataType::String(ids) => ids.lines().map(|id| id.parse().unwrap()).collect(), + _ => return Err(anyhow::anyhow!("Id in row {y} is in a wrong format").into()), + }; + + let comment = match &row[5] { + DataType::String(s) => s, + DataType::Empty => "", + _ => { + return Err(anyhow::anyhow!("How to obtain in row {y} is in a wrong format").into()) + } + }; + + for id in ids { + if !comment.is_empty() { + database::update_book_comment(id, comment, &pool).await?; + } else { + database::delete_book_comment(id, &pool).await?; + } + + if let Some((image1, image2)) = images.get(&(y + 1)) { + database::update_book_image1(id, image1, &pool).await?; + database::update_book_image2(id, image2, &pool).await?; + } + } + } + + Ok(HttpResponse::Ok().finish()) +} diff --git a/src/api/mod.rs b/src/api/mod.rs index b82a5250..1962bd8a 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -6,13 +6,15 @@ mod books; mod characters; mod community_tier_list; mod free_jade_alert; -mod import; +mod import_achievements; +mod import_books; mod languages; mod light_cones; mod mihomo; mod pages; mod scores; mod select_all; +mod sitemap; mod users; mod warps; @@ -65,8 +67,8 @@ struct LanguageParams { Clone, Copy, )] -#[strum(serialize_all = "lowercase")] -#[serde(rename_all = "lowercase")] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] enum Language { Chs, Cht, @@ -120,13 +122,15 @@ pub fn openapi() -> utoipa::openapi::OpenApi { openapi.merge(characters::openapi()); openapi.merge(community_tier_list::openapi()); openapi.merge(free_jade_alert::openapi()); - openapi.merge(import::openapi()); + openapi.merge(import_achievements::openapi()); + openapi.merge(import_books::openapi()); openapi.merge(languages::openapi()); openapi.merge(light_cones::openapi()); openapi.merge(mihomo::openapi()); openapi.merge(pages::openapi()); openapi.merge(scores::openapi()); openapi.merge(select_all::openapi()); + openapi.merge(sitemap::openapi()); openapi.merge(users::openapi()); openapi.merge(warps::openapi()); openapi @@ -141,13 +145,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .configure(characters::configure) .configure(community_tier_list::configure) .configure(free_jade_alert::configure) - .configure(import::configure) + .configure(import_achievements::configure) + .configure(import_books::configure) .configure(languages::configure) .configure(light_cones::configure) .configure(mihomo::configure) .configure(pages::configure) .configure(scores::configure) .configure(select_all::configure) + .configure(sitemap::configure) .configure(users::configure) .configure(warps::configure); } @@ -157,3 +163,7 @@ pub fn cache_achievement_tracker( ) -> web::Data { pages::cache_achievement_tracker(pool) } + +pub fn cache_book_tracker(pool: PgPool) -> 
web::Data { + pages::cache_book_tracker(pool) +} diff --git a/src/api/pages/achievement_tracker/mod.rs b/src/api/pages/achievement_tracker/mod.rs index 55011121..eddd1618 100644 --- a/src/api/pages/achievement_tracker/mod.rs +++ b/src/api/pages/achievement_tracker/mod.rs @@ -18,7 +18,7 @@ use crate::{ }; #[derive(OpenApi)] -#[openapi(paths(get_achievemenent_tracker))] +#[openapi(paths(get_achievement_tracker))] struct ApiDoc; pub fn openapi() -> utoipa::openapi::OpenApi { @@ -26,7 +26,7 @@ pub fn openapi() -> utoipa::openapi::OpenApi { } pub fn configure(cfg: &mut web::ServiceConfig) { - cfg.service(get_achievemenent_tracker); + cfg.service(get_achievement_tracker); } #[derive(Default)] @@ -71,8 +71,8 @@ struct Achievement { } #[derive(EnumString, Serialize)] -#[strum(serialize_all = "lowercase")] -#[serde(rename_all = "lowercase")] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] enum Difficulty { Easy, Medium, @@ -217,7 +217,7 @@ async fn update( ) )] #[get("/api/pages/achievement-tracker", guard = "private")] -async fn get_achievemenent_tracker( +async fn get_achievement_tracker( language_params: web::Query, achievement_tracker_cache: web::Data, ) -> ApiResult { diff --git a/src/api/pages/book_tracker/mod.rs b/src/api/pages/book_tracker/mod.rs new file mode 100644 index 00000000..4e30c24a --- /dev/null +++ b/src/api/pages/book_tracker/mod.rs @@ -0,0 +1,193 @@ +use std::{ + collections::HashMap, + time::{Duration, Instant}, +}; + +use actix_web::{get, web, HttpResponse, Responder}; +use anyhow::Result; +use async_rwlock::RwLock; +use indexmap::IndexMap; +use serde::Serialize; +use sqlx::PgPool; +use strum::IntoEnumIterator; +use utoipa::OpenApi; + +use crate::{ + api::{private, ApiResult, Language, LanguageParams}, + database, +}; + +#[derive(OpenApi)] +#[openapi(paths(get_book_tracker))] +struct ApiDoc; + +pub fn openapi() -> utoipa::openapi::OpenApi { + ApiDoc::openapi() +} + +pub fn configure(cfg: &mut web::ServiceConfig) { + cfg.service(get_book_tracker); +} + +#[derive(Default)] +pub struct BookTrackerCache { + book_tracker_map: RwLock>, +} + +#[derive(Default, Serialize)] +struct BookTracker { + book_count: usize, + user_count: i64, + language: Language, + worlds: Vec, +} + +#[derive(Serialize)] +struct World { + world: String, + book_count: usize, + series: Vec, +} + +#[derive(Serialize)] +struct Series { + series: String, + book_count: usize, + books: Vec, +} + +#[derive(Serialize)] +struct Book { + id: i64, + series: i32, + series_name: String, + series_world: i32, + series_world_name: String, + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + comment: Option, + percent: f64, +} + +impl From for Book { + fn from(db_book: database::DbBook) -> Self { + Book { + id: db_book.id, + series: db_book.series, + series_name: db_book.series_name.clone(), + series_world: db_book.series_world, + series_world_name: db_book.series_world_name, + name: db_book.name.clone(), + comment: db_book.comment.clone(), + percent: db_book.percent, + } + } +} + +pub fn cache(pool: PgPool) -> web::Data { + let book_tracker_cache = web::Data::new(BookTrackerCache::default()); + + { + let book_tracker_cache = book_tracker_cache.clone(); + + tokio::spawn(async move { + let mut interval = tokio::time::interval(Duration::from_secs(60)); + + loop { + interval.tick().await; + + let start = Instant::now(); + + if let Err(e) = update(&book_tracker_cache, &pool).await { + log::error!( + "Book Tracker update failed with {e} in {}s", + start.elapsed().as_secs_f64() + ); + } 
else { + log::info!( + "Book Tracker update succeeded in {}s", + start.elapsed().as_secs_f64() + ); + } + } + }); + } + + book_tracker_cache +} + +async fn update(book_tracker_cache: &web::Data, pool: &PgPool) -> Result<()> { + let mut book_tracker_map = HashMap::new(); + + for language in Language::iter() { + let db_books = database::get_books(&language.to_string(), pool).await?; + + let mut worlds: IndexMap>> = IndexMap::new(); + + for db_book in db_books { + worlds + .entry(db_book.series_world_name.clone()) + .or_default() + .entry(db_book.series_name.clone()) + .or_default() + .push(Book::from(db_book)); + } + + let worlds = worlds + .into_iter() + .map(|(world, series)| { + let series = series + .into_iter() + .map(|(series, books)| Series { + series, + book_count: books.len(), + books, + }) + .collect::>(); + + let book_count = series.iter().map(|bs| bs.book_count).sum(); + + World { + world, + book_count, + series, + } + }) + .collect::>(); + + let book_count = worlds.iter().map(|bw| bw.book_count).sum(); + let user_count = database::get_users_books_user_count(pool).await?; + + let book_tracker = BookTracker { + book_count, + user_count, + language, + worlds, + }; + + book_tracker_map.insert(language, book_tracker); + } + + *book_tracker_cache.book_tracker_map.write().await = book_tracker_map; + + Ok(()) +} + +#[utoipa::path( + tag = "pages", + get, + path = "/api/pages/book-tracker", + params(LanguageParams), + security(("api_key" = [])), + responses( + (status = 200, description = "BookTracker"), + ) +)] +#[get("/api/pages/book-tracker", guard = "private")] +async fn get_book_tracker( + language_params: web::Query, + book_tracker_cache: web::Data, +) -> ApiResult { + Ok(HttpResponse::Ok() + .json(&book_tracker_cache.book_tracker_map.read().await[&language_params.lang])) +} diff --git a/src/api/pages/mod.rs b/src/api/pages/mod.rs index fea1ac40..0c8aaffa 100644 --- a/src/api/pages/mod.rs +++ b/src/api/pages/mod.rs @@ -1,7 +1,9 @@ pub mod achievement_tracker; +pub mod book_tracker; mod community_tier_list; mod leaderboard; mod profiles; +mod warp_tracker; use actix_web::web; use sqlx::PgPool; @@ -14,17 +16,21 @@ struct ApiDoc; pub fn openapi() -> utoipa::openapi::OpenApi { let mut openapi = ApiDoc::openapi(); openapi.merge(achievement_tracker::openapi()); + openapi.merge(book_tracker::openapi()); openapi.merge(community_tier_list::openapi()); openapi.merge(leaderboard::openapi()); openapi.merge(profiles::openapi()); + openapi.merge(warp_tracker::openapi()); openapi } pub fn configure(cfg: &mut web::ServiceConfig) { cfg.configure(achievement_tracker::configure) + .configure(book_tracker::configure) .configure(community_tier_list::configure) .configure(leaderboard::configure) - .configure(profiles::configure); + .configure(profiles::configure) + .configure(warp_tracker::configure); } pub fn cache_achievement_tracker( @@ -32,3 +38,7 @@ pub fn cache_achievement_tracker( ) -> web::Data { achievement_tracker::cache(pool) } + +pub fn cache_book_tracker(pool: PgPool) -> web::Data { + book_tracker::cache(pool) +} diff --git a/src/api/pages/warp_tracker/mod.rs b/src/api/pages/warp_tracker/mod.rs new file mode 100644 index 00000000..2a230393 --- /dev/null +++ b/src/api/pages/warp_tracker/mod.rs @@ -0,0 +1,16 @@ +use actix_web::web; +use utoipa::OpenApi; + +mod uid; + +#[derive(OpenApi)] +#[openapi()] +struct ApiDoc; + +pub fn openapi() -> utoipa::openapi::OpenApi { + uid::openapi() +} + +pub fn configure(cfg: &mut web::ServiceConfig) { + cfg.configure(uid::configure); +} diff --git 
a/src/api/pages/warp_tracker/uid/mod.rs b/src/api/pages/warp_tracker/uid/mod.rs new file mode 100644 index 00000000..66beaeec --- /dev/null +++ b/src/api/pages/warp_tracker/uid/mod.rs @@ -0,0 +1,106 @@ +use actix_web::{get, web, HttpResponse, Responder}; +use chrono::NaiveDateTime; +use serde::Serialize; +use sqlx::PgPool; +use utoipa::OpenApi; + +use crate::{ + api::{private, ApiResult, LanguageParams}, + database, +}; + +#[derive(OpenApi)] +#[openapi(paths(get_warp_tracker))] +struct ApiDoc; + +pub fn openapi() -> utoipa::openapi::OpenApi { + ApiDoc::openapi() +} + +pub fn configure(cfg: &mut web::ServiceConfig) { + cfg.service(get_warp_tracker); +} + +#[derive(Serialize)] +struct Warp { + r#type: WarpType, + name: Option, + timestamp: NaiveDateTime, +} + +#[derive(Serialize)] +#[serde(rename_all = "snake_case")] +enum WarpType { + Character, + LightCone, +} + +impl From for Warp { + fn from(warp: database::DbWarp) -> Self { + let r#type = if warp.character.is_some() { + WarpType::Character + } else { + WarpType::LightCone + }; + + Self { + r#type, + name: warp.name, + timestamp: warp.timestamp, + } + } +} + +#[derive(Serialize)] +struct WarpTracker { + count: usize, + standard: Vec, + departure: Vec, + special: Vec, + lc: Vec, +} + +#[utoipa::path( + tag = "pages", + get, + path = "/api/pages/warp-tracker/{uid}", + security(("api_key" = [])), + responses( + (status = 200, description = "WarpTracker"), + ) +)] +#[get("/api/pages/warp-tracker/{uid}", guard = "private")] +async fn get_warp_tracker( + uid: web::Path, + language_params: web::Query, + pool: web::Data, +) -> ApiResult { + let warps = database::get_warps_by_uid(*uid, &language_params.lang.to_string(), &pool).await?; + + let mut standard = Vec::new(); + let mut departure = Vec::new(); + let mut special = Vec::new(); + let mut lc = Vec::new(); + + for warp in warps { + match warp.gacha_type.as_str() { + "standard" => standard.push(warp.into()), + "departure" => departure.push(warp.into()), + "special" => special.push(warp.into()), + "lc" => lc.push(warp.into()), + _ => {} + } + } + + let count = standard.len() + departure.len() + special.len() + lc.len(); + + let warp_tracker = WarpTracker { + count, + standard, + departure, + special, + lc, + }; + + Ok(HttpResponse::Ok().json(warp_tracker)) +} diff --git a/src/api/scores/mod.rs b/src/api/scores/mod.rs index 8862dcbc..970558fe 100644 --- a/src/api/scores/mod.rs +++ b/src/api/scores/mod.rs @@ -13,8 +13,8 @@ use utoipa::{IntoParams, OpenApi, ToSchema}; struct ApiDoc; #[derive(Display, EnumString, Serialize, Deserialize, ToSchema, Clone, Copy)] -#[strum(serialize_all = "lowercase")] -#[serde(rename_all = "lowercase")] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] pub enum Region { NA, EU, diff --git a/src/api/sitemap/mod.rs b/src/api/sitemap/mod.rs new file mode 100644 index 00000000..00a19ffd --- /dev/null +++ b/src/api/sitemap/mod.rs @@ -0,0 +1,117 @@ +use actix_web::{get, web, HttpResponse, Responder}; +use serde::Serialize; +use sqlx::PgPool; +use utoipa::OpenApi; + +use crate::{api::ApiResult, database}; + +#[derive(OpenApi)] +#[openapi( + tags((name = "sitemap")), + paths(sitemap) +)] +struct ApiDoc; + +pub fn openapi() -> utoipa::openapi::OpenApi { + ApiDoc::openapi() +} + +pub fn configure(cfg: &mut web::ServiceConfig) { + cfg.service(sitemap); +} + +const ROUTES: &[&str] = &[ + "https://stardb.gg", + "https://stardb.gg/login", + "https://stardb.gg/register", + "https://stardb.gg/leaderboard", + "https://stardb.gg/tier-list", + 
"https://stardb.gg/achievement-tracker", + "https://stardb.gg/profile-card-generator", + "https://stardb.gg/privacy-policy", + "https://stardb.gg/articles/", + "https://stardb.gg/articles/daily-farm-route/", + "https://stardb.gg/articles/free-stellar-jade-alerts/", + "https://stardb.gg/articles/oneiric-shard-price/", + "https://stardb.gg/articles/oneiric-shard-price-australia/", + "https://stardb.gg/articles/oneiric-shard-price-brazil/", + "https://stardb.gg/articles/oneiric-shard-price-canada/", + "https://stardb.gg/articles/oneiric-shard-price-china/", + "https://stardb.gg/articles/oneiric-shard-price-eu/", + "https://stardb.gg/articles/oneiric-shard-price-india/", + "https://stardb.gg/articles/oneiric-shard-price-indonesia/", + "https://stardb.gg/articles/oneiric-shard-price-japan/", + "https://stardb.gg/articles/oneiric-shard-price-kazakhstan/", + "https://stardb.gg/articles/oneiric-shard-price-korea/", + "https://stardb.gg/articles/oneiric-shard-price-malaysia/", + "https://stardb.gg/articles/oneiric-shard-price-mexico/", + "https://stardb.gg/articles/oneiric-shard-price-paraguay/", + "https://stardb.gg/articles/oneiric-shard-price-phillipines/", + "https://stardb.gg/articles/oneiric-shard-price-russia/", + "https://stardb.gg/articles/oneiric-shard-price-singapore/", + "https://stardb.gg/articles/oneiric-shard-price-taiwan/", + "https://stardb.gg/articles/oneiric-shard-price-thailand/", + "https://stardb.gg/articles/oneiric-shard-price-uk/", + "https://stardb.gg/articles/oneiric-shard-price-us/", + "https://stardb.gg/articles/oneiric-shard-price-vietnam/", + "https://stardb.gg/api/help/", +]; + +#[derive(Serialize)] +#[allow(non_camel_case_types)] +struct urlset { + #[serde(rename = "@xmlns")] + xmlns: String, + url: Vec, +} + +#[derive(Serialize)] +struct Url { + loc: String, + lastmod: String, +} + +#[utoipa::path( + tag = "sitemap", + get, + path = "/api/sitemap", + responses( + (status = 200, description = "Sitemap"), + ) +)] +#[get("/api/sitemap")] +async fn sitemap(pool: web::Data) -> ApiResult { + let lastmod = "2023-09-06"; + + let mut urls = Vec::new(); + + for route in ROUTES { + let url = Url { + loc: route.to_string(), + lastmod: lastmod.to_string(), + }; + + urls.push(url); + } + + for id in database::get_achievements_id(&pool).await? 
{ + let url = Url { + loc: format!("https://stardb.gg/database/achievements/{id}"), + lastmod: lastmod.to_string(), + }; + + urls.push(url); + } + + let urlset = urlset { + xmlns: "http://www.sitemaps.org/schemas/sitemap/0.9".to_string(), + url: urls, + }; + + let sitemap = r#""#.to_string() + + &quick_xml::se::to_string(&urlset)?; + + Ok(HttpResponse::Ok() + .content_type("application/xml") + .body(sitemap)) +} diff --git a/src/api/warps/mod.rs b/src/api/warps/mod.rs index 9d07b8bf..b497e9e2 100644 --- a/src/api/warps/mod.rs +++ b/src/api/warps/mod.rs @@ -4,6 +4,7 @@ use actix_web::{post, web, HttpResponse, Responder}; use chrono::NaiveDateTime; use serde::Deserialize; use sqlx::PgPool; +use strum::Display; use utoipa::{OpenApi, ToSchema}; use crate::{api::ApiResult, database}; @@ -12,10 +13,20 @@ use crate::{api::ApiResult, database}; #[openapi( tags((name = "warps")), paths(post_warps), - components(schemas(WarpAuthKey)) + components(schemas(WarpAuthKey, GachaType)) )] struct ApiDoc; +#[derive(Display, Deserialize, ToSchema)] +#[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] +enum GachaType { + Standard, + Departure, + Special, + Lc, +} + pub fn openapi() -> utoipa::openapi::OpenApi { let mut openapi = ApiDoc::openapi(); openapi.merge(uid::openapi()); @@ -85,48 +96,45 @@ async fn post_warps( for entry in gacha_log.data.list.iter() { let id = entry.id.parse()?; + let gacha_type = GachaType::Standard.to_string(); + + if database::get_warp_by_id_and_gacha_type(id, &gacha_type, "en", &pool) + .await + .is_ok() + { + break 'outer; + } + let uid = entry.uid.parse()?; let timestamp = NaiveDateTime::parse_from_str(&entry.time, "%Y-%m-%d %H:%M:%S")?; - if entry.item_type == "Character" { - if database::get_warp_standard_character_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let character = entry.item_id.parse()?; + let item = entry.item_id.parse()?; - let db_warp_standard_character = database::DbWarpStandardCharacter { + if entry.item_type == "Character" { + let db_warp = database::DbWarp { id, uid, - character, - name: String::new(), + character: Some(item), + light_cone: None, + gacha_type, + name: None, timestamp, }; - database::set_warp_standard_character(&db_warp_standard_character, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } else if entry.item_type == "Light Cone" { - if database::get_warp_standard_light_cone_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let light_cone = entry.item_id.parse()?; - - let db_warp_standard_light_cone = database::DbWarpStandardLightCone { + let db_warp = database::DbWarp { id, uid, - light_cone, - name: String::new(), + character: None, + light_cone: Some(item), + gacha_type, + name: None, timestamp, }; - database::set_warp_standard_light_cone(&db_warp_standard_light_cone, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } } @@ -155,49 +163,45 @@ async fn post_warps( for entry in gacha_log.data.list.iter() { let id = entry.id.parse()?; + let gacha_type = GachaType::Departure.to_string(); + + if database::get_warp_by_id_and_gacha_type(id, &gacha_type, "en", &pool) + .await + .is_ok() + { + break 'outer; + } + let uid = entry.uid.parse()?; let timestamp = NaiveDateTime::parse_from_str(&entry.time, "%Y-%m-%d %H:%M:%S")?; - if entry.item_type == "Character" { - if database::get_warp_departure_character_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let character = entry.item_id.parse()?; + let 
item = entry.item_id.parse()?; - let db_warp_departure_character = database::DbWarpDepartureCharacter { + if entry.item_type == "Character" { + let db_warp = database::DbWarp { id, uid, - character, - name: String::new(), + character: Some(item), + light_cone: None, + gacha_type, + name: None, timestamp, }; - database::set_warp_departure_character(&db_warp_departure_character, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } else if entry.item_type == "Light Cone" { - if database::get_warp_departure_light_cone_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let light_cone = entry.item_id.parse()?; - - let db_warp_departure_light_cone = database::DbWarpDepartureLightCone { + let db_warp = database::DbWarp { id, uid, - light_cone, - name: String::new(), + character: None, + light_cone: Some(item), + gacha_type, + name: None, timestamp, }; - database::set_warp_departure_light_cone(&db_warp_departure_light_cone, &pool) - .await?; + database::set_warp(&db_warp, &pool).await?; } } @@ -226,48 +230,45 @@ async fn post_warps( for entry in gacha_log.data.list.iter() { let id = entry.id.parse()?; + let gacha_type = GachaType::Special.to_string(); + + if database::get_warp_by_id_and_gacha_type(id, &gacha_type, "en", &pool) + .await + .is_ok() + { + break 'outer; + } + let uid = entry.uid.parse()?; let timestamp = NaiveDateTime::parse_from_str(&entry.time, "%Y-%m-%d %H:%M:%S")?; - if entry.item_type == "Character" { - if database::get_warp_special_character_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } + let item = entry.item_id.parse()?; - let character = entry.item_id.parse()?; - - let db_warp_special_character = database::DbWarpSpecialCharacter { + if entry.item_type == "Character" { + let db_warp = database::DbWarp { id, uid, - character, - name: String::new(), + character: Some(item), + light_cone: None, + gacha_type, + name: None, timestamp, }; - database::set_warp_special_character(&db_warp_special_character, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } else if entry.item_type == "Light Cone" { - if database::get_warp_special_light_cone_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let light_cone = entry.item_id.parse()?; - - let db_warp_special_light_cone = database::DbWarpSpecialLightCone { + let db_warp = database::DbWarp { id, uid, - light_cone, - name: String::new(), + character: None, + light_cone: Some(item), + gacha_type, + name: None, timestamp, }; - database::set_warp_special_light_cone(&db_warp_special_light_cone, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } } @@ -296,48 +297,45 @@ async fn post_warps( for entry in gacha_log.data.list.iter() { let id = entry.id.parse()?; + let gacha_type = GachaType::Lc.to_string(); + + if database::get_warp_by_id_and_gacha_type(id, &gacha_type, "en", &pool) + .await + .is_ok() + { + break 'outer; + } + let uid = entry.uid.parse()?; let timestamp = NaiveDateTime::parse_from_str(&entry.time, "%Y-%m-%d %H:%M:%S")?; - if entry.item_type == "Character" { - if database::get_warp_lc_character_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let character = entry.item_id.parse()?; + let item = entry.item_id.parse()?; - let db_warp_lc_character = database::DbWarpLcCharacter { + if entry.item_type == "Character" { + let db_warp = database::DbWarp { id, uid, - character, - name: String::new(), + character: Some(item), + light_cone: None, + gacha_type, + name: None, 
timestamp, }; - database::set_warp_lc_character(&db_warp_lc_character, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } else if entry.item_type == "Light Cone" { - if database::get_warp_lc_light_cone_by_id_and_uid(id, uid, "en", &pool) - .await - .is_ok() - { - break 'outer; - } - - let light_cone = entry.item_id.parse()?; - - let db_warp_lc_light_cone = database::DbWarpLcLightCone { + let db_warp = database::DbWarp { id, uid, - light_cone, - name: String::new(), + character: None, + light_cone: Some(item), + gacha_type, + name: None, timestamp, }; - database::set_warp_lc_light_cone(&db_warp_lc_light_cone, &pool).await?; + database::set_warp(&db_warp, &pool).await?; } } diff --git a/src/api/warps/uid/mod.rs b/src/api/warps/uid/mod.rs index 88fc26d0..b0175dd4 100644 --- a/src/api/warps/uid/mod.rs +++ b/src/api/warps/uid/mod.rs @@ -9,27 +9,28 @@ use crate::{ database, }; +use super::GachaType; + #[derive(OpenApi)] #[openapi( tags((name = "warps/{uid}")), paths(get_warps), - components(schemas(Warp, GachaType)) + components(schemas(Warp)) )] struct ApiDoc; #[derive(Serialize, ToSchema)] struct Warp { - name: String, + r#type: WarpType, + name: Option, timestamp: NaiveDateTime, } -#[derive(Deserialize, ToSchema)] -#[serde(rename_all = "lowercase")] -enum GachaType { - Standard, - Departure, - Special, - Lc, +#[derive(Serialize, ToSchema)] +#[serde(rename_all = "snake_case")] +enum WarpType { + Character, + LightCone, } #[derive(Deserialize, IntoParams)] @@ -37,74 +38,18 @@ struct WarpParams { gacha_type: GachaType, } -impl From for Warp { - fn from(warp_character: database::DbWarpStandardCharacter) -> Self { - Self { - name: warp_character.name, - timestamp: warp_character.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_light_cone: database::DbWarpStandardLightCone) -> Self { - Self { - name: warp_light_cone.name, - timestamp: warp_light_cone.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_character: database::DbWarpDepartureCharacter) -> Self { - Self { - name: warp_character.name, - timestamp: warp_character.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_light_cone: database::DbWarpDepartureLightCone) -> Self { - Self { - name: warp_light_cone.name, - timestamp: warp_light_cone.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_character: database::DbWarpSpecialCharacter) -> Self { - Self { - name: warp_character.name, - timestamp: warp_character.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_light_cone: database::DbWarpSpecialLightCone) -> Self { - Self { - name: warp_light_cone.name, - timestamp: warp_light_cone.timestamp, - } - } -} - -impl From for Warp { - fn from(warp_character: database::DbWarpLcCharacter) -> Self { - Self { - name: warp_character.name, - timestamp: warp_character.timestamp, - } - } -} +impl From for Warp { + fn from(warp: database::DbWarp) -> Self { + let r#type = if warp.character.is_some() { + WarpType::Character + } else { + WarpType::LightCone + }; -impl From for Warp { - fn from(warp_light_cone: database::DbWarpLcLightCone) -> Self { Self { - name: warp_light_cone.name, - timestamp: warp_light_cone.timestamp, + r#type, + name: warp.name, + timestamp: warp.timestamp, } } } @@ -123,7 +68,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { path = "/api/warps/{uid}", params(LanguageParams, WarpParams), responses( - (status = 200, description = "[WarpSpecial]", body = Vec), + (status = 200, description = "[Warp]", body = Vec), ) )] #[get("/api/warps/{uid}")] @@ -133,126 +78,16 @@ async fn 
get_warps( warp_params: web::Query, pool: web::Data, ) -> ApiResult { - let (warp_characters, warp_light_cones) = match warp_params.gacha_type { - GachaType::Standard => { - let warp_standard_characters = database::get_warp_standard_characters_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - let warp_standard_light_cones = database::get_warp_standard_light_cones_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - (warp_standard_characters, warp_standard_light_cones) - } - GachaType::Departure => { - let warp_departure_characters = database::get_warp_departure_characters_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - let warp_departure_light_cones = database::get_warp_departure_light_cones_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - (warp_departure_characters, warp_departure_light_cones) - } - GachaType::Special => { - let warp_special_characters = database::get_warp_special_characters_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - let warp_special_light_cones = database::get_warp_special_light_cones_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - (warp_special_characters, warp_special_light_cones) - } - GachaType::Lc => { - let warp_lc_characters = database::get_warp_lc_characters_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - let warp_lc_light_cones = database::get_warp_lc_light_cones_by_uid( - *uid, - &language_params.lang.to_string(), - &pool, - ) - .await? - .into_iter() - .map(Warp::from) - .collect::>(); - - (warp_lc_characters, warp_lc_light_cones) - } - }; - - let mut warp_characters = warp_characters.into_iter().peekable(); - let mut warp_light_cones = warp_light_cones.into_iter().peekable(); - - let mut warps = Vec::new(); - - loop { - let n = match (warp_characters.peek(), warp_light_cones.peek()) { - (Some(l), Some(r)) => { - if l.timestamp < r.timestamp { - -1 - } else { - 1 - } - } - (Some(_), None) => -1, - (None, Some(_)) => 1, - (None, None) => 0, - }; - - match n { - -1 => warps.push(warp_characters.next().unwrap()), - 1 => warps.push(warp_light_cones.next().unwrap()), - _ => break, - } - } + let warps: Vec<_> = database::get_warps_by_uid_and_gacha_type( + *uid, + &warp_params.gacha_type.to_string(), + &language_params.lang.to_string(), + &pool, + ) + .await? + .into_iter() + .map(Warp::from) + .collect(); Ok(HttpResponse::Ok().json(warps)) } diff --git a/src/database/achievements.rs b/src/database/achievements.rs index 79dea121..5a048e36 100644 --- a/src/database/achievements.rs +++ b/src/database/achievements.rs @@ -111,6 +111,24 @@ pub async fn get_achievements(language: &str, pool: &PgPool) -> Result Result> { + Ok(sqlx::query!( + " + SELECT + id + FROM + achievements + WHERE NOT + (hidden AND impossible) + " + ) + .fetch_all(pool) + .await? 
+ .iter() + .map(|r| r.id) + .collect()) +} + pub async fn get_related(id: i64, set: i32, pool: &PgPool) -> Result> { Ok(sqlx::query!( " diff --git a/src/database/books.rs b/src/database/books.rs index 4bb21d9f..f89493c8 100644 --- a/src/database/books.rs +++ b/src/database/books.rs @@ -10,6 +10,9 @@ pub struct DbBook { pub series_world_name: String, pub series_inside: i32, pub name: String, + pub comment: Option, + pub image1: Option, + pub image2: Option, pub percent: f64, } @@ -68,7 +71,7 @@ pub async fn get_books(language: &str, pool: &PgPool) -> Result> { ON book_series.world = book_series_worlds_text.id AND book_series_worlds_text.language = $1 ORDER BY - id + world, series, id ", language ) @@ -116,3 +119,67 @@ pub async fn get_book_by_id(id: i64, language: &str, pool: &PgPool) -> Result Result> { + Ok(sqlx::query!( + " + SELECT + id + FROM + books + " + ) + .fetch_all(pool) + .await? + .iter() + .map(|r| r.id) + .collect()) +} + +pub async fn update_book_comment(id: i64, comment: &str, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET comment = $2 WHERE id = $1", id, comment,) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn update_book_image1(id: i64, image1: &str, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET image1 = $2 WHERE id = $1", id, image1,) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn update_book_image2(id: i64, image2: &str, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET image2 = $2 WHERE id = $1", id, image2,) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn delete_book_comment(id: i64, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET comment = NULL WHERE id = $1", id) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn delete_book_image1(id: i64, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET image1 = NULL WHERE id = $1", id) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn delete_book_image2(id: i64, pool: &PgPool) -> Result<()> { + sqlx::query!("UPDATE books SET image2 = NULL WHERE id = $1", id) + .execute(pool) + .await?; + + Ok(()) +} diff --git a/src/database/mod.rs b/src/database/mod.rs index 8107783b..0719c09e 100644 --- a/src/database/mod.rs +++ b/src/database/mod.rs @@ -25,14 +25,7 @@ mod users; mod users_achievements; mod users_books; mod verifications; -mod warp_departure_characters; -mod warp_departure_light_cones; -mod warp_lc_characters; -mod warp_lc_light_cones; -mod warp_special_characters; -mod warp_special_light_cones; -mod warp_standard_characters; -mod warp_standard_light_cones; +mod warps; pub use achievement_series::*; pub use achievement_series_text::*; @@ -61,11 +54,4 @@ pub use users::*; pub use users_achievements::*; pub use users_books::*; pub use verifications::*; -pub use warp_departure_characters::*; -pub use warp_departure_light_cones::*; -pub use warp_lc_characters::*; -pub use warp_lc_light_cones::*; -pub use warp_special_characters::*; -pub use warp_special_light_cones::*; -pub use warp_standard_characters::*; -pub use warp_standard_light_cones::*; +pub use warps::*; diff --git a/src/database/sessions.rs b/src/database/sessions.rs index cbe57e70..346c689e 100644 --- a/src/database/sessions.rs +++ b/src/database/sessions.rs @@ -32,6 +32,24 @@ pub async fn set_session(session: &DbSession, pool: &PgPool) -> Result<()> { Ok(()) } +pub async fn delete_oldest_sessions_by_username(username: &str, pool: &PgPool) -> Result<()> { + sqlx::query!( + " + DELETE FROM + sessions + WHERE + uuid + IN + (SELECT uuid FROM 
sessions WHERE username = $1 ORDER BY expiry DESC OFFSET 9) + ", + username, + ) + .execute(pool) + .await?; + + Ok(()) +} + pub async fn get_session_by_uuid(uuid: Uuid, pool: &PgPool) -> Result { Ok(sqlx::query_as!( DbSession, diff --git a/src/database/users_achievements.rs b/src/database/users_achievements.rs index 2c44cce7..d6c934dd 100644 --- a/src/database/users_achievements.rs +++ b/src/database/users_achievements.rs @@ -13,7 +13,7 @@ pub async fn add_user_achievement( pool: &PgPool, ) -> Result<()> { sqlx::query!( - "INSERT INTO users_achievements(username, id) VALUES($1, $2)", + "INSERT INTO users_achievements(username, id) VALUES($1, $2) ON CONFLICT(username, id) DO NOTHING", user_achievement.username, user_achievement.id, ) diff --git a/src/database/warp_departure_characters.rs b/src/database/warp_departure_characters.rs deleted file mode 100644 index 77e60ffa..00000000 --- a/src/database/warp_departure_characters.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpDepartureCharacter { - pub id: i64, - pub uid: i64, - pub character: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_departure_character( - warp_departure_character: &DbWarpDepartureCharacter, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_departure_characters(id, uid, character, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - character = EXCLUDED.character, - timestamp = EXCLUDED.timestamp - ", - warp_departure_character.id, - warp_departure_character.uid, - warp_departure_character.character, - warp_departure_character.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_departure_characters_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpDepartureCharacter, - " - SELECT - warp_departure_characters.*, - characters_text.name - FROM - warp_departure_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_departure_character_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpDepartureCharacter, - " - SELECT - warp_departure_characters.*, - characters_text.name - FROM - warp_departure_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $3 - WHERE - warp_departure_characters.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) 
-} diff --git a/src/database/warp_departure_light_cones.rs b/src/database/warp_departure_light_cones.rs deleted file mode 100644 index d1976f88..00000000 --- a/src/database/warp_departure_light_cones.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpDepartureLightCone { - pub id: i64, - pub uid: i64, - pub light_cone: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_departure_light_cone( - warp_departure_light_cone: &DbWarpDepartureLightCone, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_departure_light_cones(id, uid, light_cone, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - light_cone = EXCLUDED.light_cone, - timestamp = EXCLUDED.timestamp - ", - warp_departure_light_cone.id, - warp_departure_light_cone.uid, - warp_departure_light_cone.light_cone, - warp_departure_light_cone.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_departure_light_cones_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpDepartureLightCone, - " - SELECT - warp_departure_light_cones.*, - light_cones_text.name - FROM - warp_departure_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_departure_light_cone_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpDepartureLightCone, - " - SELECT - warp_departure_light_cones.*, - light_cones_text.name - FROM - warp_departure_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $3 - WHERE - warp_departure_light_cones.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) -} diff --git a/src/database/warp_lc_characters.rs b/src/database/warp_lc_characters.rs deleted file mode 100644 index f3e6f36c..00000000 --- a/src/database/warp_lc_characters.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpLcCharacter { - pub id: i64, - pub uid: i64, - pub character: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_lc_character( - warp_lc_character: &DbWarpLcCharacter, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_lc_characters(id, uid, character, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - character = EXCLUDED.character, - timestamp = EXCLUDED.timestamp - ", - warp_lc_character.id, - warp_lc_character.uid, - warp_lc_character.character, - warp_lc_character.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_lc_characters_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpLcCharacter, - " - SELECT - warp_lc_characters.*, - characters_text.name - FROM - warp_lc_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) 
-} - -pub async fn get_warp_lc_character_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpLcCharacter, - " - SELECT - warp_lc_characters.*, - characters_text.name - FROM - warp_lc_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $3 - WHERE - warp_lc_characters.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) -} diff --git a/src/database/warp_lc_light_cones.rs b/src/database/warp_lc_light_cones.rs deleted file mode 100644 index ed701faf..00000000 --- a/src/database/warp_lc_light_cones.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpLcLightCone { - pub id: i64, - pub uid: i64, - pub light_cone: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_lc_light_cone( - warp_lc_light_cone: &DbWarpLcLightCone, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_lc_light_cones(id, uid, light_cone, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - light_cone = EXCLUDED.light_cone, - timestamp = EXCLUDED.timestamp - ", - warp_lc_light_cone.id, - warp_lc_light_cone.uid, - warp_lc_light_cone.light_cone, - warp_lc_light_cone.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_lc_light_cones_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpLcLightCone, - " - SELECT - warp_lc_light_cones.*, - light_cones_text.name - FROM - warp_lc_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_lc_light_cone_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpLcLightCone, - " - SELECT - warp_lc_light_cones.*, - light_cones_text.name - FROM - warp_lc_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $3 - WHERE - warp_lc_light_cones.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) 
-} diff --git a/src/database/warp_special_characters.rs b/src/database/warp_special_characters.rs deleted file mode 100644 index cf0745f6..00000000 --- a/src/database/warp_special_characters.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpSpecialCharacter { - pub id: i64, - pub uid: i64, - pub character: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_special_character( - warp_special_character: &DbWarpSpecialCharacter, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_special_characters(id, uid, character, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - character = EXCLUDED.character, - timestamp = EXCLUDED.timestamp - ", - warp_special_character.id, - warp_special_character.uid, - warp_special_character.character, - warp_special_character.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_special_characters_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpSpecialCharacter, - " - SELECT - warp_special_characters.*, - characters_text.name - FROM - warp_special_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_special_character_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpSpecialCharacter, - " - SELECT - warp_special_characters.*, - characters_text.name - FROM - warp_special_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $3 - WHERE - warp_special_characters.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) -} diff --git a/src/database/warp_special_light_cones.rs b/src/database/warp_special_light_cones.rs deleted file mode 100644 index 1e7d73b1..00000000 --- a/src/database/warp_special_light_cones.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpSpecialLightCone { - pub id: i64, - pub uid: i64, - pub light_cone: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_special_light_cone( - warp_special_light_cone: &DbWarpSpecialLightCone, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_special_light_cones(id, uid, light_cone, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - light_cone = EXCLUDED.light_cone, - timestamp = EXCLUDED.timestamp - ", - warp_special_light_cone.id, - warp_special_light_cone.uid, - warp_special_light_cone.light_cone, - warp_special_light_cone.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_special_light_cones_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpSpecialLightCone, - " - SELECT - warp_special_light_cones.*, - light_cones_text.name - FROM - warp_special_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) 
-} - -pub async fn get_warp_special_light_cone_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpSpecialLightCone, - " - SELECT - warp_special_light_cones.*, - light_cones_text.name - FROM - warp_special_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $3 - WHERE - warp_special_light_cones.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) -} diff --git a/src/database/warp_standard_characters.rs b/src/database/warp_standard_characters.rs deleted file mode 100644 index 8b88ce50..00000000 --- a/src/database/warp_standard_characters.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpStandardCharacter { - pub id: i64, - pub uid: i64, - pub character: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_standard_character( - warp_standard_character: &DbWarpStandardCharacter, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_standard_characters(id, uid, character, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - character = EXCLUDED.character, - timestamp = EXCLUDED.timestamp - ", - warp_standard_character.id, - warp_standard_character.uid, - warp_standard_character.character, - warp_standard_character.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_standard_characters_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpStandardCharacter, - " - SELECT - warp_standard_characters.*, - characters_text.name - FROM - warp_standard_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_standard_character_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpStandardCharacter, - " - SELECT - warp_standard_characters.*, - characters_text.name - FROM - warp_standard_characters - INNER JOIN - characters_text - ON - characters_text.id = character AND characters_text.language = $3 - WHERE - warp_standard_characters.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) 
-} diff --git a/src/database/warp_standard_light_cones.rs b/src/database/warp_standard_light_cones.rs deleted file mode 100644 index 6807102b..00000000 --- a/src/database/warp_standard_light_cones.rs +++ /dev/null @@ -1,98 +0,0 @@ -use anyhow::Result; -use chrono::NaiveDateTime; -use sqlx::PgPool; - -pub struct DbWarpStandardLightCone { - pub id: i64, - pub uid: i64, - pub light_cone: i32, - pub name: String, - pub timestamp: NaiveDateTime, -} - -pub async fn set_warp_standard_light_cone( - warp_standard_light_cone: &DbWarpStandardLightCone, - pool: &PgPool, -) -> Result<()> { - sqlx::query!( - " - INSERT INTO - warp_standard_light_cones(id, uid, light_cone, timestamp) - VALUES - ($1, $2, $3, $4) - ON CONFLICT - (id, uid) - DO UPDATE SET - light_cone = EXCLUDED.light_cone, - timestamp = EXCLUDED.timestamp - ", - warp_standard_light_cone.id, - warp_standard_light_cone.uid, - warp_standard_light_cone.light_cone, - warp_standard_light_cone.timestamp, - ) - .execute(pool) - .await?; - - Ok(()) -} - -pub async fn get_warp_standard_light_cones_by_uid( - uid: i64, - language: &str, - pool: &PgPool, -) -> Result> { - Ok(sqlx::query_as!( - DbWarpStandardLightCone, - " - SELECT - warp_standard_light_cones.*, - light_cones_text.name - FROM - warp_standard_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $2 - WHERE - uid = $1 - ORDER BY - timestamp - ", - uid, - language, - ) - .fetch_all(pool) - .await?) -} - -pub async fn get_warp_standard_light_cone_by_id_and_uid( - id: i64, - uid: i64, - language: &str, - pool: &PgPool, -) -> Result { - Ok(sqlx::query_as!( - DbWarpStandardLightCone, - " - SELECT - warp_standard_light_cones.*, - light_cones_text.name - FROM - warp_standard_light_cones - INNER JOIN - light_cones_text - ON - light_cones_text.id = light_cone AND light_cones_text.language = $3 - WHERE - warp_standard_light_cones.id = $1 - AND - uid = $2 - ", - id, - uid, - language, - ) - .fetch_one(pool) - .await?) -} diff --git a/src/database/warps.rs b/src/database/warps.rs new file mode 100644 index 00000000..a473069b --- /dev/null +++ b/src/database/warps.rs @@ -0,0 +1,137 @@ +use anyhow::Result; +use chrono::NaiveDateTime; +use sqlx::PgPool; + +pub struct DbWarp { + pub id: i64, + pub uid: i64, + pub gacha_type: String, + pub character: Option, + pub light_cone: Option, + pub name: Option, + pub timestamp: NaiveDateTime, +} + +pub async fn set_warp(warp: &DbWarp, pool: &PgPool) -> Result<()> { + sqlx::query!( + " + INSERT INTO + warps(id, uid, gacha_type, character, light_cone, timestamp) + VALUES + ($1, $2, $3, $4, $5, $6) + ON CONFLICT + DO NOTHING + ", + warp.id, + warp.uid, + warp.gacha_type, + warp.character, + warp.light_cone, + warp.timestamp, + ) + .execute(pool) + .await?; + + Ok(()) +} + +pub async fn get_warps_by_uid(uid: i64, language: &str, pool: &PgPool) -> Result> { + Ok(sqlx::query_as!( + DbWarp, + " + SELECT + warps.*, + COALESCE(characters_text.name, light_cones_text.name) AS name + FROM + warps + LEFT JOIN + characters_text + ON + characters_text.id = character AND characters_text.language = $2 + LEFT JOIN + light_cones_text + ON + light_cones_text.id = light_cone AND light_cones_text.language = $2 + WHERE + uid = $1 + ORDER BY + id + ", + uid, + language, + ) + .fetch_all(pool) + .await?) 
diff --git a/src/main.rs b/src/main.rs
index da07f24c..b9c819bd 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -54,6 +54,7 @@ async fn main() -> anyhow::Result<()> {
     let tokens_data = Data::new(Mutex::new(HashMap::::new()));
     //FIXME: This is ugly as hell
     let achievement_tracker_cache_data = api::cache_achievement_tracker(pool.clone());
+    let book_tracker_cache_data = api::cache_book_tracker(pool.clone());
 
     let key = Key::from(&std::fs::read("session_key")?);
 
@@ -64,6 +65,7 @@
             .app_data(tokens_data.clone())
             .app_data(pool_data.clone())
             .app_data(achievement_tracker_cache_data.clone())
+            .app_data(book_tracker_cache_data.clone())
             .wrap(Compress::default())
             .wrap(if cfg!(debug_assertions) {
                 SessionMiddleware::builder(PgSessionStore::new(pool.clone()), key.clone())
diff --git a/src/pg_session_store.rs b/src/pg_session_store.rs
index 3b169833..4fa92e51 100644
--- a/src/pg_session_store.rs
+++ b/src/pg_session_store.rs
@@ -49,6 +49,10 @@
         let username = &session_state["username"];
         let username = username[1..username.len() - 1].to_string();
 
+        database::delete_oldest_sessions_by_username(&username, &self.pool)
+            .await
+            .map_err(SaveError::Other)?;
+
         let expiry = (Utc::now() + chrono::Duration::seconds(ttl.whole_seconds())).naive_utc();
 
         let db_session = database::DbSession {
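The session-store change calls `database::delete_oldest_sessions_by_username` before each save; its definition lives in `src/database/sessions.rs`, which this excerpt does not show. A hedged sketch of one plausible shape, pruning each user to a fixed number of live sessions; the `sessions` table layout, the `session_key` and `expiry` column names, and the cap of 9 are all assumptions:

use anyhow::Result;
use sqlx::PgPool;

// Hypothetical sketch only, not the patch's actual implementation: keep a
// user's 9 newest sessions and delete the rest, so the insert that follows
// in save() never grows a user past a bounded number of rows.
pub async fn delete_oldest_sessions_by_username(username: &str, pool: &PgPool) -> Result<()> {
    sqlx::query!(
        "
        DELETE FROM
            sessions
        WHERE
            username = $1
        AND
            session_key NOT IN (
                SELECT session_key FROM sessions
                WHERE username = $1
                ORDER BY expiry DESC
                LIMIT 9
            )
        ",
        username,
    )
    .execute(pool)
    .await?;

    Ok(())
}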
diff --git a/src/update/achievements_percent.rs b/src/update/achievements_percent.rs
index 99680a73..c6ee3a13 100644
--- a/src/update/achievements_percent.rs
+++ b/src/update/achievements_percent.rs
@@ -1,4 +1,7 @@
-use std::time::{Duration, Instant};
+use std::{
+    collections::HashMap,
+    time::{Duration, Instant},
+};
 
 use anyhow::Result;
 use sqlx::PgPool;
@@ -34,10 +37,20 @@ async fn update(pool: &PgPool) -> Result<()> {
 
     let achievements_users_count = database::get_achievements_users_count(pool).await?;
 
+    let mut achievements_users_count_map = HashMap::new();
+
+    for id in database::get_achievements_id(pool).await? {
+        achievements_users_count_map.insert(id, 0.0);
+    }
+
     for achievement_users_count in achievements_users_count {
         let id = achievement_users_count.id;
         let percent = achievement_users_count.count.unwrap_or_default() as f64 / total_count;
 
+        achievements_users_count_map.insert(id, percent);
+    }
+
+    for (id, percent) in achievements_users_count_map {
         let achievement_percent = database::DbAchievementPercent { id, percent };
 
         database::set_achievement_percent(&achievement_percent, pool).await?;
diff --git a/src/update/books_percent.rs b/src/update/books_percent.rs
index b0bfe438..0b526e18 100644
--- a/src/update/books_percent.rs
+++ b/src/update/books_percent.rs
@@ -1,4 +1,7 @@
-use std::time::{Duration, Instant};
+use std::{
+    collections::HashMap,
+    time::{Duration, Instant},
+};
 
 use anyhow::Result;
 use sqlx::PgPool;
@@ -34,10 +37,20 @@ async fn update(pool: &PgPool) -> Result<()> {
 
     let books_users_count = database::get_books_users_count(pool).await?;
 
+    let mut books_users_count_map = HashMap::new();
+
+    for id in database::get_books_id(pool).await? {
+        books_users_count_map.insert(id, 0.0);
+    }
+
     for book_users_count in books_users_count {
         let id = book_users_count.id;
         let percent = book_users_count.count.unwrap_or_default() as f64 / total_count;
 
+        books_users_count_map.insert(id, percent);
+    }
+
+    for (id, percent) in books_users_count_map {
         let book_percent = database::DbBookPercent { id, percent };
 
         database::set_book_percent(&book_percent, pool).await?;
diff --git a/src/update/dimbreath.rs b/src/update/dimbreath.rs
index c2701bae..e53140bd 100644
--- a/src/update/dimbreath.rs
+++ b/src/update/dimbreath.rs
@@ -325,6 +325,9 @@ async fn update(pool: &PgPool) -> Result<()> {
             series_world_name: String::new(),
             series_inside,
             name: String::new(),
+            comment: None,
+            image1: None,
+            image2: None,
             percent: 0.0,
         };
 
@@ -361,6 +364,14 @@
         }
         .to_string();
 
+        if language == "EN" {
+            *text_map.get_mut("1279882444").unwrap() = "Use Basic ATK at least #1[i] times and win within a single battle in Simulated Universe".to_string();
+            *text_map.get_mut("1279882442").unwrap() =
+                "Unlock 1 Blessing(s) of Propagation in Simulated Universe".to_string();
+            *text_map.get_mut("1279882440").unwrap() =
+                "In Simulated Universe: Swarm Disaster, enter a Combat: Swarm, Occurrence: Swarm, or Boss: Swarm domain with a Path other than Propagation for a total of 15 times".to_string();
+        }
+
        for achievement_series in achievement_series.values() {
            let id = achievement_series.id;
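Both `*_percent` hunks follow the same pattern: seed a map with every known id at 0.0, overlay the observed per-id counts, then persist the merged map, so ids that no user owns are written back as 0% instead of keeping a stale value from a previous run. A self-contained sketch of that merge; the function and variable names are illustrative, not taken from the patch:

use std::collections::HashMap;

// Pre-seeding with all ids guarantees a row is written for every id, even
// those absent from the observed counts; the overlay then fills in the rest.
fn merge_percents(all_ids: &[i64], counts: &[(i64, i64)], total: f64) -> HashMap<i64, f64> {
    let mut map: HashMap<i64, f64> = all_ids.iter().map(|&id| (id, 0.0)).collect();
    for &(id, count) in counts {
        map.insert(id, count as f64 / total);
    }
    map
}

fn main() {
    let percents = merge_percents(&[1, 2, 3], &[(1, 5), (3, 10)], 20.0);
    assert_eq!(percents[&2], 0.0); // id 2 has no users but still gets a row
    assert_eq!(percents[&3], 0.5);
}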