diff --git a/.env b/.env index 21aa0d99..85f578ac 100644 --- a/.env +++ b/.env @@ -98,3 +98,5 @@ CLICKHOUSE_DATABASE=staging_ariadne MAXMIND_LICENSE_KEY=none PAYOUTS_BUDGET=100 + +FLAME_ANVIL_URL=none \ No newline at end of file diff --git a/.sqlx/query-603eaa54b3956d68f656008e9b04f1c352857cf2eb15874cee9d31f8d992ab77.json b/.sqlx/query-04c04958c71c4fab903c46c9185286e7460a6ff7b03cbc90939ac6c7cb526433.json similarity index 71% rename from .sqlx/query-603eaa54b3956d68f656008e9b04f1c352857cf2eb15874cee9d31f8d992ab77.json rename to .sqlx/query-04c04958c71c4fab903c46c9185286e7460a6ff7b03cbc90939ac6c7cb526433.json index fbc6462a..6c62c2b6 100644 --- a/.sqlx/query-603eaa54b3956d68f656008e9b04f1c352857cf2eb15874cee9d31f8d992ab77.json +++ b/.sqlx/query-04c04958c71c4fab903c46c9185286e7460a6ff7b03cbc90939ac6c7cb526433.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values\n WHERE enum_id = ANY($1)\n ORDER BY enum_id, ordering, created DESC\n ", + "query": "\n SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values\n WHERE enum_id = ANY($1)\n ORDER BY enum_id, ordering, created DESC\n ", "describe": { "columns": [ { @@ -48,5 +48,5 @@ false ] }, - "hash": "603eaa54b3956d68f656008e9b04f1c352857cf2eb15874cee9d31f8d992ab77" + "hash": "04c04958c71c4fab903c46c9185286e7460a6ff7b03cbc90939ac6c7cb526433" } diff --git a/.sqlx/query-070174adf972b808aca7519168719e6c7b762bfbcc09d8ab2624b00113f71e77.json b/.sqlx/query-070174adf972b808aca7519168719e6c7b762bfbcc09d8ab2624b00113f71e77.json new file mode 100644 index 00000000..e9d49592 --- /dev/null +++ b/.sqlx/query-070174adf972b808aca7519168719e6c7b762bfbcc09d8ab2624b00113f71e77.json @@ -0,0 +1,31 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT v.id version_id, v.mod_id mod_id\n FROM versions v\n INNER JOIN version_fields vf ON vf.field_id = 3 AND v.id = vf.version_id\n INNER JOIN loader_field_enum_values lfev ON 
vf.enum_value = lfev.id AND (cardinality($2::varchar[]) = 0 OR lfev.value = ANY($2::varchar[]))\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))\n WHERE v.mod_id = ANY($1) AND (cardinality($4::varchar[]) = 0 OR v.version_type = ANY($4))\n ORDER BY v.date_published ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "version_id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "mod_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8Array", + "VarcharArray", + "VarcharArray", + "VarcharArray" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "070174adf972b808aca7519168719e6c7b762bfbcc09d8ab2624b00113f71e77" +} diff --git a/.sqlx/query-10279b5a8383ba8e286f1bfb9a486e3f8b362c46cfc2647c90a83a10e5329569.json b/.sqlx/query-10279b5a8383ba8e286f1bfb9a486e3f8b362c46cfc2647c90a83a10e5329569.json deleted file mode 100644 index 14b87d63..00000000 --- a/.sqlx/query-10279b5a8383ba8e286f1bfb9a486e3f8b362c46cfc2647c90a83a10e5329569.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE threads\n SET show_in_mod_inbox = FALSE\n WHERE id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "10279b5a8383ba8e286f1bfb9a486e3f8b362c46cfc2647c90a83a10e5329569" -} diff --git a/.sqlx/query-93c0fdb2bdc9c57602671d50108957654ede51e944944d4af59fe1ba1f6a336e.json b/.sqlx/query-21c44c435bf9a6c138d40cd40d70ccecfd09d877e84f3fbe5cd190dd69d3b7e1.json similarity index 52% rename from .sqlx/query-93c0fdb2bdc9c57602671d50108957654ede51e944944d4af59fe1ba1f6a336e.json rename to .sqlx/query-21c44c435bf9a6c138d40cd40d70ccecfd09d877e84f3fbe5cd190dd69d3b7e1.json index f5bb3982..a706ac67 100644 --- a/.sqlx/query-93c0fdb2bdc9c57602671d50108957654ede51e944944d4af59fe1ba1f6a336e.json +++ 
b/.sqlx/query-21c44c435bf9a6c138d40cd40d70ccecfd09d877e84f3fbe5cd190dd69d3b7e1.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT t.id, t.thread_type, t.mod_id, t.report_id, t.show_in_mod_inbox,\n ARRAY_AGG(DISTINCT tm.user_id) filter (where tm.user_id is not null) members,\n JSONB_AGG(DISTINCT jsonb_build_object('id', tmsg.id, 'author_id', tmsg.author_id, 'thread_id', tmsg.thread_id, 'body', tmsg.body, 'created', tmsg.created)) filter (where tmsg.id is not null) messages\n FROM threads t\n LEFT OUTER JOIN threads_messages tmsg ON tmsg.thread_id = t.id\n LEFT OUTER JOIN threads_members tm ON tm.thread_id = t.id\n WHERE t.id = ANY($1)\n GROUP BY t.id\n ", + "query": "\n SELECT t.id, t.thread_type, t.mod_id, t.report_id,\n ARRAY_AGG(DISTINCT tm.user_id) filter (where tm.user_id is not null) members,\n JSONB_AGG(DISTINCT jsonb_build_object('id', tmsg.id, 'author_id', tmsg.author_id, 'thread_id', tmsg.thread_id, 'body', tmsg.body, 'created', tmsg.created, 'hide_identity', tmsg.hide_identity)) filter (where tmsg.id is not null) messages\n FROM threads t\n LEFT OUTER JOIN threads_messages tmsg ON tmsg.thread_id = t.id\n LEFT OUTER JOIN threads_members tm ON tm.thread_id = t.id\n WHERE t.id = ANY($1)\n GROUP BY t.id\n ", "describe": { "columns": [ { @@ -25,16 +25,11 @@ }, { "ordinal": 4, - "name": "show_in_mod_inbox", - "type_info": "Bool" - }, - { - "ordinal": 5, "name": "members", "type_info": "Int8Array" }, { - "ordinal": 6, + "ordinal": 5, "name": "messages", "type_info": "Jsonb" } @@ -49,10 +44,9 @@ false, true, true, - false, null, null ] }, - "hash": "93c0fdb2bdc9c57602671d50108957654ede51e944944d4af59fe1ba1f6a336e" + "hash": "21c44c435bf9a6c138d40cd40d70ccecfd09d877e84f3fbe5cd190dd69d3b7e1" } diff --git a/.sqlx/query-4deaf065c12dbfd5f585286001fdf66f60524ec13eab7d922db9290237297849.json b/.sqlx/query-28e5a9147061e78c0c1574ff650a30ead9fe7883d283e08a46155382e7a6c163.json similarity index 71% rename from 
.sqlx/query-4deaf065c12dbfd5f585286001fdf66f60524ec13eab7d922db9290237297849.json rename to .sqlx/query-28e5a9147061e78c0c1574ff650a30ead9fe7883d283e08a46155382e7a6c163.json index b9780b84..a901da94 100644 --- a/.sqlx/query-4deaf065c12dbfd5f585286001fdf66f60524ec13eab7d922db9290237297849.json +++ b/.sqlx/query-28e5a9147061e78c0c1574ff650a30ead9fe7883d283e08a46155382e7a6c163.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT o.id, o.slug, o.name, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n WHERE o.id = ANY($1) OR LOWER(o.slug) = ANY($2)\n GROUP BY o.id;\n ", + "query": "\n SELECT o.id, o.slug, o.name, o.team_id, o.description, o.icon_url, o.color\n FROM organizations o\n WHERE o.id = ANY($1) OR LOWER(o.slug) = ANY($2)\n GROUP BY o.id;\n ", "describe": { "columns": [ { @@ -55,5 +55,5 @@ true ] }, - "hash": "4deaf065c12dbfd5f585286001fdf66f60524ec13eab7d922db9290237297849" + "hash": "28e5a9147061e78c0c1574ff650a30ead9fe7883d283e08a46155382e7a6c163" } diff --git a/.sqlx/query-3151ef71738a1f0d097aa14967d7b9eb1f24d4de1f81b80c4bd186427edc1399.json b/.sqlx/query-3151ef71738a1f0d097aa14967d7b9eb1f24d4de1f81b80c4bd186427edc1399.json new file mode 100644 index 00000000..057d8602 --- /dev/null +++ b/.sqlx/query-3151ef71738a1f0d097aa14967d7b9eb1f24d4de1f81b80c4bd186427edc1399.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT mel.id, mel.flame_project_id, mel.status status\n FROM moderation_external_licenses mel\n WHERE mel.flame_project_id = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "flame_project_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4Array" + ] + }, + "nullable": [ + false, + true, + false + ] + }, + "hash": "3151ef71738a1f0d097aa14967d7b9eb1f24d4de1f81b80c4bd186427edc1399" +} diff --git 
a/.sqlx/query-32f4aa1ab67fbdcd7187fbae475876bf3d3225ca7b4994440a67cbd6a7b610f6.json b/.sqlx/query-32f4aa1ab67fbdcd7187fbae475876bf3d3225ca7b4994440a67cbd6a7b610f6.json new file mode 100644 index 00000000..4dcf6bfd --- /dev/null +++ b/.sqlx/query-32f4aa1ab67fbdcd7187fbae475876bf3d3225ca7b4994440a67cbd6a7b610f6.json @@ -0,0 +1,94 @@ +{ + "db_name": "PostgreSQL", + "query": "\n                SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n                v.changelog changelog, v.date_published date_published, v.downloads downloads,\n                v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering\n                FROM versions v\n                WHERE v.id = ANY($1);\n                ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "mod_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "author_id", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "version_name", + "type_info": "Varchar" + }, + { + "ordinal": 4, + "name": "version_number", + "type_info": "Varchar" + }, + { + "ordinal": 5, + "name": "changelog", + "type_info": "Varchar" + }, + { + "ordinal": 6, + "name": "date_published", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "downloads", + "type_info": "Int4" + }, + 
{ + "ordinal": 8, + "name": "version_type", + "type_info": "Varchar" + }, + { + "ordinal": 9, + "name": "featured", + "type_info": "Bool" + }, + { + "ordinal": 10, + "name": "status", + "type_info": "Varchar" + }, + { + "ordinal": 11, + "name": "requested_status", + "type_info": "Varchar" + }, + { + "ordinal": 12, + "name": "ordering", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int8Array" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false, + true, + true + ] + }, + "hash": "32f4aa1ab67fbdcd7187fbae475876bf3d3225ca7b4994440a67cbd6a7b610f6" +} diff --git a/.sqlx/query-0b79ae3825e05ae07058a0a9d02fb0bd68ce37f3c7cf0356d565c23520988816.json b/.sqlx/query-3689ca9f16fb80c55a0d2fd3c08ae4d0b70b92c8ab9a75afb96297748ec36bd4.json similarity index 61% rename from .sqlx/query-0b79ae3825e05ae07058a0a9d02fb0bd68ce37f3c7cf0356d565c23520988816.json rename to .sqlx/query-3689ca9f16fb80c55a0d2fd3c08ae4d0b70b92c8ab9a75afb96297748ec36bd4.json index 6d206d58..b1f9dab6 100644 --- a/.sqlx/query-0b79ae3825e05ae07058a0a9d02fb0bd68ce37f3c7cf0356d565c23520988816.json +++ b/.sqlx/query-3689ca9f16fb80c55a0d2fd3c08ae4d0b70b92c8ab9a75afb96297748ec36bd4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n            SELECT f.id, f.version_id, v.mod_id, f.url, f.filename, f.is_primary, f.size, f.file_type,\n            JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'))) filter (where h.hash is not null) hashes\n            FROM files f\n            INNER JOIN versions v on v.id = f.version_id\n            INNER JOIN hashes h on h.file_id = f.id\n            WHERE h.algorithm = $1 AND h.hash = ANY($2)\n            GROUP BY f.id, v.mod_id, 
v.date_published\n ORDER BY v.date_published\n ", + "query": "\n SELECT f.id, f.version_id, v.mod_id, f.url, f.filename, f.is_primary, f.size, f.file_type,\n JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'))) filter (where h.hash is not null) hashes\n FROM files f\n INNER JOIN versions v on v.id = f.version_id\n INNER JOIN hashes h on h.file_id = f.id\n WHERE h.algorithm = $1 AND h.hash = ANY($2)\n GROUP BY f.id, v.mod_id, v.date_published\n ORDER BY v.date_published\n ", "describe": { "columns": [ { @@ -67,5 +67,5 @@ null ] }, - "hash": "0b79ae3825e05ae07058a0a9d02fb0bd68ce37f3c7cf0356d565c23520988816" + "hash": "3689ca9f16fb80c55a0d2fd3c08ae4d0b70b92c8ab9a75afb96297748ec36bd4" } diff --git a/.sqlx/query-3c875a8a1c03432f258040c436e19dbab6e78bd1789dc70f445578c779c7b995.json b/.sqlx/query-3c875a8a1c03432f258040c436e19dbab6e78bd1789dc70f445578c779c7b995.json new file mode 100644 index 00000000..5d919103 --- /dev/null +++ b/.sqlx/query-3c875a8a1c03432f258040c436e19dbab6e78bd1789dc70f445578c779c7b995.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT mel.id, mel.flame_project_id, mel.status status\n FROM moderation_external_licenses mel\n WHERE mel.flame_project_id = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "flame_project_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "status", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4Array" + ] + }, + "nullable": [ + false, + true, + false + ] + }, + "hash": "3c875a8a1c03432f258040c436e19dbab6e78bd1789dc70f445578c779c7b995" +} diff --git a/.sqlx/query-49813a96f007216072d69468aae705d73d5b85dcdd64a22060009b12d947ed5a.json b/.sqlx/query-49813a96f007216072d69468aae705d73d5b85dcdd64a22060009b12d947ed5a.json deleted file mode 100644 index 32840287..00000000 --- 
a/.sqlx/query-49813a96f007216072d69468aae705d73d5b85dcdd64a22060009b12d947ed5a.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE threads\n SET show_in_mod_inbox = $1\n WHERE id = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bool", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "49813a96f007216072d69468aae705d73d5b85dcdd64a22060009b12d947ed5a" -} diff --git a/.sqlx/query-4cb9fe3dbb2cbfe30a49487f896fb7890f726af2ff11da53f450a88c3dc5fc64.json b/.sqlx/query-4cb9fe3dbb2cbfe30a49487f896fb7890f726af2ff11da53f450a88c3dc5fc64.json new file mode 100644 index 00000000..0397073b --- /dev/null +++ b/.sqlx/query-4cb9fe3dbb2cbfe30a49487f896fb7890f726af2ff11da53f450a88c3dc5fc64.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT encode(mef.sha1, 'escape') sha1, mel.status status\n FROM moderation_external_files mef\n INNER JOIN moderation_external_licenses mel ON mef.external_license_id = mel.id\n WHERE mef.sha1 = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sha1", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "status", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "ByteaArray" + ] + }, + "nullable": [ + null, + false + ] + }, + "hash": "4cb9fe3dbb2cbfe30a49487f896fb7890f726af2ff11da53f450a88c3dc5fc64" +} diff --git a/.sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json b/.sqlx/query-4fc11e55884d6813992fba1d0b3111742a5f98453942fe83e09c2056bda401f4.json similarity index 66% rename from .sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json rename to .sqlx/query-4fc11e55884d6813992fba1d0b3111742a5f98453942fe83e09c2056bda401f4.json index c1b79a18..5c5d3861 100644 --- a/.sqlx/query-f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0.json +++ b/.sqlx/query-4fc11e55884d6813992fba1d0b3111742a5f98453942fe83e09c2056bda401f4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - 
"query": "\n SELECT c.id id, c.name name, c.description description,\n c.icon_url icon_url, c.color color, c.created created, c.user_id user_id,\n c.updated updated, c.status status,\n ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods\n FROM collections c\n LEFT JOIN collections_mods cm ON cm.collection_id = c.id\n WHERE c.id = ANY($1)\n GROUP BY c.id;\n ", + "query": "\n SELECT c.id id, c.name name, c.description description,\n c.icon_url icon_url, c.color color, c.created created, c.user_id user_id,\n c.updated updated, c.status status,\n ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods\n FROM collections c\n LEFT JOIN collections_mods cm ON cm.collection_id = c.id\n WHERE c.id = ANY($1)\n GROUP BY c.id;\n ", "describe": { "columns": [ { @@ -72,5 +72,5 @@ null ] }, - "hash": "f2f865b1f1428ed9469e8f73796c93a23895e6b10a4eb34aa761d29acfa24fb0" + "hash": "4fc11e55884d6813992fba1d0b3111742a5f98453942fe83e09c2056bda401f4" } diff --git a/.sqlx/query-520b6b75e79245e9ec19dbe5c30f041d8081eb317a21b122c0d61d7b13f58072.json b/.sqlx/query-520b6b75e79245e9ec19dbe5c30f041d8081eb317a21b122c0d61d7b13f58072.json new file mode 100644 index 00000000..893e3ac9 --- /dev/null +++ b/.sqlx/query-520b6b75e79245e9ec19dbe5c30f041d8081eb317a21b122c0d61d7b13f58072.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT EXISTS(SELECT 1 FROM notifications WHERE id = ANY($1))", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8Array" + ] + }, + "nullable": [ + null + ] + }, + "hash": "520b6b75e79245e9ec19dbe5c30f041d8081eb317a21b122c0d61d7b13f58072" +} diff --git a/.sqlx/query-5c7bc2b59e5bcbe50e556cf28fb7a20de645752beef330b6779ec256f33e666a.json b/.sqlx/query-64fe01f3dd84c51966150e1278189c04da9e5fcd994ef5162afb1321b9d4b643.json similarity index 78% rename from .sqlx/query-5c7bc2b59e5bcbe50e556cf28fb7a20de645752beef330b6779ec256f33e666a.json rename to 
.sqlx/query-64fe01f3dd84c51966150e1278189c04da9e5fcd994ef5162afb1321b9d4b643.json index e1d35b11..e24329c3 100644 --- a/.sqlx/query-5c7bc2b59e5bcbe50e556cf28fb7a20de645752beef330b6779ec256f33e666a.json +++ b/.sqlx/query-64fe01f3dd84c51966150e1278189c04da9e5fcd994ef5162afb1321b9d4b643.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id\n FROM uploaded_images\n WHERE id = ANY($1)\n GROUP BY id;\n ", + "query": "\n SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id\n FROM uploaded_images\n WHERE id = ANY($1)\n GROUP BY id;\n ", "describe": { "columns": [ { @@ -72,5 +72,5 @@ true ] }, - "hash": "5c7bc2b59e5bcbe50e556cf28fb7a20de645752beef330b6779ec256f33e666a" + "hash": "64fe01f3dd84c51966150e1278189c04da9e5fcd994ef5162afb1321b9d4b643" } diff --git a/.sqlx/query-6e4ff5010b19890e26867611a243a308fb32f7439a18c83d1e16d3e537a43e7d.json b/.sqlx/query-6e4ff5010b19890e26867611a243a308fb32f7439a18c83d1e16d3e537a43e7d.json new file mode 100644 index 00000000..feafe67d --- /dev/null +++ b/.sqlx/query-6e4ff5010b19890e26867611a243a308fb32f7439a18c83d1e16d3e537a43e7d.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT encode(mef.sha1, 'escape') sha1, mel.status status\n FROM moderation_external_files mef\n INNER JOIN moderation_external_licenses mel ON mef.external_license_id = mel.id\n WHERE mef.sha1 = ANY($1)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sha1", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "status", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "ByteaArray" + ] + }, + "nullable": [ + null, + false + ] + }, + "hash": "6e4ff5010b19890e26867611a243a308fb32f7439a18c83d1e16d3e537a43e7d" +} diff --git a/.sqlx/query-21d20e5f09cb0729dc16c8609c35cec5a913f3172b53b8ae05da0096a33b4b64.json 
b/.sqlx/query-6fac7682527a4a9dc34e121e8b7c356cb8fe1d0ff1f9a19d29937721acaa8842.json similarity index 52% rename from .sqlx/query-21d20e5f09cb0729dc16c8609c35cec5a913f3172b53b8ae05da0096a33b4b64.json rename to .sqlx/query-6fac7682527a4a9dc34e121e8b7c356cb8fe1d0ff1f9a19d29937721acaa8842.json index aff58048..c7ccefa7 100644 --- a/.sqlx/query-21d20e5f09cb0729dc16c8609c35cec5a913f3172b53b8ae05da0096a33b4b64.json +++ b/.sqlx/query-6fac7682527a4a9dc34e121e8b7c356cb8fe1d0ff1f9a19d29937721acaa8842.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id\n FROM pats\n WHERE user_id = $1\n ORDER BY created DESC\n ", + "query": "\n SELECT id\n FROM pats\n WHERE user_id = $1\n ORDER BY created DESC\n ", "describe": { "columns": [ { @@ -18,5 +18,5 @@ false ] }, - "hash": "21d20e5f09cb0729dc16c8609c35cec5a913f3172b53b8ae05da0096a33b4b64" + "hash": "6fac7682527a4a9dc34e121e8b7c356cb8fe1d0ff1f9a19d29937721acaa8842" } diff --git a/.sqlx/query-c94faba99d486b11509fff59465b7cc71983551b035e936ce4d9776510afb514.json b/.sqlx/query-74854bb35744be413458d0609d6511aa4c9802b5fc4ac73abb520cf2577e1d84.json similarity index 79% rename from .sqlx/query-c94faba99d486b11509fff59465b7cc71983551b035e936ce4d9776510afb514.json rename to .sqlx/query-74854bb35744be413458d0609d6511aa4c9802b5fc4ac73abb520cf2577e1d84.json index b02376be..5c868155 100644 --- a/.sqlx/query-c94faba99d486b11509fff59465b7cc71983551b035e936ce4d9776510afb514.json +++ b/.sqlx/query-74854bb35744be413458d0609d6511aa4c9802b5fc4ac73abb520cf2577e1d84.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, user_id, session, created, last_login, expires, refresh_expires, os, platform,\n city, country, ip, user_agent\n FROM sessions\n WHERE id = ANY($1) OR session = ANY($2)\n ORDER BY created DESC\n ", + "query": "\n SELECT id, user_id, session, created, last_login, expires, refresh_expires, os, platform,\n city, country, ip, user_agent\n FROM sessions\n WHERE id = ANY($1) OR session = ANY($2)\n ORDER BY 
created DESC\n ", "describe": { "columns": [ { @@ -91,5 +91,5 @@ false ] }, - "hash": "c94faba99d486b11509fff59465b7cc71983551b035e936ce4d9776510afb514" + "hash": "74854bb35744be413458d0609d6511aa4c9802b5fc4ac73abb520cf2577e1d84" } diff --git a/.sqlx/query-7ab21e7613dd88e97cf602e76bff62170c13ceef8104a4ce4cb2d101f8ce4f48.json b/.sqlx/query-7ab21e7613dd88e97cf602e76bff62170c13ceef8104a4ce4cb2d101f8ce4f48.json deleted file mode 100644 index 5fb8d0de..00000000 --- a/.sqlx/query-7ab21e7613dd88e97cf602e76bff62170c13ceef8104a4ce4cb2d101f8ce4f48.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE users\n SET balance = balance + $1\n WHERE id = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Numeric", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "7ab21e7613dd88e97cf602e76bff62170c13ceef8104a4ce4cb2d101f8ce4f48" -} diff --git a/.sqlx/query-8f5e2a570cf35b2d158182bac37fd40bcec277bbdeddaece5efaa88600048a70.json b/.sqlx/query-8f5e2a570cf35b2d158182bac37fd40bcec277bbdeddaece5efaa88600048a70.json deleted file mode 100644 index 30713236..00000000 --- a/.sqlx/query-8f5e2a570cf35b2d158182bac37fd40bcec277bbdeddaece5efaa88600048a70.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE threads\n SET show_in_mod_inbox = FALSE\n WHERE id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "8f5e2a570cf35b2d158182bac37fd40bcec277bbdeddaece5efaa88600048a70" -} diff --git a/.sqlx/query-95e17b2512494ffcbfe6278b87aa273edc5729633aeaa87f6239667d2f861e68.json b/.sqlx/query-95e17b2512494ffcbfe6278b87aa273edc5729633aeaa87f6239667d2f861e68.json new file mode 100644 index 00000000..063c2e0e --- /dev/null +++ b/.sqlx/query-95e17b2512494ffcbfe6278b87aa273edc5729633aeaa87f6239667d2f861e68.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE mods\n SET status = 'rejected'\n WHERE id = $1\n ", + "describe": { + 
"columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "95e17b2512494ffcbfe6278b87aa273edc5729633aeaa87f6239667d2f861e68" +} diff --git a/.sqlx/query-9c8f3f9503b5bb52e05bbc8a8eee7f640ab7d6b04a59ec111ce8b23e886911de.json b/.sqlx/query-9c8f3f9503b5bb52e05bbc8a8eee7f640ab7d6b04a59ec111ce8b23e886911de.json deleted file mode 100644 index 77c8db51..00000000 --- a/.sqlx/query-9c8f3f9503b5bb52e05bbc8a8eee7f640ab7d6b04a59ec111ce8b23e886911de.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n DELETE FROM dependencies WHERE dependent_id = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] - }, - "hash": "9c8f3f9503b5bb52e05bbc8a8eee7f640ab7d6b04a59ec111ce8b23e886911de" -} diff --git a/.sqlx/query-9d46594c3dda50dc84defee87fa98210989dd59b06941a5e71b6661f059c9692.json b/.sqlx/query-9d46594c3dda50dc84defee87fa98210989dd59b06941a5e71b6661f059c9692.json new file mode 100644 index 00000000..089981a7 --- /dev/null +++ b/.sqlx/query-9d46594c3dda50dc84defee87fa98210989dd59b06941a5e71b6661f059c9692.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO threads_messages (\n id, author_id, body, thread_id, hide_identity\n )\n VALUES (\n $1, $2, $3, $4, $5\n )\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Jsonb", + "Int8", + "Bool" + ] + }, + "nullable": [] + }, + "hash": "9d46594c3dda50dc84defee87fa98210989dd59b06941a5e71b6661f059c9692" +} diff --git a/.sqlx/query-e6f5a150cbd3bd6b9bde9e5cdad224a45c96d678b69ec12508e81246710e3f6d.json b/.sqlx/query-a1331f7c6f33234e413978c0d9318365e7de5948b93e8c0c85a1d179f4968517.json similarity index 74% rename from .sqlx/query-e6f5a150cbd3bd6b9bde9e5cdad224a45c96d678b69ec12508e81246710e3f6d.json rename to .sqlx/query-a1331f7c6f33234e413978c0d9318365e7de5948b93e8c0c85a1d179f4968517.json index 384c572e..165e3c68 100644 --- 
a/.sqlx/query-e6f5a150cbd3bd6b9bde9e5cdad224a45c96d678b69ec12508e81246710e3f6d.json +++ b/.sqlx/query-a1331f7c6f33234e413978c0d9318365e7de5948b93e8c0c85a1d179f4968517.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, name, access_token, scopes, user_id, created, expires, last_used\n FROM pats\n WHERE id = ANY($1) OR access_token = ANY($2)\n ORDER BY created DESC\n ", + "query": "\n SELECT id, name, access_token, scopes, user_id, created, expires, last_used\n FROM pats\n WHERE id = ANY($1) OR access_token = ANY($2)\n ORDER BY created DESC\n ", "describe": { "columns": [ { @@ -61,5 +61,5 @@ true ] }, - "hash": "e6f5a150cbd3bd6b9bde9e5cdad224a45c96d678b69ec12508e81246710e3f6d" + "hash": "a1331f7c6f33234e413978c0d9318365e7de5948b93e8c0c85a1d179f4968517" } diff --git a/.sqlx/query-a40e4075ba1bff5b6fde104ed1557ad8d4a75d7d90d481decd222f31685c4981.json b/.sqlx/query-a40e4075ba1bff5b6fde104ed1557ad8d4a75d7d90d481decd222f31685c4981.json new file mode 100644 index 00000000..dd7086e8 --- /dev/null +++ b/.sqlx/query-a40e4075ba1bff5b6fde104ed1557ad8d4a75d7d90d481decd222f31685c4981.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM dependencies WHERE dependent_id = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "a40e4075ba1bff5b6fde104ed1557ad8d4a75d7d90d481decd222f31685c4981" +} diff --git a/.sqlx/query-5e7e85c8c1f4b4e600c51669b6591b5cc279bd7482893ec687e83ee22d00a3a0.json b/.sqlx/query-a47456ecddbd1787301a2765168db0df31980ae48cb2ec37c323da10ba55a785.json similarity index 81% rename from .sqlx/query-5e7e85c8c1f4b4e600c51669b6591b5cc279bd7482893ec687e83ee22d00a3a0.json rename to .sqlx/query-a47456ecddbd1787301a2765168db0df31980ae48cb2ec37c323da10ba55a785.json index 2932ef87..fca3ad56 100644 --- a/.sqlx/query-5e7e85c8c1f4b4e600c51669b6591b5cc279bd7482893ec687e83ee22d00a3a0.json +++ 
b/.sqlx/query-a47456ecddbd1787301a2765168db0df31980ae48cb2ec37c323da10ba55a785.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, name, email,\n avatar_url, username, bio,\n created, role, badges,\n balance,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email,\n venmo_handle\n FROM users\n WHERE id = ANY($1) OR LOWER(username) = ANY($2)\n ", + "query": "\n SELECT id, name, email,\n avatar_url, username, bio,\n created, role, badges,\n balance,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email,\n venmo_handle\n FROM users\n WHERE id = ANY($1) OR LOWER(username) = ANY($2)\n ", "describe": { "columns": [ { @@ -151,5 +151,5 @@ true ] }, - "hash": "5e7e85c8c1f4b4e600c51669b6591b5cc279bd7482893ec687e83ee22d00a3a0" + "hash": "a47456ecddbd1787301a2765168db0df31980ae48cb2ec37c323da10ba55a785" } diff --git a/.sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json b/.sqlx/query-a5007d03b1b5b2a95814a3070d114c55731403dcd75d44420acce8df5bd2009b.json similarity index 74% rename from .sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json rename to .sqlx/query-a5007d03b1b5b2a95814a3070d114c55731403dcd75d44420acce8df5bd2009b.json index dd76374d..1b838c4a 100644 --- a/.sqlx/query-c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345.json +++ b/.sqlx/query-a5007d03b1b5b2a95814a3070d114c55731403dcd75d44420acce8df5bd2009b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n accepted, payouts_split, \n ordering, user_id\n FROM team_members\n WHERE team_id = ANY($1)\n ORDER BY team_id, ordering;\n ", + "query": "\n SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,\n 
accepted, payouts_split,\n            ordering, user_id\n            FROM team_members\n            WHERE team_id = ANY($1)\n            ORDER BY team_id, ordering;\n            ", "describe": { "columns": [ { @@ -72,5 +72,5 @@ false ] }, - "hash": "c387574b32f6b70adc88132df96fbbc7dd57a6f633a787dd31aafc0584547345" + "hash": "a5007d03b1b5b2a95814a3070d114c55731403dcd75d44420acce8df5bd2009b" } diff --git a/.sqlx/query-b768d9db6c785d6a701324ea746794d33e94121403163a774b6ef775640fd3d3.json b/.sqlx/query-a8bfce13de871daf0bb1cf73b4c5ded611ff58d94461404182942210492e8010.json similarity index 76% rename from .sqlx/query-b768d9db6c785d6a701324ea746794d33e94121403163a774b6ef775640fd3d3.json rename to .sqlx/query-a8bfce13de871daf0bb1cf73b4c5ded611ff58d94461404182942210492e8010.json index 7d789042..f762fb0e 100644 --- a/.sqlx/query-b768d9db6c785d6a701324ea746794d33e94121403163a774b6ef775640fd3d3.json +++ b/.sqlx/query-a8bfce13de871daf0bb1cf73b4c5ded611ff58d94461404182942210492e8010.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n            SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n            FROM mods m\n            INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE\n            WHERE m.id = ANY($1) AND m.monetization_status = $2\n            ", + "query": "\n            SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n            FROM mods m\n            INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE\n            WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.status = ANY($3)\n            ", "describe": { "columns": [ { @@ -22,7 +22,8 @@ "parameters": { "Left": [ "Int8Array", - "Text" + "Text", + "TextArray" ] }, "nullable": [ @@ -31,5 +32,5 @@ false ] }, - "hash": "b768d9db6c785d6a701324ea746794d33e94121403163a774b6ef775640fd3d3" + "hash": "a8bfce13de871daf0bb1cf73b4c5ded611ff58d94461404182942210492e8010" }
diff --git a/.sqlx/query-b0c29c51bd3ae5b93d487471a98ee9bbb43a4df468ba781852b137dd315b9608.json b/.sqlx/query-b0c29c51bd3ae5b93d487471a98ee9bbb43a4df468ba781852b137dd315b9608.json deleted file mode 100644 index a06786be..00000000 --- a/.sqlx/query-b0c29c51bd3ae5b93d487471a98ee9bbb43a4df468ba781852b137dd315b9608.json +++ /dev/null @@ -1,17
+0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO threads_messages (\n id, author_id, body, thread_id\n )\n VALUES (\n $1, $2, $3, $4\n )\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Jsonb", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "b0c29c51bd3ae5b93d487471a98ee9bbb43a4df468ba781852b137dd315b9608" -} diff --git a/.sqlx/query-03cd8926d18aa8c11934fdc0da32ccbbbccf2527c523336f230c0e344c471a0f.json b/.sqlx/query-b82d35429e009e515ae1e0332142b3bd0bec55f38807eded9130b932929f2ebe.json similarity index 81% rename from .sqlx/query-03cd8926d18aa8c11934fdc0da32ccbbbccf2527c523336f230c0e344c471a0f.json rename to .sqlx/query-b82d35429e009e515ae1e0332142b3bd0bec55f38807eded9130b932929f2ebe.json index 6671362a..d78e5d15 100644 --- a/.sqlx/query-03cd8926d18aa8c11934fdc0da32ccbbbccf2527c523336f230c0e344c471a0f.json +++ b/.sqlx/query-b82d35429e009e515ae1e0332142b3bd0bec55f38807eded9130b932929f2ebe.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n FROM mods m\n INNER JOIN organizations o ON m.organization_id = o.id\n INNER JOIN team_members tm on o.team_id = tm.team_id AND tm.accepted = TRUE\n WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.organization_id IS NOT NULL\n ", + "query": "\n SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n FROM mods m\n INNER JOIN organizations o ON m.organization_id = o.id\n INNER JOIN team_members tm on o.team_id = tm.team_id AND tm.accepted = TRUE\n WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.status = ANY($3) AND m.organization_id IS NOT NULL\n ", "describe": { "columns": [ { @@ -22,7 +22,8 @@ "parameters": { "Left": [ "Int8Array", - "Text" + "Text", + "TextArray" ] }, "nullable": [ @@ -31,5 +32,5 @@ false ] }, - "hash": "03cd8926d18aa8c11934fdc0da32ccbbbccf2527c523336f230c0e344c471a0f" + "hash": 
"b82d35429e009e515ae1e0332142b3bd0bec55f38807eded9130b932929f2ebe" } diff --git a/.sqlx/query-b993ec7579f06603a2a308dccd1ea1fbffd94286db48bc0e36a30f4f6a9d39af.json b/.sqlx/query-b993ec7579f06603a2a308dccd1ea1fbffd94286db48bc0e36a30f4f6a9d39af.json deleted file mode 100644 index 0db3e537..00000000 --- a/.sqlx/query-b993ec7579f06603a2a308dccd1ea1fbffd94286db48bc0e36a30f4f6a9d39af.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE versions\n SET downloads = downloads + 1\n WHERE id = ANY($1)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8Array" - ] - }, - "nullable": [] - }, - "hash": "b993ec7579f06603a2a308dccd1ea1fbffd94286db48bc0e36a30f4f6a9d39af" -} diff --git a/.sqlx/query-bb6afad07ebfa3b92399bb07aa9e15fa69bd328f44b4bf991e80f6b91fcd3a50.json b/.sqlx/query-c07277bcf62120ac4fac8678e09512f3984031919a71af59fc10995fb21f480c.json similarity index 70% rename from .sqlx/query-bb6afad07ebfa3b92399bb07aa9e15fa69bd328f44b4bf991e80f6b91fcd3a50.json rename to .sqlx/query-c07277bcf62120ac4fac8678e09512f3984031919a71af59fc10995fb21f480c.json index 01b0c698..f7b9866a 100644 --- a/.sqlx/query-bb6afad07ebfa3b92399bb07aa9e15fa69bd328f44b4bf991e80f6b91fcd3a50.json +++ b/.sqlx/query-c07277bcf62120ac4fac8678e09512f3984031919a71af59fc10995fb21f480c.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type, lfl.loader_id\n FROM loader_fields lf\n LEFT JOIN loader_fields_loaders lfl ON lfl.loader_field_id = lf.id\n WHERE lfl.loader_id = ANY($1)\n ", + "query": "\n SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type, lfl.loader_id\n FROM loader_fields lf\n LEFT JOIN loader_fields_loaders lfl ON lfl.loader_field_id = lf.id\n WHERE lfl.loader_id = ANY($1)\n ", "describe": { "columns": [ { @@ -60,5 +60,5 @@ false ] }, - "hash": 
"bb6afad07ebfa3b92399bb07aa9e15fa69bd328f44b4bf991e80f6b91fcd3a50" + "hash": "c07277bcf62120ac4fac8678e09512f3984031919a71af59fc10995fb21f480c" } diff --git a/.sqlx/query-c2924fff035e92f7bd2279517310ba391ced72b38be97d462cdfe60048e947db.json b/.sqlx/query-c2924fff035e92f7bd2279517310ba391ced72b38be97d462cdfe60048e947db.json new file mode 100644 index 00000000..fba958d5 --- /dev/null +++ b/.sqlx/query-c2924fff035e92f7bd2279517310ba391ced72b38be97d462cdfe60048e947db.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE files\n SET metadata = $1\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Jsonb", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "c2924fff035e92f7bd2279517310ba391ced72b38be97d462cdfe60048e947db" +} diff --git a/.sqlx/query-320d73cd900a6e00f0e74b7a8c34a7658d16034b01a35558cb42fa9c16185eb5.json b/.sqlx/query-caa4f261950f027cd34e2099e5489c02de214299004ea182f5eae93396e1d313.json similarity index 70% rename from .sqlx/query-320d73cd900a6e00f0e74b7a8c34a7658d16034b01a35558cb42fa9c16185eb5.json rename to .sqlx/query-caa4f261950f027cd34e2099e5489c02de214299004ea182f5eae93396e1d313.json index 6284d141..0fc2034d 100644 --- a/.sqlx/query-320d73cd900a6e00f0e74b7a8c34a7658d16034b01a35558cb42fa9c16185eb5.json +++ b/.sqlx/query-caa4f261950f027cd34e2099e5489c02de214299004ea182f5eae93396e1d313.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT tm.id, tm.author_id, tm.thread_id, tm.body, tm.created\n FROM threads_messages tm\n WHERE tm.id = ANY($1)\n ", + "query": "\n SELECT tm.id, tm.author_id, tm.thread_id, tm.body, tm.created, tm.hide_identity\n FROM threads_messages tm\n WHERE tm.id = ANY($1)\n ", "describe": { "columns": [ { @@ -27,6 +27,11 @@ "ordinal": 4, "name": "created", "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "hide_identity", + "type_info": "Bool" } ], "parameters": { @@ -39,8 +44,9 @@ true, false, false, + false, false ] }, - "hash": 
"320d73cd900a6e00f0e74b7a8c34a7658d16034b01a35558cb42fa9c16185eb5" + "hash": "caa4f261950f027cd34e2099e5489c02de214299004ea182f5eae93396e1d313" } diff --git a/.sqlx/query-cc1f2f568a0ba1d285a95fd9b6e3b118a0eaa26e2851bcc3f1920ae0140b48ae.json b/.sqlx/query-cc1f2f568a0ba1d285a95fd9b6e3b118a0eaa26e2851bcc3f1920ae0140b48ae.json new file mode 100644 index 00000000..953a6002 --- /dev/null +++ b/.sqlx/query-cc1f2f568a0ba1d285a95fd9b6e3b118a0eaa26e2851bcc3f1920ae0140b48ae.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n f.metadata, v.id version_id\n FROM versions v\n INNER JOIN files f ON f.version_id = v.id\n WHERE v.mod_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "metadata", + "type_info": "Jsonb" + }, + { + "ordinal": 1, + "name": "version_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true, + false + ] + }, + "hash": "cc1f2f568a0ba1d285a95fd9b6e3b118a0eaa26e2851bcc3f1920ae0140b48ae" +} diff --git a/.sqlx/query-ccf57f9c1026927afc940a20ebad9fb58ded7171b21e91973d1f13c91eab9b37.json b/.sqlx/query-ccf57f9c1026927afc940a20ebad9fb58ded7171b21e91973d1f13c91eab9b37.json new file mode 100644 index 00000000..8b28b3d9 --- /dev/null +++ b/.sqlx/query-ccf57f9c1026927afc940a20ebad9fb58ded7171b21e91973d1f13c91eab9b37.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE files\n SET metadata = $1\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Jsonb", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "ccf57f9c1026927afc940a20ebad9fb58ded7171b21e91973d1f13c91eab9b37" +} diff --git a/.sqlx/query-d08c9ef6a8829ce1d23d66f27c58f4b9b64f4ce985e60ded871d1f31eb0c818b.json b/.sqlx/query-d08c9ef6a8829ce1d23d66f27c58f4b9b64f4ce985e60ded871d1f31eb0c818b.json deleted file mode 100644 index 7eab9304..00000000 --- a/.sqlx/query-d08c9ef6a8829ce1d23d66f27c58f4b9b64f4ce985e60ded871d1f31eb0c818b.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - 
"db_name": "PostgreSQL", - "query": "UPDATE mods\n SET downloads = downloads + 1\n WHERE id = ANY($1)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8Array" - ] - }, - "nullable": [] - }, - "hash": "d08c9ef6a8829ce1d23d66f27c58f4b9b64f4ce985e60ded871d1f31eb0c818b" -} diff --git a/.sqlx/query-e1c24a57013cbc64f463d3a49cb68989eced49b475c0bbab90b21908ae0e77b4.json b/.sqlx/query-e1c24a57013cbc64f463d3a49cb68989eced49b475c0bbab90b21908ae0e77b4.json new file mode 100644 index 00000000..dc23d4e2 --- /dev/null +++ b/.sqlx/query-e1c24a57013cbc64f463d3a49cb68989eced49b475c0bbab90b21908ae0e77b4.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users u\n SET balance = u.balance + v.amount\n FROM unnest($1::BIGINT[], $2::NUMERIC[]) AS v(id, amount)\n WHERE u.id = v.id\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8Array", + "NumericArray" + ] + }, + "nullable": [] + }, + "hash": "e1c24a57013cbc64f463d3a49cb68989eced49b475c0bbab90b21908ae0e77b4" +} diff --git a/.sqlx/query-e37ecb6dc1509d390bb6f68ba25899d19f693554d8969bbf8f8ee14a78adf0f9.json b/.sqlx/query-e37ecb6dc1509d390bb6f68ba25899d19f693554d8969bbf8f8ee14a78adf0f9.json deleted file mode 100644 index 798f248b..00000000 --- a/.sqlx/query-e37ecb6dc1509d390bb6f68ba25899d19f693554d8969bbf8f8ee14a78adf0f9.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE threads\n SET show_in_mod_inbox = $1\n WHERE id = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bool", - "Int8" - ] - }, - "nullable": [] - }, - "hash": "e37ecb6dc1509d390bb6f68ba25899d19f693554d8969bbf8f8ee14a78adf0f9" -} diff --git a/.sqlx/query-e9d863c1793939d5ae7137d810f23d06460c28a9058b251448e3786c436f80cd.json b/.sqlx/query-e9d863c1793939d5ae7137d810f23d06460c28a9058b251448e3786c436f80cd.json deleted file mode 100644 index d07589c7..00000000 --- a/.sqlx/query-e9d863c1793939d5ae7137d810f23d06460c28a9058b251448e3786c436f80cd.json +++ 
/dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT id\n FROM threads\n WHERE show_in_mod_inbox = TRUE\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "e9d863c1793939d5ae7137d810f23d06460c28a9058b251448e3786c436f80cd" -} diff --git a/.sqlx/query-f297b517bc3bbd8628c0c222c0e3daf8f4efbe628ee2e8ddbbb4b9734cc9c915.json b/.sqlx/query-f297b517bc3bbd8628c0c222c0e3daf8f4efbe628ee2e8ddbbb4b9734cc9c915.json new file mode 100644 index 00000000..dc923578 --- /dev/null +++ b/.sqlx/query-f297b517bc3bbd8628c0c222c0e3daf8f4efbe628ee2e8ddbbb4b9734cc9c915.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO moderation_external_files (sha1, external_license_id)\n SELECT * FROM UNNEST ($1::bytea[], $2::bigint[])\n ON CONFLICT (sha1) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "ByteaArray", + "Int8Array" + ] + }, + "nullable": [] + }, + "hash": "f297b517bc3bbd8628c0c222c0e3daf8f4efbe628ee2e8ddbbb4b9734cc9c915" +} diff --git a/Cargo.lock b/Cargo.lock index 58026003..6a43e988 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aes" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ 
"getrandom", "once_cell", @@ -41,9 +41,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", @@ -54,9 +54,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -99,15 +99,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "argon2" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ba4cac0a46bc1d2912652a751c47f2a9f3a7fe89bcae2275d418f5270402f9" +checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" dependencies = [ "base64ct", "blake2", @@ -144,19 +144,19 @@ dependencies = [ "memchr", "pin-project-lite", "tokio", - "zstd 0.13.0", - "zstd-safe 7.0.0", + "zstd 0.13.1", + "zstd-safe 7.1.0", ] [[package]] name = "async-trait" -version = "0.1.77" +version = "0.1.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -168,23 +168,13 @@ dependencies = [ "num-traits", ] 
-[[package]] -name = "atomic-write-file" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edcdbedc2236483ab103a53415653d6b4442ea6141baf1ffa85df29635e88436" -dependencies = [ - "nix", - "rand", -] - [[package]] name = "attohttpc" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7" dependencies = [ - "http 0.2.11", + "http 0.2.12", "log", "native-tls", "serde", @@ -200,9 +190,9 @@ checksum = "3c1e7e457ea78e524f48639f551fd79703ac3f2237f5ecccdf4708f8a75ad373" [[package]] name = "autocfg" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" [[package]] name = "aws-creds" @@ -232,9 +222,9 @@ dependencies = [ [[package]] name = "axum" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1236b4b292f6c4d6dc34604bb5120d85c3fe1d1aa596bd5cc52ca054d13e7b9e" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core", @@ -242,10 +232,10 @@ dependencies = [ "base64 0.21.7", "bytes", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.1.0", + "hyper 1.2.0", "hyper-util", "itoa", "matchit", @@ -260,7 +250,7 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1 0.10.6", - "sync_wrapper", + "sync_wrapper 1.0.0", "tokio", "tokio-tungstenite", "tower", @@ -278,13 +268,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper", + "sync_wrapper 0.1.2", "tower-layer", "tower-service", "tracing", @@ -299,20 +289,20 @@ 
dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] name = "axum-prometheus" -version = "0.5.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f9e72192ebbdc3ce795e9c41291a990b4028d3e7419ee9267e567b046a472fa" +checksum = "b683cbc43010e9a3d72c2f31ca464155ff4f95819e88a32924b0f47a43898978" dependencies = [ "axum", "bytes", "futures", "futures-core", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "matchit", "metrics", @@ -326,9 +316,9 @@ dependencies = [ [[package]] name = "axum-test" -version = "14.2.2" +version = "14.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d15e9969313df61a64e25ce39cc8e586d42432696a0c8e0cfac1d377013d9c" +checksum = "decb9ebc8ff6be9415e72d734657a7a53847bf36fa98a7f8a5cc324d5e6705fe" dependencies = [ "anyhow", "async-trait", @@ -336,9 +326,9 @@ dependencies = [ "axum", "bytes", "cookie", - "http 1.0.0", + "http 1.1.0", "http-body-util", - "hyper 1.1.0", + "hyper 1.2.0", "hyper-util", "mime", "pretty_assertions", @@ -355,9 +345,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", @@ -386,6 +376,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64ct" version = "1.6.0" @@ -430,9 +426,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = 
"bitflags" -version = "2.4.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" dependencies = [ "serde", ] @@ -478,9 +474,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f58b559fd6448c6e2fd0adb5720cd98a2506594cafa4737ff98c396f3e82f667" +checksum = "0901fc8eb0aca4c83be0106d6f2db17d86a08dfc2c25f0e84464bf381158add6" dependencies = [ "borsh-derive", "cfg_aliases", @@ -488,23 +484,23 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aadb5b6ccbd078890f6d7003694e33816e6b784358f18e15e7e6d9f065a57cd" +checksum = "51670c3aa053938b0ee3bd67c3817e471e626151131b934038e83c5bf8de48f5" dependencies = [ "once_cell", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", "syn_derive", ] [[package]] name = "brotli" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" +checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -523,24 +519,24 @@ dependencies = [ [[package]] name = "bstr" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc" +checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" dependencies = [ "memchr", ] [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" [[package]] name = "bytecheck" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" dependencies = [ "bytecheck_derive", "ptr_meta", @@ -549,9 +545,9 @@ dependencies = [ [[package]] name = "bytecheck_derive" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" dependencies = [ "proc-macro2", "quote", @@ -560,9 +556,9 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6" +checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" [[package]] name = "byteorder" @@ -572,9 +568,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "bzip2" @@ -605,9 +601,9 @@ checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" [[package]] name = "cc" -version = "1.0.83" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +checksum = 
"8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" dependencies = [ "jobserver", "libc", @@ -636,9 +632,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" dependencies = [ "android-tzdata", "iana-time-zone", @@ -646,7 +642,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-targets 0.52.4", ] [[package]] @@ -781,9 +777,9 @@ dependencies = [ [[package]] name = "cookie" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ "time", "version_check", @@ -831,9 +827,9 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if", ] @@ -921,24 +917,24 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.44" +version = "0.4.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22" +checksum = "1e2161dd6eba090ff1594084e95fd67aeccf04382ffea77999ea94ed42ec67b6" dependencies = [ "curl-sys", "libc", "openssl-probe", "openssl-sys", "schannel", - "socket2 0.4.10", - "winapi", + "socket2 0.5.6", + "windows-sys 0.52.0", ] [[package]] name = "curl-sys" 
-version = "0.4.70+curl-8.5.0" +version = "0.4.72+curl-8.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0333d8849afe78a4c8102a429a446bfdd055832af071945520e835ae2d841e" +checksum = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea" dependencies = [ "cc", "libc", @@ -947,7 +943,7 @@ dependencies = [ "openssl-sys", "pkg-config", "vcpkg", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -962,12 +958,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core 0.20.3", - "darling_macro 0.20.3", + "darling_core 0.20.8", + "darling_macro 0.20.8", ] [[package]] @@ -986,16 +982,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -1011,13 +1007,13 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "darling_core 0.20.3", + "darling_core 0.20.8", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -1109,7 +1105,7 @@ checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 
2.0.55", ] [[package]] @@ -1225,20 +1221,20 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" dependencies = [ "serde", ] [[package]] name = "email-encoding" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfb21b9878cf7a348dcb8559109aabc0ec40d69924bd706fa5149846c4fef75" +checksum = "60d1d33cdaede7e24091f039632eb5d3c7469fe5b066a985281a34fc70fa317f" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "memchr", ] @@ -1298,9 +1294,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "exr" -version = "1.71.0" +version = "1.72.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "832a761f35ab3e6664babfbdc6cef35a4860e816ec3916dcfd0882954e98a8a8" +checksum = "887d93f60543e9a9362ef8a21beedd0a833c5d9610e18c67abe15a5963dcb1a4" dependencies = [ "bit_field", "flume", @@ -1333,9 +1329,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" [[package]] name = "fdeflate" @@ -1515,7 +1511,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -1532,9 +1528,9 @@ checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-timer" -version = "3.0.2" +version = "3.0.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" @@ -1577,9 +1573,9 @@ dependencies = [ [[package]] name = "gif" -version = "0.12.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80792593675e051cf94a4b111980da2ba60d4a83e43e0048c5693baab3977045" +checksum = "3fb2d69b19215e18bb912fa30f7ce15846e301408695e44e0ef719f1da9e19f2" dependencies = [ "color_quant", "weezl", @@ -1593,9 +1589,9 @@ checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "governor" -version = "0.6.0" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "821239e5672ff23e2a7060901fa622950bbd80b649cdaadd78d1c1767ed14eb4" +checksum = "68a7f542ee6b35af73b06abc0dad1c1bae89964e4e253bc4b587b91c9637867b" dependencies = [ "cfg-if", "dashmap", @@ -1604,43 +1600,26 @@ dependencies = [ "no-std-compat", "nonzero_ext", "parking_lot", + "portable-atomic", "quanta", "rand", "smallvec", + "spinning_top", ] [[package]] name = "h2" -version = "0.3.24" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +checksum = "4fbd2820c5e49886948654ab546d0688ff24530286bdcf8fca3cefb16d4618eb" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http 0.2.11", - "indexmap 2.1.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 1.0.0", - "indexmap 2.1.0", + "http 0.2.12", 
+ "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1649,10 +1628,11 @@ dependencies = [ [[package]] name = "half" -version = "2.2.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b4af3693f1b705df946e9fe5631932443781d0aabb423b62fcd4d73f6d2fd0" +checksum = "b5eceaaeec696539ddaf7b333340f1af35a5aa87ae3e4f3ead0532f72affab2e" dependencies = [ + "cfg-if", "crunchy", ] @@ -1662,16 +1642,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.7", -] - -[[package]] -name = "hashbrown" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" -dependencies = [ - "ahash 0.8.7", + "ahash 0.7.8", ] [[package]] @@ -1680,7 +1651,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "allocator-api2", ] @@ -1713,9 +1684,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.4" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -1773,9 +1744,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -1784,9 +1755,9 @@ dependencies = [ [[package]] name = "http" -version = "1.0.0" +version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -1800,7 +1771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", + "http 0.2.12", "pin-project-lite", ] @@ -1811,18 +1782,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ "bytes", - "http 1.0.0", + "http 1.1.0", ] [[package]] name = "http-body-util" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" dependencies = [ "bytes", - "futures-util", - "http 1.0.0", + "futures-core", + "http 1.1.0", "http-body 1.0.0", "pin-project-lite", ] @@ -1855,14 +1826,14 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.24", - "http 0.2.11", + "h2", + "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.5", + "socket2 0.5.6", "tokio", "tower-service", "tracing", @@ -1871,20 +1842,20 @@ dependencies = [ [[package]] name = "hyper" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.2", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "httparse", "httpdate", "itoa", "pin-project-lite", + "smallvec", "tokio", "want", ] @@ -1904,18 
+1875,18 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdea9aac0dbe5a9240d68cfd9501e2db94222c6dc06843e06640b9e07f0fdc67" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", - "hyper 1.1.0", + "hyper 1.2.0", "pin-project-lite", - "socket2 0.5.5", + "socket2 0.5.6", "tokio", "tower", "tower-service", @@ -1924,9 +1895,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.59" +version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6a67363e2aa4443928ce15e57ebae94fd8949958fd1223c4cfc0cd473ad7539" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1979,9 +1950,9 @@ checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" [[package]] name = "image" -version = "0.24.8" +version = "0.24.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034bbe799d1909622a74d1193aa50147769440040ff36cb2baa947609b0a4e23" +checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" dependencies = [ "bytemuck", "byteorder", @@ -2008,9 +1979,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -2052,12 +2023,12 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.10" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ "hermit-abi", - "rustix", + "libc", "windows-sys 0.52.0", ] @@ -2075,7 +2046,7 @@ dependencies = [ "encoding_rs", "event-listener", "futures-lite", - "http 0.2.11", + "http 0.2.12", "log", "mime", "once_cell", @@ -2117,18 +2088,18 @@ dependencies = [ [[package]] name = "itertools" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jemalloc-sys" @@ -2152,9 +2123,9 @@ dependencies = [ [[package]] name = "jobserver" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" dependencies = [ "libc", ] @@ -2170,9 +2141,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.67" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -2211,7 +2182,7 @@ dependencies = [ "axum-prometheus", "axum-test", "base64 0.21.7", - "bitflags 2.4.2", + "bitflags 2.5.0", "bytes", "censor", "chrono", @@ -2231,7 +2202,7 @@ dependencies = [ "hyper 0.14.28", 
"hyper-tls", "image", - "itertools 0.12.0", + "itertools 0.12.1", "jemallocator", "json-patch", "lazy_static", @@ -2239,6 +2210,7 @@ dependencies = [ "maxminddb", "meilisearch-sdk", "multer", + "murmur2", "rand", "rand_chacha", "redis", @@ -2248,7 +2220,6 @@ dependencies = [ "rust_decimal", "rust_iso3166", "sentry", - "sentry-tower", "serde", "serde_json", "serde_urlencoded", @@ -2295,15 +2266,15 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "lettre" -version = "0.11.3" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5aaf628956b6b0852e12ac3505d20d7a12ecc1e32d5ea921f002af4a74036a5" +checksum = "47460276655930189e0919e4fbf46e46476b14f934f18a63dd726a5fb7b60e2e" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "chumsky", "email-encoding", "email_address", - "fastrand 2.0.1", + "fastrand 2.0.2", "futures-util", "hostname", "httpdate", @@ -2311,17 +2282,18 @@ dependencies = [ "mime", "native-tls", "nom", + "percent-encoding", "quoted_printable", - "socket2 0.5.5", + "socket2 0.5.6", "tokio", "url", ] [[package]] name = "libc" -version = "0.2.152" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libm" @@ -2345,7 +2317,7 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "libc", "redox_syscall", ] @@ -2363,9 +2335,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.14" +version = "1.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "295c17e837573c8c821dbaeb3cceb3d745ad082f7572191409e69cbc1b3fd050" +checksum = 
"5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9" dependencies = [ "cc", "libc", @@ -2397,9 +2369,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "lru-cache" @@ -2430,15 +2402,6 @@ dependencies = [ "libc", ] -[[package]] -name = "mach2" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" -dependencies = [ - "libc", -] - [[package]] name = "match_cfg" version = "0.1.0" @@ -2474,13 +2437,13 @@ dependencies = [ [[package]] name = "maybe-async" -version = "0.2.7" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1b8c13cb1f814b634a96b2c725449fe7ed464a7b8781de8688be5ffbd3f305" +checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.55", ] [[package]] @@ -2540,59 +2503,46 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "metrics" -version = "0.21.1" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5" +checksum = "2be3cbd384d4e955b231c895ce10685e3d8260c5ccffae898c96c723b0772835" dependencies = [ - "ahash 0.8.7", - "metrics-macros", + "ahash 0.8.11", "portable-atomic", ] [[package]] name = "metrics-exporter-prometheus" -version = "0.12.2" +version = 
"0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d4fa7ce7c4862db464a37b0b31d89bca874562f034bd7993895572783d02950" +checksum = "9bf4e7146e30ad172c42c39b3246864bd2d3c6396780711a1baf749cfe423e21" dependencies = [ "base64 0.21.7", "hyper 0.14.28", - "indexmap 1.9.3", + "indexmap 2.2.6", "ipnet", "metrics", "metrics-util", "quanta", "thiserror", "tokio", - "tracing", -] - -[[package]] -name = "metrics-macros" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b4faf00617defe497754acde3024865bc143d44a86799b24e191ecff91354f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", ] [[package]] name = "metrics-util" -version = "0.15.1" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4de2ed6e491ed114b40b732e4d1659a9d53992ebd87490c44a6ffe23739d973e" +checksum = "8b07a5eb561b8cbc16be2d216faf7757f9baf3bfb94dbb0fae3df8387a5bb47f" dependencies = [ "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.13.1", + "hashbrown 0.14.3", "metrics", "num_cpus", "quanta", @@ -2632,9 +2582,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ "adler", "simd-adler32", @@ -2642,9 +2592,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "wasi", @@ -2660,7 +2610,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http 1.0.0", + 
"http 1.1.0", "httparse", "log", "memchr", @@ -2669,6 +2619,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "murmur2" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb585ade2549a017db2e35978b77c319214fa4b37cede841e27954dd6e8f3ca8" + [[package]] name = "native-tls" version = "0.2.11" @@ -2687,17 +2643,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "nix" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" -dependencies = [ - "bitflags 2.4.2", - "cfg-if", - "libc", -] - [[package]] name = "no-std-compat" version = "0.4.1" @@ -2747,21 +2692,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] [[package]] name = "num-iter" -version = "0.1.43" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9" dependencies = [ "autocfg", "num-integer", @@ -2770,9 +2720,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" dependencies = [ 
"autocfg", "libm", @@ -2811,17 +2761,17 @@ checksum = "44d11de466f4a3006fe8a5e7ec84e93b79c70cb992ae0aa0eb631ad2df8abfe2" [[package]] name = "opaque-debug" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.62" +version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cde4d2d9200ad5909f8dac647e29482e07c3a35de8a13fce7c9c7747ad9f671" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "cfg-if", "foreign-types", "libc", @@ -2838,7 +2788,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -2849,9 +2799,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.98" +version = "0.9.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1665caf8ab2dc9aef43d1c0023bd904633a6a05cb30b0ad59bec2ae986e57a7" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" dependencies = [ "cc", "libc", @@ -2871,13 +2821,13 @@ dependencies = [ [[package]] name = "os_info" -version = "3.7.0" +version = "3.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e" +checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" dependencies = [ "log", "serde", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -3000,7 +2950,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -3035,29 +2985,29 @@ dependencies = [ 
[[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -3088,15 +3038,15 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "png" -version = "0.17.11" +version = "0.17.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f6c3c3e617595665b8ea2ff95a86066be38fb121ff920a9c0eb282abcd1da5a" +checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1" dependencies = [ "bitflags 1.3.2", "crc32fast", @@ -3198,9 +3148,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = 
"e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -3245,13 +3195,12 @@ dependencies = [ [[package]] name = "quanta" -version = "0.11.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab" +checksum = "9ca0b7bac0b97248c40bb77288fc52029cf1459c0461ea1b05ee32ccf011de2c" dependencies = [ "crossbeam-utils", "libc", - "mach2", "once_cell", "raw-cpuid", "wasi", @@ -3348,18 +3297,18 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "10.7.0" +version = "11.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" +checksum = "9d86a7c4638d42c44551f4791a20e687dbb4c3de1f33c43dd71e355cd429def1" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", ] [[package]] name = "rayon" -version = "1.8.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -3381,7 +3330,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "async-trait", "bytes", "combine", @@ -3420,14 +3369,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.4.6", + "regex-syntax 0.8.3", ] 
[[package]] @@ -3441,13 +3390,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.3", ] [[package]] @@ -3470,32 +3419,32 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "rend" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd" +checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c" dependencies = [ "bytecheck", ] [[package]] name = "reqwest" -version = "0.11.23" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "base64 0.21.7", "bytes", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.24", - "http 0.2.11", + "h2", + "http 0.2.12", "http-body 0.4.6", "hyper 0.14.28", "hyper-tls", @@ -3508,9 +3457,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", @@ -3560,23 +3511,24 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.7" +version = 
"0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", + "cfg-if", "getrandom", "libc", "spin 0.9.8", "untrusted 0.9.0", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "rkyv" -version = "0.7.43" +version = "0.7.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "527a97cdfef66f65998b5f3b637c26f5a5ec09cc52a3f9932313ac645f4190f5" +checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" dependencies = [ "bitvec", "bytecheck", @@ -3592,9 +3544,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.43" +version = "0.7.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5c462a1328c8e67e4d6dbad1eb0355dd43e8ab432c6e227a43657f16ade5033" +checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" dependencies = [ "proc-macro2", "quote", @@ -3640,7 +3592,7 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http 0.2.11", + "http 0.2.12", "mime", "mime_guess", "rand", @@ -3662,7 +3614,7 @@ dependencies = [ "futures", "hex", "hmac 0.12.1", - "http 0.2.11", + "http 0.2.12", "log", "maybe-async", "md5", @@ -3682,9 +3634,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.33.1" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" +checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" dependencies = [ "arrayvec", "borsh", @@ -3698,9 +3650,9 @@ dependencies = [ [[package]] name = "rust_iso3166" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc46f436f726b768364d35d099f43a94f22fd34857ff4f679b1f5cbcb03b9f71" +checksum = 
"e676677b94480848a8d88c74553bad50aed2ee77d8c0985aa50d8c4e26f3054b" dependencies = [ "js-sys", "phf", @@ -3725,11 +3677,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.30" +version = "0.38.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" +checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.5.0", "errno", "libc", "linux-raw-sys", @@ -3742,7 +3694,7 @@ version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ - "ring 0.17.7", + "ring 0.17.8", "rustls-webpki", "sct", ] @@ -3762,7 +3714,7 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.7", + "ring 0.17.8", "untrusted 0.9.0", ] @@ -3791,9 +3743,9 @@ checksum = "22a197350ece202f19a166d1ad6d9d6de145e1d2a8ef47db299abe164dbd7530" [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "schannel" @@ -3825,7 +3777,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.17.7", + "ring 0.17.8", "untrusted 0.9.0", ] @@ -3872,15 +3824,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = 
"92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] name = "sentry" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab18211f62fb890f27c9bb04861f76e4be35e4c2fcbfc2d98afa37aadebb16f1" +checksum = "766448f12e44d68e675d5789a261515c46ac6ccd240abdd451a9c46c84a49523" dependencies = [ "httpdate", "native-tls", @@ -3898,9 +3850,9 @@ dependencies = [ [[package]] name = "sentry-backtrace" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf018ff7d5ce5b23165a9cbfee60b270a55ae219bc9eebef2a3b6039356dd7e5" +checksum = "32701cad8b3c78101e1cd33039303154791b0ff22e7802ed8cc23212ef478b45" dependencies = [ "backtrace", "once_cell", @@ -3910,9 +3862,9 @@ dependencies = [ [[package]] name = "sentry-contexts" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d934df6f9a17b8c15b829860d9d6d39e78126b5b970b365ccbd817bc0fe82c9" +checksum = "17ddd2a91a13805bd8dab4ebf47323426f758c35f7bf24eacc1aded9668f3824" dependencies = [ "hostname", "libc", @@ -3924,9 +3876,9 @@ dependencies = [ [[package]] name = "sentry-core" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e362d3fb1c5de5124bf1681086eaca7adf6a8c4283a7e1545359c729f9128ff" +checksum = "b1189f68d7e7e102ef7171adf75f83a59607fafd1a5eecc9dc06c026ff3bdec4" dependencies = [ "once_cell", "rand", @@ -3937,9 +3889,9 @@ dependencies = [ [[package]] name = "sentry-debug-images" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8bca420d75d9e7a8e54a4806bf4fa8a7e9a804e8f2ff05c7c80234168c6ca66" +checksum = "7b4d0a615e5eeca5699030620c119a094e04c14cf6b486ea1030460a544111a7" dependencies = [ "findshlibs", "once_cell", @@ -3948,9 +3900,9 @@ dependencies = [ [[package]] name = "sentry-panic" 
-version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0224e7a8e2bd8a32d96804acb8243d6d6e073fed55618afbdabae8249a964d8" +checksum = "d1c18d0b5fba195a4950f2f4c31023725c76f00aabb5840b7950479ece21b5ca" dependencies = [ "sentry-backtrace", "sentry-core", @@ -3958,12 +3910,12 @@ dependencies = [ [[package]] name = "sentry-tower" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca654f9bb134581169b51f2dcf713ae0909157121870a0b94e369368f75ab050" +checksum = "d87dfe009138dc515009842619b562e03b2b3f926a91318ec7ae23d09435f8b4" dependencies = [ "axum", - "http 1.0.0", + "http 1.1.0", "pin-project", "sentry-core", "tower-layer", @@ -3973,9 +3925,9 @@ dependencies = [ [[package]] name = "sentry-tracing" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "087bed8c616d176a9c6b662a8155e5f23b40dc9e1fa96d0bd5fb56e8636a9275" +checksum = "3012699a9957d7f97047fd75d116e22d120668327db6e7c59824582e16e791b2" dependencies = [ "sentry-backtrace", "sentry-core", @@ -3985,9 +3937,9 @@ dependencies = [ [[package]] name = "sentry-types" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb4f0e37945b7a8ce7faebc310af92442e2d7c5aa7ef5b42fe6daa98ee133f65" +checksum = "c7173fd594569091f68a7c37a886e202f4d0c1db1e1fa1d18a051ba695b2e2ec" dependencies = [ "debugid", "hex", @@ -4002,22 +3954,22 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.195" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.195" +version = "1.0.197" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -4033,9 +3985,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.111" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" +checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" dependencies = [ "itoa", "ryu", @@ -4044,9 +3996,9 @@ dependencies = [ [[package]] name = "serde_path_to_error" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebd154a240de39fdebcf5775d2675c204d7c13cf39a4c697be6493c8e734337c" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ "itoa", "serde", @@ -4066,16 +4018,17 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a" dependencies = [ "base64 0.21.7", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", + "serde_derive", "serde_json", "serde_with_macros", "time", @@ -4083,14 +4036,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ - "darling 0.20.3", + "darling 0.20.8", "proc-macro2", "quote", - "syn 2.0.48", + 
"syn 2.0.55", ] [[package]] @@ -4182,9 +4135,9 @@ checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "sketches-ddsketch" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a406c1882ed7f29cd5e248c9848a80e7cb6ae0fea82346d2746f2f941c07e1" +checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" [[package]] name = "slab" @@ -4208,9 +4161,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smartstring" @@ -4235,19 +4188,19 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "spdx" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bde1398b09b9f93fc2fc9b9da86e362693e999d3a54a8ac47a99a5a73f638b" +checksum = "29ef1a0fa1e39ac22972c8db23ff89aea700ab96aa87114e1fb55937a631a0c9" dependencies = [ "smallvec", ] @@ -4267,6 +4220,15 @@ dependencies = [ "lock_api", ] +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + [[package]] name = "spki" version = "0.7.3" @@ -4283,16 +4245,16 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools 0.12.0", + "itertools 0.12.1", "nom", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf" +checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" dependencies = [ "sqlx-core", "sqlx-macros", @@ -4303,18 +4265,17 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd" +checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.11", "atoi", "byteorder", "bytes", "chrono", "crc", "crossbeam-queue", - "dotenvy", "either", "event-listener", "futures-channel", @@ -4324,7 +4285,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.1.0", + "indexmap 2.2.6", "log", "memchr", "once_cell", @@ -4348,9 +4309,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5" +checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ "proc-macro2", "quote", @@ -4361,11 +4322,10 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841" +checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" dependencies = [ - "atomic-write-file", "dotenvy", "either", "heck 0.4.1", @@ -4388,13 +4348,13 @@ dependencies = [ [[package]] name = 
"sqlx-mysql" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4" +checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" dependencies = [ "atoi", "base64 0.21.7", - "bitflags 2.4.2", + "bitflags 2.5.0", "byteorder", "bytes", "chrono", @@ -4432,13 +4392,13 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24" +checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" dependencies = [ "atoi", "base64 0.21.7", - "bitflags 2.4.2", + "bitflags 2.5.0", "byteorder", "chrono", "crc", @@ -4461,7 +4421,6 @@ dependencies = [ "rust_decimal", "serde", "serde_json", - "sha1 0.10.6", "sha2 0.10.8", "smallvec", "sqlx-core", @@ -4473,9 +4432,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490" +checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" dependencies = [ "atoi", "chrono", @@ -4572,9 +4531,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0" dependencies = [ "proc-macro2", "quote", @@ -4590,7 +4549,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -4599,6 +4558,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384595c11a4e2969895cad5a8c4029115f5ab956a9e5ef4de79d11a426e5f20c" + [[package]] name = "system-configuration" version = "0.5.1" @@ -4639,13 +4604,12 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.9.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "redox_syscall", + "fastrand 2.0.2", "rustix", "windows-sys 0.52.0", ] @@ -4663,29 +4627,29 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -4704,12 +4668,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.31" +version = "0.3.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" dependencies = [ "deranged", "itoa", + "num-conv", "powerfmt", "serde", "time-core", @@ -4724,10 +4689,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26197e33420244aeb70c3e8c78376ca46571bc4e701e4791c2cd9f57dcb3a43f" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" dependencies = [ + "num-conv", "time-core", ] @@ -4748,9 +4714,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.35.1" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ "backtrace", "bytes", @@ -4758,7 +4724,7 @@ dependencies = [ "mio", "num_cpus", "pin-project-lite", - "socket2 0.5.5", + "socket2 0.5.6", "tokio-macros", "windows-sys 0.48.0", ] @@ -4771,7 +4737,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -4786,9 +4752,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -4829,11 +4795,11 @@ checksum = 
"3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "toml_datetime", "winnow", ] @@ -4870,16 +4836,16 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0da193277a4e2c33e59e09b5861580c33dd0a637c3883d0fa74ba40c0374af2e" +checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ "async-compression", - "bitflags 2.4.2", + "bitflags 2.5.0", "bytes", "futures-core", "futures-util", - "http 1.0.0", + "http 1.1.0", "http-body 1.0.0", "http-body-util", "http-range-header", @@ -4927,7 +4893,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -4981,9 +4947,9 @@ dependencies = [ [[package]] name = "treediff" -version = "4.0.2" +version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52984d277bdf2a751072b5df30ec0377febdb02f7696d64c2d7d54630bac4303" +checksum = "4d127780145176e2b5d16611cc25a900150e86e9fd79d3bde6ff3a37359c9cb5" dependencies = [ "serde_json", ] @@ -5003,7 +4969,7 @@ dependencies = [ "byteorder", "bytes", "data-encoding", - "http 1.0.0", + "http 1.1.0", "httparse", "log", "rand", @@ -5051,18 +5017,18 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" @@ -5090,9 +5056,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.9.1" +version = "2.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" +checksum = "11f214ce18d8b2cbe84ed3aa6486ed3f5b285cf8d8fbdbce9f3f767a724adc35" dependencies = [ "base64 0.21.7", "log", @@ -5127,9 +5093,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "uuid" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" dependencies = [ "getrandom", "rand", @@ -5218,11 +5184,17 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" -version = "0.2.90" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" 
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5230,24 +5202,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.90" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.40" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if", "js-sys", @@ -5257,9 +5229,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.90" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5267,28 +5239,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.90" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.90" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-streams" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" dependencies = [ "futures-util", "js-sys", @@ -5299,9 +5271,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.67" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -5309,21 +5281,25 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.3" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "weezl" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" +checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" [[package]] name = "whoami" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" +checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +dependencies = [ + "redox_syscall", + "wasite", +] [[package]] name = "winapi" @@ -5353,7 +5329,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.4", ] [[package]] @@ -5371,7 +5347,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.4", ] [[package]] @@ -5391,17 +5367,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", ] [[package]] @@ -5412,9 +5388,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" [[package]] name = "windows_aarch64_msvc" @@ -5424,9 +5400,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" [[package]] name = "windows_i686_gnu" @@ -5436,9 +5412,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" [[package]] name = "windows_i686_msvc" @@ -5448,9 +5424,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" [[package]] name = "windows_x86_64_gnu" @@ -5460,9 +5436,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" [[package]] name = "windows_x86_64_gnullvm" @@ -5472,9 +5448,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" [[package]] name = "windows_x86_64_msvc" @@ -5484,15 +5460,15 @@ checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" [[package]] name = "winnow" -version = "0.5.34" +version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] @@ -5600,7 +5576,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.55", ] [[package]] @@ -5640,11 +5616,11 @@ dependencies = [ [[package]] name = "zstd" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" +checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" dependencies = [ - "zstd-safe 7.0.0", + "zstd-safe 7.1.0", ] [[package]] @@ -5659,18 +5635,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.0.0" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" +checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.9+zstd.1.5.5" +version = "2.0.10+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" +checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" dependencies = [ "cc", "pkg-config", 
diff --git a/Cargo.toml b/Cargo.toml index cfaf2999..dce8069c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,12 +11,11 @@ name = "labrinth" path = "src/main.rs" [dependencies] -axum = { version = "0.7.4", features = ["json", "form", "multipart", "query", "ws", "macros"] } -axum-prometheus = "0.5.0" +axum = { version = "0.7.5", features = ["json", "form", "multipart", "query", "ws", "macros"] } +axum-prometheus = "0.6.1" # middleware + utils sentry = { version = "0.32.1", features = ["tracing", "tower", "tower-http", "tower-axum-matched-path"] } -sentry-tower = { verison = "0.32.1", features = ["axum-matched-path", "http"] } tower-http = { version = "0.5.0", features = ["trace", "cors", "sensitive-headers", "fs", "compression-zstd", "compression-gzip", "compression-deflate", "compression-br"] } tower = "0.4.13" governor = "0.6.0" @@ -57,6 +56,7 @@ sha1 = { version = "0.6.1", features = ["std"] } sha2 = "0.9.9" hmac = "0.11.0" argon2 = { version = "0.5.0", features = ["std"] } +murmur2 = "0.1.0" bitflags = "2.4.0" hex = "0.4.3" zxcvbn = "2.2.2" diff --git a/migrations/20240131224610_moderation_packs.sql b/migrations/20240131224610_moderation_packs.sql new file mode 100644 index 00000000..49040ec5 --- /dev/null +++ b/migrations/20240131224610_moderation_packs.sql @@ -0,0 +1,19 @@ +CREATE TABLE moderation_external_licenses ( + id bigint PRIMARY KEY, + title text not null, + status text not null, + link text null, + exceptions text null, + proof text null, + flame_project_id integer null +); + +CREATE TABLE moderation_external_files ( + sha1 bytea PRIMARY KEY, + external_license_id bigint references moderation_external_licenses not null +); + +ALTER TABLE files ADD COLUMN metadata jsonb NULL; + +INSERT INTO users (id, username, name, email, avatar_url, bio, role, badges, balance) +VALUES (0, 'AutoMod', 'AutoMod', 'support@modrinth.com', 'https://cdn.modrinth.com/user/2REoufqX/6aabaf2d1fca2935662eca4ce451cd9775054c22.png', 'An automated account performing moderation 
utilities for Modrinth.', 'moderator', 0, 0) \ No newline at end of file diff --git a/migrations/20240221215354_moderation_pack_fixes.sql b/migrations/20240221215354_moderation_pack_fixes.sql new file mode 100644 index 00000000..67eff677 --- /dev/null +++ b/migrations/20240221215354_moderation_pack_fixes.sql @@ -0,0 +1,2 @@ +ALTER TABLE moderation_external_files ALTER COLUMN sha1 SET NOT NULL; +ALTER TABLE moderation_external_licenses ALTER COLUMN title DROP NOT NULL; diff --git a/migrations/20240319195753_threads-updates.sql b/migrations/20240319195753_threads-updates.sql new file mode 100644 index 00000000..4681958b --- /dev/null +++ b/migrations/20240319195753_threads-updates.sql @@ -0,0 +1,9 @@ +ALTER TABLE threads DROP COLUMN show_in_mod_inbox; + +ALTER TABLE threads_messages ADD COLUMN hide_identity BOOLEAN default false NOT NULL; + +UPDATE threads_messages +SET hide_identity = TRUE +FROM users +WHERE threads_messages.author_id = users.id +AND users.role IN ('moderator', 'admin'); \ No newline at end of file diff --git a/src/auth/mod.rs b/src/auth/mod.rs index fac7776d..746bffb6 100644 --- a/src/auth/mod.rs +++ b/src/auth/mod.rs @@ -55,7 +55,7 @@ impl IntoResponse for AuthenticationError { fn into_response(self) -> Response { let error_message = ApiError { error: self.error_name(), - description: &self.to_string(), + description: self.to_string(), }; (self.status_code(), Json(error_message)).into_response() diff --git a/src/auth/oauth/errors.rs b/src/auth/oauth/errors.rs index d3e18d06..9e4c67d8 100644 --- a/src/auth/oauth/errors.rs +++ b/src/auth/oauth/errors.rs @@ -105,7 +105,7 @@ impl IntoResponse for OAuthError { status_code, Json(ApiError { error: &self.error_type.error_name(), - description: &self.error_type.to_string(), + description: self.error_type.to_string(), }), ) .into_response() diff --git a/src/clickhouse/mod.rs b/src/clickhouse/mod.rs index c1763dc6..a89d47f4 100644 --- a/src/clickhouse/mod.rs +++ b/src/clickhouse/mod.rs @@ -42,14 +42,15 @@ 
pub async fn init_client_with_database( user_id UInt64, project_id UInt64, + monetized Bool DEFAULT True, ip IPv6, country String, user_agent String, - headers Array(Tuple(String, String)), + headers Array(Tuple(String, String)) ) ENGINE = MergeTree() - PRIMARY KEY (project_id, recorded) + PRIMARY KEY (project_id, recorded, ip) " )) .execute() @@ -71,10 +72,10 @@ pub async fn init_client_with_database( ip IPv6, country String, user_agent String, - headers Array(Tuple(String, String)), + headers Array(Tuple(String, String)) ) ENGINE = MergeTree() - PRIMARY KEY (project_id, recorded) + PRIMARY KEY (project_id, recorded, ip) " )) .execute() @@ -94,10 +95,10 @@ pub async fn init_client_with_database( loader String, game_version String, - parent UInt64, + parent UInt64 ) ENGINE = MergeTree() - PRIMARY KEY (project_id, recorded) + PRIMARY KEY (project_id, recorded, user_id) " )) .execute() diff --git a/src/database/models/collection_item.rs b/src/database/models/collection_item.rs index a2c29283..1f703950 100644 --- a/src/database/models/collection_item.rs +++ b/src/database/models/collection_item.rs @@ -4,6 +4,8 @@ use crate::database::models::DatabaseError; use crate::database::redis::RedisPool; use crate::models::collections::CollectionStatus; use chrono::{DateTime, Utc}; +use dashmap::DashMap; +use futures::TryStreamExt; use serde::{Deserialize, Serialize}; const COLLECTIONS_NAMESPACE: &str = "collections"; @@ -155,93 +157,55 @@ impl Collection { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - use futures::TryStreamExt; - - let mut redis = redis.connect().await?; - - if collection_ids.is_empty() { - return Ok(Vec::new()); - } - - let mut found_collections = Vec::new(); - let mut remaining_collections: Vec = collection_ids.to_vec(); - - if !collection_ids.is_empty() { - let collections = redis - .multi_get::( - COLLECTIONS_NAMESPACE, - collection_ids.iter().map(|x| x.0.to_string()), - ) - .await?; - - for collection in collections { - if let 
Some(collection) = - collection.and_then(|x| serde_json::from_str::(&x).ok()) - { - remaining_collections.retain(|x| collection.id.0 != x.0); - found_collections.push(collection); - continue; - } - } - } + let val = redis + .get_cached_keys( + COLLECTIONS_NAMESPACE, + &collection_ids.iter().map(|x| x.0).collect::>(), + |collection_ids| async move { + let collections = sqlx::query!( + " + SELECT c.id id, c.name name, c.description description, + c.icon_url icon_url, c.color color, c.created created, c.user_id user_id, + c.updated updated, c.status status, + ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods + FROM collections c + LEFT JOIN collections_mods cm ON cm.collection_id = c.id + WHERE c.id = ANY($1) + GROUP BY c.id; + ", + &collection_ids, + ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, m| { + let collection = Collection { + id: CollectionId(m.id), + user_id: UserId(m.user_id), + name: m.name.clone(), + description: m.description.clone(), + icon_url: m.icon_url.clone(), + color: m.color.map(|x| x as u32), + created: m.created, + updated: m.updated, + status: CollectionStatus::from_string(&m.status), + projects: m + .mods + .unwrap_or_default() + .into_iter() + .map(ProjectId) + .collect(), + }; + + acc.insert(m.id, collection); + async move { Ok(acc) } + }) + .await?; - if !remaining_collections.is_empty() { - let collection_ids_parsed: Vec = - remaining_collections.iter().map(|x| x.0).collect(); - let db_collections: Vec = sqlx::query!( - " - SELECT c.id id, c.name name, c.description description, - c.icon_url icon_url, c.color color, c.created created, c.user_id user_id, - c.updated updated, c.status status, - ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods - FROM collections c - LEFT JOIN collections_mods cm ON cm.collection_id = c.id - WHERE c.id = ANY($1) - GROUP BY c.id; - ", - &collection_ids_parsed, + Ok(collections) + }, ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|m| { - 
let id = m.id; - - Collection { - id: CollectionId(id), - user_id: UserId(m.user_id), - name: m.name.clone(), - description: m.description.clone(), - icon_url: m.icon_url.clone(), - color: m.color.map(|x| x as u32), - created: m.created, - updated: m.updated, - status: CollectionStatus::from_string(&m.status), - projects: m - .mods - .unwrap_or_default() - .into_iter() - .map(ProjectId) - .collect(), - } - })) - }) - .try_collect::>() .await?; - for collection in db_collections { - redis - .set_serialized_to_json( - COLLECTIONS_NAMESPACE, - collection.id.0, - &collection, - None, - ) - .await?; - found_collections.push(collection); - } - } - - Ok(found_collections) + Ok(val) } pub async fn clear_cache(id: CollectionId, redis: &RedisPool) -> Result<(), DatabaseError> { diff --git a/src/database/models/ids.rs b/src/database/models/ids.rs index aea86e2e..17b19f03 100644 --- a/src/database/models/ids.rs +++ b/src/database/models/ids.rs @@ -1,6 +1,6 @@ use super::DatabaseError; use crate::models::ids::base62_impl::to_base62; -use crate::models::ids::random_base62_rng; +use crate::models::ids::{random_base62_rng, random_base62_rng_range}; use censor::Censor; use rand::SeedableRng; use rand_chacha::ChaCha20Rng; @@ -43,6 +43,37 @@ macro_rules! generate_ids { }; } +macro_rules! 
generate_bulk_ids { + ($vis:vis $function_name:ident, $return_type:ty, $select_stmnt:literal, $id_function:expr) => { + $vis async fn $function_name( + count: usize, + con: &mut sqlx::Transaction<'_, sqlx::Postgres>, + ) -> Result, DatabaseError> { + let mut rng = ChaCha20Rng::from_entropy(); + let mut retry_count = 0; + + // Check if ID is unique + loop { + let base = random_base62_rng_range(&mut rng, 1, 10) as i64; + let ids = (0..count).map(|x| base + x as i64).collect::>(); + + let results = sqlx::query!($select_stmnt, &ids) + .fetch_one(&mut **con) + .await?; + + if !results.exists.unwrap_or(true) { + return Ok(ids.into_iter().map(|x| $id_function(x)).collect()); + } + + retry_count += 1; + if retry_count > ID_RETRY_COUNT { + return Err(DatabaseError::RandomId); + } + } + } + }; +} + generate_ids!( pub generate_project_id, ProjectId, @@ -123,6 +154,13 @@ generate_ids!( NotificationId ); +generate_bulk_ids!( + pub generate_many_notification_ids, + NotificationId, + "SELECT EXISTS(SELECT 1 FROM notifications WHERE id = ANY($1))", + NotificationId +); + generate_ids!( pub generate_thread_id, ThreadId, diff --git a/src/database/models/image_item.rs b/src/database/models/image_item.rs index 68477304..28297c15 100644 --- a/src/database/models/image_item.rs +++ b/src/database/models/image_item.rs @@ -2,6 +2,7 @@ use super::ids::*; use crate::database::redis::RedisPool; use crate::{database::models::DatabaseError, models::images::ImageContext}; use chrono::{DateTime, Utc}; +use dashmap::DashMap; use serde::{Deserialize, Serialize}; const IMAGES_NAMESPACE: &str = "images"; @@ -180,70 +181,44 @@ impl Image { { use futures::TryStreamExt; - let mut redis = redis.connect().await?; - if image_ids.is_empty() { - return Ok(Vec::new()); - } - - let mut found_images = Vec::new(); - let mut remaining_ids = image_ids.to_vec(); - - let image_ids = image_ids.iter().map(|x| x.0).collect::>(); - - if !image_ids.is_empty() { - let images = redis - .multi_get::(IMAGES_NAMESPACE, 
image_ids.iter().map(|x| x.to_string())) - .await?; - for image in images { - if let Some(image) = image.and_then(|x| serde_json::from_str::(&x).ok()) { - remaining_ids.retain(|x| image.id.0 != x.0); - found_images.push(image); - continue; - } - } - } - - if !remaining_ids.is_empty() { - let db_images: Vec = sqlx::query!( - " - SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id - FROM uploaded_images - WHERE id = ANY($1) - GROUP BY id; - ", - &remaining_ids.iter().map(|x| x.0).collect::>(), - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|i| { - let id = i.id; - - Image { - id: ImageId(id), - url: i.url, - size: i.size as u64, - created: i.created, - owner_id: UserId(i.owner_id), - context: i.context, - project_id: i.mod_id.map(ProjectId), - version_id: i.version_id.map(VersionId), - thread_message_id: i.thread_message_id.map(ThreadMessageId), - report_id: i.report_id.map(ReportId), - } - })) - }) - .try_collect::>() - .await?; - - for image in db_images { - redis - .set_serialized_to_json(IMAGES_NAMESPACE, image.id.0, &image, None) + let val = redis.get_cached_keys( + IMAGES_NAMESPACE, + &image_ids.iter().map(|x| x.0).collect::>(), + |image_ids| async move { + let images = sqlx::query!( + " + SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id + FROM uploaded_images + WHERE id = ANY($1) + GROUP BY id; + ", + &image_ids, + ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, i| { + let img = Image { + id: ImageId(i.id), + url: i.url, + size: i.size as u64, + created: i.created, + owner_id: UserId(i.owner_id), + context: i.context, + project_id: i.mod_id.map(ProjectId), + version_id: i.version_id.map(VersionId), + thread_message_id: i.thread_message_id.map(ThreadMessageId), + report_id: i.report_id.map(ReportId), + }; + + acc.insert(i.id, img); + async move { Ok(acc) } + }) .await?; - found_images.push(image); - } - } - Ok(found_images) + Ok(images) 
+ }, + ).await?; + + Ok(val) } pub async fn clear_cache(id: ImageId, redis: &RedisPool) -> Result<(), DatabaseError> { diff --git a/src/database/models/legacy_loader_fields.rs b/src/database/models/legacy_loader_fields.rs index 4b5ec833..f05e97bc 100644 --- a/src/database/models/legacy_loader_fields.rs +++ b/src/database/models/legacy_loader_fields.rs @@ -212,6 +212,13 @@ impl<'a> MinecraftGameVersionBuilder<'a> { .fetch_one(exec) .await?; + let mut conn = redis.connect().await?; + conn.delete( + crate::database::models::loader_fields::LOADER_FIELD_ENUM_VALUES_NAMESPACE, + game_versions_enum.id.0, + ) + .await?; + Ok(LoaderFieldEnumValueId(result.id)) } } diff --git a/src/database/models/loader_fields.rs b/src/database/models/loader_fields.rs index 5f3f72d4..e31b07ee 100644 --- a/src/database/models/loader_fields.rs +++ b/src/database/models/loader_fields.rs @@ -6,6 +6,7 @@ use super::DatabaseError; use crate::database::redis::RedisPool; use chrono::DateTime; use chrono::Utc; +use dashmap::DashMap; use futures::TryStreamExt; use itertools::Itertools; use serde::{Deserialize, Serialize}; @@ -16,7 +17,7 @@ const LOADERS_LIST_NAMESPACE: &str = "loaders"; const LOADER_FIELDS_NAMESPACE: &str = "loader_fields"; const LOADER_FIELDS_NAMESPACE_ALL: &str = "loader_fields_all"; const LOADER_FIELD_ENUMS_ID_NAMESPACE: &str = "loader_field_enums"; -const LOADER_FIELD_ENUM_VALUES_NAMESPACE: &str = "loader_field_enum_values"; +pub const LOADER_FIELD_ENUM_VALUES_NAMESPACE: &str = "loader_field_enum_values"; #[derive(Clone, Serialize, Deserialize, Debug)] pub struct Game { @@ -380,75 +381,47 @@ impl LoaderField { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - type RedisLoaderFieldTuple = (LoaderId, Vec); - - let mut redis = redis.connect().await?; - - let mut loader_ids = loader_ids.to_vec(); - let cached_fields: Vec = redis - .multi_get::(LOADER_FIELDS_NAMESPACE, loader_ids.iter().map(|x| x.0)) - .await? 
- .into_iter() - .flatten() - .filter_map(|x: String| serde_json::from_str::(&x).ok()) - .collect(); - - let mut found_loader_fields = HashMap::new(); - if !cached_fields.is_empty() { - for (loader_id, fields) in cached_fields { - if loader_ids.contains(&loader_id) { - found_loader_fields.insert(loader_id, fields); - loader_ids.retain(|x| x != &loader_id); - } - } - } - - if !loader_ids.is_empty() { - let result = sqlx::query!( - " - SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type, lfl.loader_id - FROM loader_fields lf - LEFT JOIN loader_fields_loaders lfl ON lfl.loader_field_id = lf.id - WHERE lfl.loader_id = ANY($1) - ", - &loader_ids.iter().map(|x| x.0).collect::>() - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().and_then(|r| { - Some((LoaderId(r.loader_id) ,LoaderField { - id: LoaderFieldId(r.id), - field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?, - field: r.field, - optional: r.optional, - min_val: r.min_val, - max_val: r.max_val, - })) - })) - }) - .try_collect::>() - .await?; - - let result: Vec = result - .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap>, x| { - acc.entry(x.0).or_default().push(x.1); - acc - }, + let val = redis.get_cached_keys_raw( + LOADER_FIELDS_NAMESPACE, + &loader_ids.iter().map(|x| x.0).collect::>(), + |loader_ids| async move { + let result = sqlx::query!( + " + SELECT DISTINCT lf.id, lf.field, lf.field_type, lf.optional, lf.min_val, lf.max_val, lf.enum_type, lfl.loader_id + FROM loader_fields lf + LEFT JOIN loader_fields_loaders lfl ON lfl.loader_field_id = lf.id + WHERE lfl.loader_id = ANY($1) + ", + &loader_ids, ) - .into_iter() - .collect_vec(); - - for (k, v) in result.into_iter() { - redis - .set_serialized_to_json(LOADER_FIELDS_NAMESPACE, k.0, (k, &v), None) + .fetch(exec) + .try_fold(DashMap::new(), |acc: DashMap>, r| { + if let Some(field_type) = LoaderFieldType::build(&r.field_type, r.enum_type) { + let loader_field = 
LoaderField { + id: LoaderFieldId(r.id), + field_type, + field: r.field, + optional: r.optional, + min_val: r.min_val, + max_val: r.max_val, + }; + + acc.entry(r.loader_id) + .or_default() + .push(loader_field); + } + + async move { + Ok(acc) + } + }) .await?; - found_loader_fields.insert(k, v); - } - } - Ok(found_loader_fields) + + Ok(result) + }, + ).await?; + + Ok(val.into_iter().map(|x| (LoaderId(x.0), x.1)).collect()) } // Gets all fields for a given loader(s) @@ -597,71 +570,51 @@ impl LoaderFieldEnumValue { loader_field_enum_ids: &[LoaderFieldEnumId], exec: E, redis: &RedisPool, - ) -> Result)>, DatabaseError> + ) -> Result>, DatabaseError> where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - let mut redis = redis.connect().await?; - let mut found_enums = Vec::new(); - let mut remaining_enums: Vec = loader_field_enum_ids.to_vec(); - - if !remaining_enums.is_empty() { - let enums = redis - .multi_get::( - LOADER_FIELD_ENUM_VALUES_NAMESPACE, - loader_field_enum_ids.iter().map(|x| x.0), + let val = redis.get_cached_keys_raw( + LOADER_FIELD_ENUM_VALUES_NAMESPACE, + &loader_field_enum_ids.iter().map(|x| x.0).collect::>(), + |loader_field_enum_ids| async move { + let values = sqlx::query!( + " + SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values + WHERE enum_id = ANY($1) + ORDER BY enum_id, ordering, created DESC + ", + &loader_field_enum_ids ) - .await?; - - for lfe in enums { - if let Some(lfe) = lfe.and_then(|x| { - serde_json::from_str::<(LoaderFieldEnumId, Vec)>(&x).ok() - }) { - remaining_enums.retain(|x| lfe.0 .0 != x.0); - found_enums.push(lfe.1); - continue; - } - } - } + .fetch(exec) + .try_fold(DashMap::new(), |acc: DashMap>, c| { + let value = LoaderFieldEnumValue { + id: LoaderFieldEnumValueId(c.id), + enum_id: LoaderFieldEnumId(c.enum_id), + value: c.value, + ordering: c.ordering, + created: c.created, + metadata: c.metadata.unwrap_or_default(), + }; + + acc.entry(c.enum_id) + .or_default() + .push(value); + + 
async move { + Ok(acc) + } + }) + .await?; - let remaining_enums = remaining_enums.iter().map(|x| x.0).collect::>(); - let result = sqlx::query!( - " - SELECT id, enum_id, value, ordering, metadata, created FROM loader_field_enum_values - WHERE enum_id = ANY($1) - ORDER BY enum_id, ordering, created DESC - ", - &remaining_enums - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|c| LoaderFieldEnumValue { - id: LoaderFieldEnumValueId(c.id), - enum_id: LoaderFieldEnumId(c.enum_id), - value: c.value, - ordering: c.ordering, - created: c.created, - metadata: c.metadata.unwrap_or_default(), - })) - }) - .try_collect::>() - .await?; + Ok(values) + }, + ).await?; - // Convert from an Vec to a Vec<(LoaderFieldEnumId, Vec)> - let cachable_enum_sets: Vec<(LoaderFieldEnumId, Vec)> = result - .clone() - .into_iter() - .group_by(|x| x.enum_id) // we sort by enum_id, so this will group all values of the same enum_id together + Ok(val .into_iter() - .map(|(k, v)| (k, v.collect::>().to_vec())) - .collect(); - for (k, v) in cachable_enum_sets.iter() { - redis - .set_serialized_to_json(LOADER_FIELD_ENUM_VALUES_NAMESPACE, k.0, v, None) - .await?; - } - - Ok(cachable_enum_sets) + .map(|x| (LoaderFieldEnumId(x.0), x.1)) + .collect()) } // Matches filter against metadata of enum values diff --git a/src/database/models/mod.rs b/src/database/models/mod.rs index eb931f7d..eafde1b4 100644 --- a/src/database/models/mod.rs +++ b/src/database/models/mod.rs @@ -48,4 +48,6 @@ pub enum DatabaseError { SerdeCacheError(#[from] serde_json::Error), #[error("Schema error: {0}")] SchemaError(String), + #[error("Timeout when waiting for cache subscriber")] + CacheTimeout, } diff --git a/src/database/models/notification_item.rs b/src/database/models/notification_item.rs index 51521a44..49d2fe1f 100644 --- a/src/database/models/notification_item.rs +++ b/src/database/models/notification_item.rs @@ -3,7 +3,6 @@ use crate::database::{models::DatabaseError, redis::RedisPool}; use 
crate::models::notifications::NotificationBody; use chrono::{DateTime, Utc}; use futures::TryStreamExt; -use itertools::Itertools; use serde::{Deserialize, Serialize}; const USER_NOTIFICATIONS_NAMESPACE: &str = "user_notifications"; @@ -46,37 +45,15 @@ impl NotificationBuilder { transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, redis: &RedisPool, ) -> Result<(), DatabaseError> { - let mut notifications = Vec::new(); - for user in users { - let id = generate_notification_id(&mut *transaction).await?; - - notifications.push(Notification { - id, - user_id: user, - body: self.body.clone(), - read: false, - created: Utc::now(), - }); - } - - Notification::insert_many(¬ifications, transaction, redis).await?; + let notification_ids = + generate_many_notification_ids(users.len(), &mut *transaction).await?; - Ok(()) - } -} - -impl Notification { - pub async fn insert_many( - notifications: &[Notification], - transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, - redis: &RedisPool, - ) -> Result<(), DatabaseError> { - let notification_ids = notifications.iter().map(|n| n.id.0).collect_vec(); - let user_ids = notifications.iter().map(|n| n.user_id.0).collect_vec(); - let bodies = notifications + let body = serde_json::value::to_value(&self.body)?; + let bodies = notification_ids .iter() - .map(|n| Ok(serde_json::value::to_value(n.body.clone())?)) - .collect::, DatabaseError>>()?; + .map(|_| body.clone()) + .collect::>(); + sqlx::query!( " INSERT INTO notifications ( @@ -84,19 +61,23 @@ impl Notification { ) SELECT * FROM UNNEST($1::bigint[], $2::bigint[], $3::jsonb[]) ", - ¬ification_ids[..], - &user_ids[..], + ¬ification_ids + .into_iter() + .map(|x| x.0) + .collect::>()[..], + &users.iter().map(|x| x.0).collect::>()[..], &bodies[..], ) .execute(&mut **transaction) .await?; - let user_ids = notifications.iter().map(|n| n.user_id).collect::>(); - Notification::clear_user_notifications_cache(&user_ids, redis).await?; + 
Notification::clear_user_notifications_cache(&users, redis).await?; Ok(()) } +} +impl Notification { pub async fn get<'a, 'b, E>( id: NotificationId, executor: E, diff --git a/src/database/models/organization_item.rs b/src/database/models/organization_item.rs index c0c08949..3530820d 100644 --- a/src/database/models/organization_item.rs +++ b/src/database/models/organization_item.rs @@ -1,7 +1,8 @@ -use crate::{ - database::redis::RedisPool, - models::ids::base62_impl::{parse_base62, to_base62}, -}; +use crate::{database::redis::RedisPool, models::ids::base62_impl::parse_base62}; +use dashmap::DashMap; +use futures::TryStreamExt; +use std::fmt::{Debug, Display}; +use std::hash::Hash; use super::{ids::*, TeamMember}; use serde::{Deserialize, Serialize}; @@ -97,7 +98,7 @@ impl Organization { Self::get_many(&ids, exec, redis).await } - pub async fn get_many<'a, E, T: ToString>( + pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( organization_strings: &[T], exec: E, redis: &RedisPool, @@ -105,120 +106,56 @@ impl Organization { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - use futures::stream::TryStreamExt; - - let mut redis = redis.connect().await?; - - if organization_strings.is_empty() { - return Ok(Vec::new()); - } - - let mut found_organizations = Vec::new(); - let mut remaining_strings = organization_strings - .iter() - .map(|x| x.to_string()) - .collect::>(); - - let mut organization_ids = organization_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64)) - .collect::>(); - - organization_ids.append( - &mut redis - .multi_get::( - ORGANIZATIONS_TITLES_NAMESPACE, - organization_strings + let val = redis + .get_cached_keys_with_slug( + ORGANIZATIONS_NAMESPACE, + ORGANIZATIONS_TITLES_NAMESPACE, + false, + organization_strings, + |ids| async move { + let org_ids: Vec = ids .iter() + .flat_map(|x| parse_base62(&x.to_string()).ok()) + .map(|x| x as i64) + .collect(); + let slugs = ids + 
.into_iter() .map(|x| x.to_string().to_lowercase()) - .collect::>(), - ) - .await? - .into_iter() - .flatten() - .collect(), - ); - - if !organization_ids.is_empty() { - let organizations = redis - .multi_get::( - ORGANIZATIONS_NAMESPACE, - organization_ids.iter().map(|x| x.to_string()), - ) - .await?; - - for organization in organizations { - if let Some(organization) = - organization.and_then(|x| serde_json::from_str::(&x).ok()) - { - remaining_strings.retain(|x| { - &to_base62(organization.id.0 as u64) != x - && organization.slug.to_lowercase() != x.to_lowercase() - }); - found_organizations.push(organization); - continue; - } - } - } - - if !remaining_strings.is_empty() { - let organization_ids_parsed: Vec = remaining_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).ok()) - .map(|x| x as i64) - .collect(); - - let organizations: Vec = sqlx::query!( - " - SELECT o.id, o.slug, o.name, o.team_id, o.description, o.icon_url, o.color - FROM organizations o - WHERE o.id = ANY($1) OR LOWER(o.slug) = ANY($2) - GROUP BY o.id; - ", - &organization_ids_parsed, - &remaining_strings - .into_iter() - .map(|x| x.to_string().to_lowercase()) - .collect::>(), - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|m| Organization { - id: OrganizationId(m.id), - slug: m.slug, - name: m.name, - team_id: TeamId(m.team_id), - description: m.description, - icon_url: m.icon_url, - color: m.color.map(|x| x as u32), - })) - }) - .try_collect::>() - .await?; - - for organization in organizations { - redis - .set_serialized_to_json( - ORGANIZATIONS_NAMESPACE, - organization.id.0, - &organization, - None, - ) - .await?; - redis - .set( - ORGANIZATIONS_TITLES_NAMESPACE, - &organization.slug.to_lowercase(), - &organization.id.0.to_string(), - None, + .collect::>(); + + let organizations = sqlx::query!( + " + SELECT o.id, o.slug, o.name, o.team_id, o.description, o.icon_url, o.color + FROM organizations o + WHERE o.id = ANY($1) OR LOWER(o.slug) = ANY($2) + GROUP 
BY o.id; + ", + &org_ids, + &slugs, ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, m| { + let org = Organization { + id: OrganizationId(m.id), + slug: m.slug.clone(), + name: m.name, + team_id: TeamId(m.team_id), + description: m.description, + icon_url: m.icon_url, + color: m.color.map(|x| x as u32), + }; + + acc.insert(m.id, (Some(m.slug), org)); + async move { Ok(acc) } + }) .await?; - found_organizations.push(organization); - } - } + Ok(organizations) + }, + ) + .await?; - Ok(found_organizations) + Ok(val) } // Gets organization associated with a project ID, if it exists and there is one diff --git a/src/database/models/pat_item.rs b/src/database/models/pat_item.rs index 9352d637..73e1c9fe 100644 --- a/src/database/models/pat_item.rs +++ b/src/database/models/pat_item.rs @@ -1,10 +1,14 @@ use super::ids::*; use crate::database::models::DatabaseError; use crate::database::redis::RedisPool; -use crate::models::ids::base62_impl::{parse_base62, to_base62}; +use crate::models::ids::base62_impl::parse_base62; use crate::models::pats::Scopes; use chrono::{DateTime, Utc}; +use dashmap::DashMap; +use futures::TryStreamExt; use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Display}; +use std::hash::Hash; const PATS_NAMESPACE: &str = "pats"; const PATS_TOKENS_NAMESPACE: &str = "pats_tokens"; @@ -51,7 +55,7 @@ impl PersonalAccessToken { Ok(()) } - pub async fn get<'a, E, T: ToString>( + pub async fn get<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( id: T, exec: E, redis: &RedisPool, @@ -79,7 +83,7 @@ impl PersonalAccessToken { PersonalAccessToken::get_many(&ids, exec, redis).await } - pub async fn get_many<'a, E, T: ToString>( + pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( pat_strings: &[T], exec: E, redis: &RedisPool, @@ -87,105 +91,53 @@ impl PersonalAccessToken { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - use futures::TryStreamExt; - - let mut redis = redis.connect().await?; 
- - if pat_strings.is_empty() { - return Ok(Vec::new()); - } - - let mut found_pats = Vec::new(); - let mut remaining_strings = pat_strings - .iter() - .map(|x| x.to_string()) - .collect::>(); - - let mut pat_ids = pat_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64)) - .collect::>(); - - pat_ids.append( - &mut redis - .multi_get::( - PATS_TOKENS_NAMESPACE, - pat_strings.iter().map(|x| x.to_string()), - ) - .await? - .into_iter() - .flatten() - .collect(), - ); - - if !pat_ids.is_empty() { - let pats = redis - .multi_get::(PATS_NAMESPACE, pat_ids.iter().map(|x| x.to_string())) - .await?; - for pat in pats { - if let Some(pat) = - pat.and_then(|x| serde_json::from_str::(&x).ok()) - { - remaining_strings - .retain(|x| &to_base62(pat.id.0 as u64) != x && &pat.access_token != x); - found_pats.push(pat); - continue; - } - } - } - - if !remaining_strings.is_empty() { - let pat_ids_parsed: Vec = remaining_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).ok()) - .map(|x| x as i64) - .collect(); - let db_pats: Vec = sqlx::query!( - " - SELECT id, name, access_token, scopes, user_id, created, expires, last_used - FROM pats - WHERE id = ANY($1) OR access_token = ANY($2) - ORDER BY created DESC - ", - &pat_ids_parsed, - &remaining_strings - .into_iter() - .map(|x| x.to_string()) - .collect::>(), - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|x| PersonalAccessToken { - id: PatId(x.id), - name: x.name, - access_token: x.access_token, - scopes: Scopes::from_bits(x.scopes as u64).unwrap_or(Scopes::NONE), - user_id: UserId(x.user_id), - created: x.created, - expires: x.expires, - last_used: x.last_used, - })) - }) - .try_collect::>() - .await?; - - for pat in db_pats { - redis - .set_serialized_to_json(PATS_NAMESPACE, pat.id.0, &pat, None) - .await?; - redis - .set( - PATS_TOKENS_NAMESPACE, - &pat.access_token, - &pat.id.0.to_string(), - None, + let val = redis + .get_cached_keys_with_slug( + PATS_NAMESPACE, + 
PATS_TOKENS_NAMESPACE, + true, + pat_strings, + |ids| async move { + let pat_ids: Vec = ids + .iter() + .flat_map(|x| parse_base62(&x.to_string()).ok()) + .map(|x| x as i64) + .collect(); + let slugs = ids.into_iter().map(|x| x.to_string()).collect::>(); + + let pats = sqlx::query!( + " + SELECT id, name, access_token, scopes, user_id, created, expires, last_used + FROM pats + WHERE id = ANY($1) OR access_token = ANY($2) + ORDER BY created DESC + ", + &pat_ids, + &slugs, ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, x| { + let pat = PersonalAccessToken { + id: PatId(x.id), + name: x.name, + access_token: x.access_token.clone(), + scopes: Scopes::from_bits(x.scopes as u64).unwrap_or(Scopes::NONE), + user_id: UserId(x.user_id), + created: x.created, + expires: x.expires, + last_used: x.last_used, + }; + + acc.insert(x.id, (Some(x.access_token), pat)); + async move { Ok(acc) } + }) .await?; - found_pats.push(pat); - } - } + Ok(pats) + }, + ) + .await?; - Ok(found_pats) + Ok(val) } pub async fn get_user_pats<'a, E>( @@ -206,14 +158,13 @@ impl PersonalAccessToken { return Ok(res.into_iter().map(PatId).collect()); } - use futures::TryStreamExt; let db_pats: Vec = sqlx::query!( " - SELECT id - FROM pats - WHERE user_id = $1 - ORDER BY created DESC - ", + SELECT id + FROM pats + WHERE user_id = $1 + ORDER BY created DESC + ", user_id.0, ) .fetch_many(exec) diff --git a/src/database/models/project_item.rs b/src/database/models/project_item.rs index 87ced9ea..1a982d65 100644 --- a/src/database/models/project_item.rs +++ b/src/database/models/project_item.rs @@ -5,14 +5,16 @@ use super::{ids::*, User}; use crate::database::models; use crate::database::models::DatabaseError; use crate::database::redis::RedisPool; -use crate::models::ids::base62_impl::{parse_base62, to_base62}; +use crate::models::ids::base62_impl::parse_base62; use crate::models::projects::{MonetizationStatus, ProjectStatus}; use chrono::{DateTime, Utc}; use dashmap::{DashMap, DashSet}; use 
futures::TryStreamExt; use itertools::Itertools; use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Display}; use std::future::Future; +use std::hash::Hash; pub const PROJECTS_NAMESPACE: &str = "projects"; pub const PROJECTS_SLUGS_NAMESPACE: &str = "projects_slugs"; @@ -517,7 +519,7 @@ impl Project { } #[allow(clippy::manual_async_fn)] - pub fn get_many<'a, 'c, E, T: ToString + std::marker::Sync>( + pub fn get_many<'a, 'c, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + std::marker::Sync + std::marker::Send>( project_strings: &'a [T], exec: E, redis: &'a RedisPool, @@ -526,72 +528,25 @@ impl Project { E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a, { async move { - let project_strings = project_strings - .iter() - .map(|x| x.to_string()) - .unique() - .collect::>(); - - if project_strings.is_empty() { - return Ok(Vec::new()); - } - - let mut redis = redis.connect().await?; - let mut exec = exec.acquire().await?; - - let mut found_projects = Vec::new(); - let mut remaining_strings = project_strings.clone(); - - let mut project_ids = project_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64)) - .collect::>(); - - project_ids.append( - &mut redis - .multi_get::( - PROJECTS_SLUGS_NAMESPACE, - project_strings.iter().map(|x| x.to_string().to_lowercase()), - ) - .await? 
- .into_iter() - .flatten() - .collect(), - ); - if !project_ids.is_empty() { - let projects = redis - .multi_get::( - PROJECTS_NAMESPACE, - project_ids.iter().map(|x| x.to_string()), - ) - .await?; - for project in projects { - if let Some(project) = - project.and_then(|x| serde_json::from_str::(&x).ok()) - { - remaining_strings.retain(|x| { - &to_base62(project.inner.id.0 as u64) != x - && project.inner.slug.as_ref().map(|x| x.to_lowercase()) - != Some(x.to_lowercase()) - }); - found_projects.push(project); - continue; - } - } - } - if !remaining_strings.is_empty() { - let project_ids_parsed: Vec = remaining_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).ok()) - .map(|x| x as i64) - .collect(); - let slugs = remaining_strings - .into_iter() - .map(|x| x.to_lowercase()) - .collect::>(); - - let all_version_ids = DashSet::new(); - let versions: DashMap)>> = sqlx::query!( + let val = redis.get_cached_keys_with_slug( + PROJECTS_NAMESPACE, + PROJECTS_SLUGS_NAMESPACE, + false, + project_strings, + |ids| async move { + let mut exec = exec.acquire().await?; + let project_ids_parsed: Vec = ids + .iter() + .flat_map(|x| parse_base62(&x.to_string()).ok()) + .map(|x| x as i64) + .collect(); + let slugs = ids + .into_iter() + .map(|x| x.to_string().to_lowercase()) + .collect::>(); + + let all_version_ids = DashSet::new(); + let versions: DashMap)>> = sqlx::query!( " SELECT DISTINCT mod_id, v.id as id, date_published FROM mods m @@ -605,23 +560,23 @@ impl Project { .map(|x| x.to_string()) .collect::>() ) - .fetch(&mut *exec) - .try_fold( - DashMap::new(), - |acc: DashMap)>>, m| { - let version_id = VersionId(m.id); - let date_published = m.date_published; - all_version_ids.insert(version_id); - acc.entry(ProjectId(m.mod_id)) - .or_default() - .push((version_id, date_published)); - async move { Ok(acc) } - }, - ) - .await?; + .fetch(&mut *exec) + .try_fold( + DashMap::new(), + |acc: DashMap)>>, m| { + let version_id = VersionId(m.id); + let date_published = 
m.date_published; + all_version_ids.insert(version_id); + acc.entry(ProjectId(m.mod_id)) + .or_default() + .push((version_id, date_published)); + async move { Ok(acc) } + }, + ) + .await?; - let loader_field_enum_value_ids = DashSet::new(); - let version_fields: DashMap> = sqlx::query!( + let loader_field_enum_value_ids = DashSet::new(); + let version_fields: DashMap> = sqlx::query!( " SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value FROM versions v @@ -630,29 +585,29 @@ impl Project { ", &all_version_ids.iter().map(|x| x.0).collect::>() ) - .fetch(&mut *exec) - .try_fold( - DashMap::new(), - |acc: DashMap>, m| { - let qvf = QueryVersionField { - version_id: VersionId(m.version_id), - field_id: LoaderFieldId(m.field_id), - int_value: m.int_value, - enum_value: m.enum_value.map(LoaderFieldEnumValueId), - string_value: m.string_value, - }; - - if let Some(enum_value) = m.enum_value { - loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value)); - } - - acc.entry(ProjectId(m.mod_id)).or_default().push(qvf); - async move { Ok(acc) } - }, - ) - .await?; + .fetch(&mut *exec) + .try_fold( + DashMap::new(), + |acc: DashMap>, m| { + let qvf = QueryVersionField { + version_id: VersionId(m.version_id), + field_id: LoaderFieldId(m.field_id), + int_value: m.int_value, + enum_value: m.enum_value.map(LoaderFieldEnumValueId), + string_value: m.string_value, + }; + + if let Some(enum_value) = m.enum_value { + loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value)); + } + + acc.entry(ProjectId(m.mod_id)).or_default().push(qvf); + async move { Ok(acc) } + }, + ) + .await?; - let loader_field_enum_values: Vec = sqlx::query!( + let loader_field_enum_values: Vec = sqlx::query!( " SELECT DISTINCT id, enum_id, value, ordering, created, metadata FROM loader_field_enum_values lfev @@ -664,19 +619,19 @@ impl Project { .map(|x| x.0) .collect::>() ) - .fetch(&mut *exec) - .map_ok(|m| QueryLoaderFieldEnumValue { - id: 
LoaderFieldEnumValueId(m.id), - enum_id: LoaderFieldEnumId(m.enum_id), - value: m.value, - ordering: m.ordering, - created: m.created, - metadata: m.metadata, - }) - .try_collect() - .await?; - - let mods_gallery: DashMap> = sqlx::query!( + .fetch(&mut *exec) + .map_ok(|m| QueryLoaderFieldEnumValue { + id: LoaderFieldEnumValueId(m.id), + enum_id: LoaderFieldEnumId(m.enum_id), + value: m.value, + ordering: m.ordering, + created: m.created, + metadata: m.metadata, + }) + .try_collect() + .await?; + + let mods_gallery: DashMap> = sqlx::query!( " SELECT DISTINCT mod_id, mg.image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering FROM mods_gallery mg @@ -686,22 +641,22 @@ impl Project { &project_ids_parsed, &slugs ).fetch(&mut *exec) - .try_fold(DashMap::new(), |acc : DashMap>, m| { - acc.entry(ProjectId(m.mod_id)) - .or_default() - .push(GalleryItem { - image_url: m.image_url, - featured: m.featured.unwrap_or(false), - name: m.name, - description: m.description, - created: m.created, - ordering: m.ordering, - }); - async move { Ok(acc) } - } - ).await?; + .try_fold(DashMap::new(), |acc : DashMap>, m| { + acc.entry(ProjectId(m.mod_id)) + .or_default() + .push(GalleryItem { + image_url: m.image_url, + featured: m.featured.unwrap_or(false), + name: m.name, + description: m.description, + created: m.created, + ordering: m.ordering, + }); + async move { Ok(acc) } + } + ).await?; - let links: DashMap> = sqlx::query!( + let links: DashMap> = sqlx::query!( " SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation FROM mods_links ml @@ -711,31 +666,30 @@ impl Project { ", &project_ids_parsed, &slugs - ) - .fetch(&mut *exec) - .try_fold(DashMap::new(), |acc : DashMap>, m| { - acc.entry(ProjectId(m.mod_id)) - .or_default() - .push(LinkUrl { - platform_id: LinkPlatformId(m.platform_id), - platform_name: m.platform_name, - url: m.url, - donation: m.donation, - }); - async move { Ok(acc) } + 
).fetch(&mut *exec) + .try_fold(DashMap::new(), |acc : DashMap>, m| { + acc.entry(ProjectId(m.mod_id)) + .or_default() + .push(LinkUrl { + platform_id: LinkPlatformId(m.platform_id), + platform_name: m.platform_name, + url: m.url, + donation: m.donation, + }); + async move { Ok(acc) } + } + ).await?; + + #[derive(Default)] + struct VersionLoaderData { + loaders: Vec, + project_types: Vec, + games: Vec, + loader_loader_field_ids: Vec, } - ).await?; - - #[derive(Default)] - struct VersionLoaderData { - loaders: Vec, - project_types: Vec, - games: Vec, - loader_loader_field_ids: Vec, - } - - let loader_field_ids = DashSet::new(); - let loaders_ptypes_games: DashMap = sqlx::query!( + + let loader_field_ids = DashSet::new(); + let loaders_ptypes_games: DashMap = sqlx::query!( " SELECT DISTINCT mod_id, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, @@ -754,31 +708,30 @@ impl Project { GROUP BY mod_id ", &all_version_ids.iter().map(|x| x.0).collect::>() - ) - .fetch(&mut *exec) - .map_ok(|m| { - let project_id = ProjectId(m.mod_id); - - // Add loader fields to the set we need to fetch - let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::>(); - for loader_field_id in loader_loader_field_ids.iter() { - loader_field_ids.insert(*loader_field_id); - } - - // Add loader + loader associated data to the map - let version_loader_data = VersionLoaderData { - loaders: m.loaders.unwrap_or_default(), - project_types: m.project_types.unwrap_or_default(), - games: m.games.unwrap_or_default(), - loader_loader_field_ids, - }; + ).fetch(&mut *exec) + .map_ok(|m| { + let project_id = ProjectId(m.mod_id); + + // Add loader fields to the set we need to fetch + let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::>(); + for loader_field_id in loader_loader_field_ids.iter() { + loader_field_ids.insert(*loader_field_id); + } + + // Add loader + loader associated 
data to the map + let version_loader_data = VersionLoaderData { + loaders: m.loaders.unwrap_or_default(), + project_types: m.project_types.unwrap_or_default(), + games: m.games.unwrap_or_default(), + loader_loader_field_ids, + }; - (project_id, version_loader_data) + (project_id, version_loader_data) - } - ).try_collect().await?; + } + ).try_collect().await?; - let loader_fields: Vec = sqlx::query!( + let loader_fields: Vec = sqlx::query!( " SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional FROM loader_fields lf @@ -786,20 +739,20 @@ impl Project { ", &loader_field_ids.iter().map(|x| x.0).collect::>() ) - .fetch(&mut *exec) - .map_ok(|m| QueryLoaderField { - id: LoaderFieldId(m.id), - field: m.field, - field_type: m.field_type, - enum_type: m.enum_type.map(LoaderFieldEnumId), - min_val: m.min_val, - max_val: m.max_val, - optional: m.optional, - }) - .try_collect() - .await?; - - let db_projects: Vec = sqlx::query!( + .fetch(&mut *exec) + .map_ok(|m| QueryLoaderField { + id: LoaderFieldId(m.id), + field: m.field, + field_type: m.field_type, + enum_type: m.enum_type.map(LoaderFieldEnumId), + min_val: m.min_val, + max_val: m.max_val, + optional: m.optional, + }) + .try_collect() + .await?; + + let projects = sqlx::query!( " SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows, m.icon_url icon_url, m.description description, m.published published, @@ -820,9 +773,8 @@ impl Project { &project_ids_parsed, &slugs, ) - .fetch_many(&mut *exec) - .try_filter_map(|e| async { - Ok(e.right().map(|m| { + .fetch(&mut *exec) + .try_fold(DashMap::new(), |acc, m| { let id = m.id; let project_id = ProjectId(id); let VersionLoaderData { @@ -840,7 +792,7 @@ impl Project { .filter(|x| loader_loader_field_ids.contains(&x.id)) .collect::>(); - QueryProject { + let project = QueryProject { inner: Project { id: ProjectId(id), team_id: TeamId(m.team_id), @@ -889,35 +841,18 @@ impl Project { urls, aggregate_version_fields: 
VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, true), thread_id: ThreadId(m.thread_id), - }})) - }) - .try_collect::>() - .await?; - - for project in db_projects { - redis - .set_serialized_to_json( - PROJECTS_NAMESPACE, - project.inner.id.0, - &project, - None, - ) + }; + + acc.insert(m.id, (m.slug, project)); + async move { Ok(acc) } + }) .await?; - if let Some(slug) = &project.inner.slug { - redis - .set( - PROJECTS_SLUGS_NAMESPACE, - &slug.to_lowercase(), - &project.inner.id.0.to_string(), - None, - ) - .await?; - } - found_projects.push(project); - } - } - Ok(found_projects) + Ok(projects) + }, + ).await?; + + Ok(val) } } diff --git a/src/database/models/session_item.rs b/src/database/models/session_item.rs index f27af5bb..a85425d5 100644 --- a/src/database/models/session_item.rs +++ b/src/database/models/session_item.rs @@ -1,9 +1,12 @@ use super::ids::*; use crate::database::models::DatabaseError; use crate::database::redis::RedisPool; -use crate::models::ids::base62_impl::{parse_base62, to_base62}; +use crate::models::ids::base62_impl::parse_base62; use chrono::{DateTime, Utc}; +use dashmap::DashMap; use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Display}; +use std::hash::Hash; const SESSIONS_NAMESPACE: &str = "sessions"; const SESSIONS_IDS_NAMESPACE: &str = "sessions_ids"; @@ -79,7 +82,7 @@ pub struct Session { } impl Session { - pub async fn get<'a, E, T: ToString>( + pub async fn get<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( id: T, exec: E, redis: &RedisPool, @@ -120,7 +123,7 @@ impl Session { Session::get_many(&ids, exec, redis).await } - pub async fn get_many<'a, E, T: ToString>( + pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( session_strings: &[T], exec: E, redis: &RedisPool, @@ -130,109 +133,60 @@ impl Session { { use futures::TryStreamExt; - let mut redis = redis.connect().await?; - - if session_strings.is_empty() { - return 
Ok(Vec::new()); - } - - let mut found_sessions = Vec::new(); - let mut remaining_strings = session_strings - .iter() - .map(|x| x.to_string()) - .collect::>(); - - let mut session_ids = session_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64)) - .collect::>(); - - session_ids.append( - &mut redis - .multi_get::( - SESSIONS_IDS_NAMESPACE, - session_strings.iter().map(|x| x.to_string()), - ) - .await? - .into_iter() - .flatten() - .collect(), - ); - - if !session_ids.is_empty() { - let sessions = redis - .multi_get::( - SESSIONS_NAMESPACE, - session_ids.iter().map(|x| x.to_string()), + let val = redis.get_cached_keys_with_slug( + SESSIONS_NAMESPACE, + SESSIONS_IDS_NAMESPACE, + true, + session_strings, + |ids| async move { + let session_ids: Vec = ids + .iter() + .flat_map(|x| parse_base62(&x.to_string()).ok()) + .map(|x| x as i64) + .collect(); + let slugs = ids + .into_iter() + .map(|x| x.to_string()) + .collect::>(); + let db_sessions = sqlx::query!( + " + SELECT id, user_id, session, created, last_login, expires, refresh_expires, os, platform, + city, country, ip, user_agent + FROM sessions + WHERE id = ANY($1) OR session = ANY($2) + ORDER BY created DESC + ", + &session_ids, + &slugs, ) - .await?; - for session in sessions { - if let Some(session) = - session.and_then(|x| serde_json::from_str::(&x).ok()) - { - remaining_strings - .retain(|x| &to_base62(session.id.0 as u64) != x && &session.session != x); - found_sessions.push(session); - continue; - } - } - } - - if !remaining_strings.is_empty() { - let session_ids_parsed: Vec = remaining_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).ok()) - .map(|x| x as i64) - .collect(); - let db_sessions: Vec = sqlx::query!( - " - SELECT id, user_id, session, created, last_login, expires, refresh_expires, os, platform, - city, country, ip, user_agent - FROM sessions - WHERE id = ANY($1) OR session = ANY($2) - ORDER BY created DESC - ", - &session_ids_parsed, - 
&remaining_strings.into_iter().map(|x| x.to_string()).collect::>(), - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|x| Session { - id: SessionId(x.id), - session: x.session, - user_id: UserId(x.user_id), - created: x.created, - last_login: x.last_login, - expires: x.expires, - refresh_expires: x.refresh_expires, - os: x.os, - platform: x.platform, - city: x.city, - country: x.country, - ip: x.ip, - user_agent: x.user_agent, - })) - }) - .try_collect::>() - .await?; - - for session in db_sessions { - redis - .set_serialized_to_json(SESSIONS_NAMESPACE, session.id.0, &session, None) - .await?; - redis - .set( - SESSIONS_IDS_NAMESPACE, - &session.session, - &session.id.0.to_string(), - None, - ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, x| { + let session = Session { + id: SessionId(x.id), + session: x.session.clone(), + user_id: UserId(x.user_id), + created: x.created, + last_login: x.last_login, + expires: x.expires, + refresh_expires: x.refresh_expires, + os: x.os, + platform: x.platform, + city: x.city, + country: x.country, + ip: x.ip, + user_agent: x.user_agent, + }; + + acc.insert(x.id, (Some(x.session), session)); + + async move { Ok(acc) } + }) .await?; - found_sessions.push(session); - } - } - Ok(found_sessions) + Ok(db_sessions) + }).await?; + + Ok(val) } pub async fn get_user_sessions<'a, E>( diff --git a/src/database/models/team_item.rs b/src/database/models/team_item.rs index 4af8b007..e27432f2 100644 --- a/src/database/models/team_item.rs +++ b/src/database/models/team_item.rs @@ -3,6 +3,8 @@ use crate::{ database::redis::RedisPool, models::teams::{OrganizationPermissions, ProjectPermissions}, }; +use dashmap::DashMap; +use futures::TryStreamExt; use itertools::Itertools; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; @@ -190,7 +192,7 @@ impl TeamMember { redis: &RedisPool, ) -> Result, super::DatabaseError> where - E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, + E: sqlx::Executor<'a, Database 
= sqlx::Postgres> + Copy + Send + Sync, { Self::get_from_team_full_many(&[id], executor, redis).await } @@ -201,89 +203,57 @@ impl TeamMember { redis: &RedisPool, ) -> Result, super::DatabaseError> where - E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, + E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync, { - use futures::stream::TryStreamExt; - if team_ids.is_empty() { return Ok(Vec::new()); } - let mut redis = redis.connect().await?; - - let mut team_ids_parsed: Vec = team_ids.iter().map(|x| x.0).collect(); - - let mut found_teams = Vec::new(); - - let teams = redis - .multi_get::( - TEAMS_NAMESPACE, - team_ids_parsed.iter().map(|x| x.to_string()), - ) - .await?; - - for team_raw in teams { - if let Some(mut team) = team_raw - .clone() - .and_then(|x| serde_json::from_str::>(&x).ok()) - { - if let Some(team_id) = team.first().map(|x| x.team_id) { - team_ids_parsed.retain(|x| &team_id.0 != x); - } - - found_teams.append(&mut team); - continue; - } - } - - if !team_ids_parsed.is_empty() { - let teams: Vec = sqlx::query!( - " - SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions, - accepted, payouts_split, - ordering, user_id - FROM team_members - WHERE team_id = ANY($1) - ORDER BY team_id, ordering; - ", - &team_ids_parsed - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|m| TeamMember { - id: TeamMemberId(m.id), - team_id: TeamId(m.team_id), - role: m.member_role, - is_owner: m.is_owner, - permissions: ProjectPermissions::from_bits(m.permissions as u64) - .unwrap_or_default(), - organization_permissions: m - .organization_permissions - .map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()), - accepted: m.accepted, - user_id: UserId(m.user_id), - payouts_split: m.payouts_split, - ordering: m.ordering, - })) - }) - .try_collect::>() - .await?; - - for (id, mut members) in teams - .into_iter() - .group_by(|x| x.team_id) - .into_iter() - .map(|(key, group)| 
(key, group.collect::>())) - .collect::>() - { - redis - .set_serialized_to_json(TEAMS_NAMESPACE, id.0, &members, None) + let val = redis.get_cached_keys( + TEAMS_NAMESPACE, + &team_ids.iter().map(|x| x.0).collect::>(), + |team_ids| async move { + let teams = sqlx::query!( + " + SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions, + accepted, payouts_split, + ordering, user_id + FROM team_members + WHERE team_id = ANY($1) + ORDER BY team_id, ordering; + ", + &team_ids + ) + .fetch(exec) + .try_fold(DashMap::new(), |acc: DashMap>, m| { + let member = TeamMember { + id: TeamMemberId(m.id), + team_id: TeamId(m.team_id), + role: m.member_role, + is_owner: m.is_owner, + permissions: ProjectPermissions::from_bits(m.permissions as u64) + .unwrap_or_default(), + organization_permissions: m + .organization_permissions + .map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()), + accepted: m.accepted, + user_id: UserId(m.user_id), + payouts_split: m.payouts_split, + ordering: m.ordering, + }; + + acc.entry(m.team_id) + .or_default() + .push(member); + async move { Ok(acc) } + }) .await?; - found_teams.append(&mut members); - } - } - Ok(found_teams) + Ok(teams) + }, + ).await?; + + Ok(val.into_iter().flatten().collect()) } pub async fn clear_cache(id: TeamId, redis: &RedisPool) -> Result<(), super::DatabaseError> { @@ -315,8 +285,6 @@ impl TeamMember { where E: sqlx::Executor<'a, Database = sqlx::Postgres>, { - use futures::stream::TryStreamExt; - let team_ids_parsed: Vec = team_ids.iter().map(|x| x.0).collect(); let team_members = sqlx::query!( diff --git a/src/database/models/thread_item.rs b/src/database/models/thread_item.rs index ce9a3c36..e085bb1b 100644 --- a/src/database/models/thread_item.rs +++ b/src/database/models/thread_item.rs @@ -21,13 +21,13 @@ pub struct Thread { pub messages: Vec, pub members: Vec, - pub show_in_mod_inbox: bool, } pub struct ThreadMessageBuilder { pub author_id: Option, pub body: 
MessageBody, pub thread_id: ThreadId, + pub hide_identity: bool, } #[derive(Serialize, Deserialize, Clone)] @@ -37,6 +37,7 @@ pub struct ThreadMessage { pub author_id: Option, pub body: MessageBody, pub created: DateTime, + pub hide_identity: bool, } impl ThreadMessageBuilder { @@ -49,16 +50,17 @@ impl ThreadMessageBuilder { sqlx::query!( " INSERT INTO threads_messages ( - id, author_id, body, thread_id + id, author_id, body, thread_id, hide_identity ) VALUES ( - $1, $2, $3, $4 + $1, $2, $3, $4, $5 ) ", thread_message_id as ThreadMessageId, self.author_id.map(|x| x.0), serde_json::value::to_value(self.body.clone())?, self.thread_id as ThreadId, + self.hide_identity ) .execute(&mut **transaction) .await?; @@ -131,9 +133,9 @@ impl Thread { let thread_ids_parsed: Vec = thread_ids.iter().map(|x| x.0).collect(); let threads = sqlx::query!( " - SELECT t.id, t.thread_type, t.mod_id, t.report_id, t.show_in_mod_inbox, + SELECT t.id, t.thread_type, t.mod_id, t.report_id, ARRAY_AGG(DISTINCT tm.user_id) filter (where tm.user_id is not null) members, - JSONB_AGG(DISTINCT jsonb_build_object('id', tmsg.id, 'author_id', tmsg.author_id, 'thread_id', tmsg.thread_id, 'body', tmsg.body, 'created', tmsg.created)) filter (where tmsg.id is not null) messages + JSONB_AGG(DISTINCT jsonb_build_object('id', tmsg.id, 'author_id', tmsg.author_id, 'thread_id', tmsg.thread_id, 'body', tmsg.body, 'created', tmsg.created, 'hide_identity', tmsg.hide_identity)) filter (where tmsg.id is not null) messages FROM threads t LEFT OUTER JOIN threads_messages tmsg ON tmsg.thread_id = t.id LEFT OUTER JOIN threads_members tm ON tm.thread_id = t.id @@ -159,7 +161,6 @@ impl Thread { messages }, members: x.members.unwrap_or_default().into_iter().map(UserId).collect(), - show_in_mod_inbox: x.show_in_mod_inbox, })) }) .try_collect::>() @@ -229,7 +230,7 @@ impl ThreadMessage { let message_ids_parsed: Vec = message_ids.iter().map(|x| x.0).collect(); let messages = sqlx::query!( " - SELECT tm.id, tm.author_id, 
tm.thread_id, tm.body, tm.created + SELECT tm.id, tm.author_id, tm.thread_id, tm.body, tm.created, tm.hide_identity FROM threads_messages tm WHERE tm.id = ANY($1) ", @@ -244,6 +245,7 @@ impl ThreadMessage { body: serde_json::from_value(x.body) .unwrap_or(MessageBody::Deleted { private: false }), created: x.created, + hide_identity: x.hide_identity, })) }) .try_collect::>() diff --git a/src/database/models/user_item.rs b/src/database/models/user_item.rs index 6f821db8..67503a6b 100644 --- a/src/database/models/user_item.rs +++ b/src/database/models/user_item.rs @@ -5,8 +5,11 @@ use crate::database::redis::RedisPool; use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::users::Badges; use chrono::{DateTime, Utc}; +use dashmap::DashMap; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Display}; +use std::hash::Hash; const USERS_NAMESPACE: &str = "users"; const USER_USERNAMES_NAMESPACE: &str = "users_usernames"; @@ -132,7 +135,7 @@ impl User { User::get_many(&ids, exec, redis).await } - pub async fn get_many<'a, E, T: ToString>( + pub async fn get_many<'a, E, T: Display + Hash + Eq + PartialEq + Clone + Debug + Send>( users_strings: &[T], exec: E, redis: &RedisPool, @@ -142,123 +145,73 @@ impl User { { use futures::TryStreamExt; - let mut redis = redis.connect().await?; - - if users_strings.is_empty() { - return Ok(Vec::new()); - } - - let mut found_users = Vec::new(); - let mut remaining_strings = users_strings - .iter() - .map(|x| x.to_string()) - .collect::>(); - - let mut user_ids = users_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64)) - .collect::>(); - - user_ids.append( - &mut redis - .multi_get::( - USER_USERNAMES_NAMESPACE, - users_strings.iter().map(|x| x.to_string().to_lowercase()), - ) - .await? 
- .into_iter() - .flatten() - .collect(), - ); - - if !user_ids.is_empty() { - let users = redis - .multi_get::(USERS_NAMESPACE, user_ids.iter().map(|x| x.to_string())) - .await?; - for user in users { - if let Some(user) = user.and_then(|x| serde_json::from_str::(&x).ok()) { - remaining_strings.retain(|x| { - &to_base62(user.id.0 as u64) != x - && user.username.to_lowercase() != x.to_lowercase() - }); - found_users.push(user); - continue; - } - } - } - - if !remaining_strings.is_empty() { - let user_ids_parsed: Vec = remaining_strings - .iter() - .flat_map(|x| parse_base62(&x.to_string()).ok()) - .map(|x| x as i64) - .collect(); - let db_users: Vec = sqlx::query!( - " - SELECT id, name, email, - avatar_url, username, bio, - created, role, badges, - balance, - github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id, - email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email, - venmo_handle - FROM users - WHERE id = ANY($1) OR LOWER(username) = ANY($2) - ", - &user_ids_parsed, - &remaining_strings + let val = redis.get_cached_keys_with_slug( + USERS_NAMESPACE, + USER_USERNAMES_NAMESPACE, + false, + users_strings, + |ids| async move { + let user_ids: Vec = ids + .iter() + .flat_map(|x| parse_base62(&x.to_string()).ok()) + .map(|x| x as i64) + .collect(); + let slugs = ids .into_iter() .map(|x| x.to_string().to_lowercase()) - .collect::>(), - ) - .fetch_many(exec) - .try_filter_map(|e| async { - Ok(e.right().map(|u| User { - id: UserId(u.id), - github_id: u.github_id, - discord_id: u.discord_id, - gitlab_id: u.gitlab_id, - google_id: u.google_id, - steam_id: u.steam_id, - microsoft_id: u.microsoft_id, - name: u.name, - email: u.email, - email_verified: u.email_verified, - avatar_url: u.avatar_url, - username: u.username, - bio: u.bio, - created: u.created, - role: u.role, - badges: Badges::from_bits(u.badges as u64).unwrap_or_default(), - balance: u.balance, - password: u.password, - paypal_id: u.paypal_id, - paypal_country: 
u.paypal_country, - paypal_email: u.paypal_email, - venmo_handle: u.venmo_handle, - totp_secret: u.totp_secret, - })) - }) - .try_collect::>() - .await?; - - for user in db_users { - redis - .set_serialized_to_json(USERS_NAMESPACE, user.id.0, &user, None) - .await?; - redis - .set( - USER_USERNAMES_NAMESPACE, - &user.username.to_lowercase(), - &user.id.0.to_string(), - None, - ) + .collect::>(); + + let users = sqlx::query!( + " + SELECT id, name, email, + avatar_url, username, bio, + created, role, badges, + balance, + github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id, + email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email, + venmo_handle + FROM users + WHERE id = ANY($1) OR LOWER(username) = ANY($2) + ", + &user_ids, + &slugs, + ) + .fetch(exec) + .try_fold(DashMap::new(), |acc, u| { + let user = User { + id: UserId(u.id), + github_id: u.github_id, + discord_id: u.discord_id, + gitlab_id: u.gitlab_id, + google_id: u.google_id, + steam_id: u.steam_id, + microsoft_id: u.microsoft_id, + name: u.name, + email: u.email, + email_verified: u.email_verified, + avatar_url: u.avatar_url, + username: u.username.clone(), + bio: u.bio, + created: u.created, + role: u.role, + badges: Badges::from_bits(u.badges as u64).unwrap_or_default(), + balance: u.balance, + password: u.password, + paypal_id: u.paypal_id, + paypal_country: u.paypal_country, + paypal_email: u.paypal_email, + venmo_handle: u.venmo_handle, + totp_secret: u.totp_secret, + }; + + acc.insert(u.id, (Some(u.username), user)); + async move { Ok(acc) } + }) .await?; - found_users.push(user); - } - } - Ok(found_users) + Ok(users) + }).await?; + Ok(val) } pub async fn get_email<'a, E>(email: &str, exec: E) -> Result, sqlx::Error> diff --git a/src/database/models/version_item.rs b/src/database/models/version_item.rs index 3be3f313..2622c4ac 100644 --- a/src/database/models/version_item.rs +++ b/src/database/models/version_item.rs @@ -8,6 +8,7 @@ use 
crate::database::redis::RedisPool; use crate::models::projects::{FileType, VersionStatus}; use chrono::{DateTime, Utc}; use dashmap::{DashMap, DashSet}; +use futures::TryStreamExt; use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::cmp::Ordering; @@ -475,88 +476,53 @@ impl Version { E: sqlx::Acquire<'c, Database = sqlx::Postgres> + Send + 'a, { async move { - let version_ids = version_ids - .iter() - .unique() - .copied() - .collect::>(); - - use futures::stream::TryStreamExt; - - if version_ids.is_empty() { - return Ok(Vec::new()); - } - - let mut exec = exec.acquire().await?; - let mut redis = redis.connect().await?; - - let mut version_ids_parsed: Vec = version_ids.iter().map(|x| x.0).collect(); - - let mut found_versions = Vec::new(); - - let versions = redis - .multi_get::( - VERSIONS_NAMESPACE, - version_ids_parsed - .clone() - .iter() - .map(|x| x.to_string()) - .collect::>(), - ) - .await?; - - for version in versions { - if let Some(version) = - version.and_then(|x| serde_json::from_str::(&x).ok()) - { - version_ids_parsed.retain(|x| &version.inner.id.0 != x); - found_versions.push(version); - continue; - } - } - - if !version_ids_parsed.is_empty() { - let loader_field_enum_value_ids = DashSet::new(); - let version_fields: DashMap> = sqlx::query!( + let mut val = redis.get_cached_keys( + VERSIONS_NAMESPACE, + &version_ids.iter().map(|x| x.0).collect::>(), + |version_ids| async move { + let mut exec = exec.acquire().await?; + + let loader_field_enum_value_ids = DashSet::new(); + let version_fields: DashMap> = sqlx::query!( " SELECT version_id, field_id, int_value, enum_value, string_value FROM version_fields WHERE version_id = ANY($1) ", - &version_ids_parsed + &version_ids ) - .fetch(&mut *exec) - .try_fold( - DashMap::new(), - |acc: DashMap>, m| { - let qvf = QueryVersionField { - version_id: VersionId(m.version_id), - field_id: LoaderFieldId(m.field_id), - int_value: m.int_value, - enum_value: 
m.enum_value.map(LoaderFieldEnumValueId), - string_value: m.string_value, - }; - - if let Some(enum_value) = m.enum_value { - loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value)); - } + .fetch(&mut *exec) + .try_fold( + DashMap::new(), + |acc: DashMap>, m| { + let qvf = QueryVersionField { + version_id: VersionId(m.version_id), + field_id: LoaderFieldId(m.field_id), + int_value: m.int_value, + enum_value: m.enum_value.map(LoaderFieldEnumValueId), + string_value: m.string_value, + }; + + if let Some(enum_value) = m.enum_value { + loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value)); + } + + acc.entry(VersionId(m.version_id)).or_default().push(qvf); + async move { Ok(acc) } + }, + ) + .await?; - acc.entry(VersionId(m.version_id)).or_default().push(qvf); - async move { Ok(acc) } - }, - ) - .await?; - - #[derive(Default)] - struct VersionLoaderData { - loaders: Vec, - project_types: Vec, - games: Vec, - loader_loader_field_ids: Vec, - } - - let loader_field_ids = DashSet::new(); - let loaders_ptypes_games: DashMap = sqlx::query!( + #[derive(Default)] + struct VersionLoaderData { + loaders: Vec, + project_types: Vec, + games: Vec, + loader_loader_field_ids: Vec, + } + + let loader_field_ids = DashSet::new(); + let loaders_ptypes_games: DashMap = sqlx::query!( " SELECT DISTINCT version_id, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, @@ -574,32 +540,31 @@ impl Version { WHERE v.id = ANY($1) GROUP BY version_id ", - &version_ids_parsed - ) - .fetch(&mut *exec) - .map_ok(|m| { - let version_id = VersionId(m.version_id); - - // Add loader fields to the set we need to fetch - let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::>(); - for loader_field_id in loader_loader_field_ids.iter() { - loader_field_ids.insert(*loader_field_id); - } + &version_ids + ).fetch(&mut *exec) + .map_ok(|m| { + let version_id = VersionId(m.version_id); + + // Add loader fields 
to the set we need to fetch + let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::>(); + for loader_field_id in loader_loader_field_ids.iter() { + loader_field_ids.insert(*loader_field_id); + } + + // Add loader + loader associated data to the map + let version_loader_data = VersionLoaderData { + loaders: m.loaders.unwrap_or_default(), + project_types: m.project_types.unwrap_or_default(), + games: m.games.unwrap_or_default(), + loader_loader_field_ids, + }; + (version_id,version_loader_data) - // Add loader + loader associated data to the map - let version_loader_data = VersionLoaderData { - loaders: m.loaders.unwrap_or_default(), - project_types: m.project_types.unwrap_or_default(), - games: m.games.unwrap_or_default(), - loader_loader_field_ids, - }; - (version_id,version_loader_data) - - } - ).try_collect().await?; + } + ).try_collect().await?; - // Fetch all loader fields from any version - let loader_fields: Vec = sqlx::query!( + // Fetch all loader fields from any version + let loader_fields: Vec = sqlx::query!( " SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional FROM loader_fields lf @@ -607,20 +572,20 @@ impl Version { ", &loader_field_ids.iter().map(|x| x.0).collect::>() ) - .fetch(&mut *exec) - .map_ok(|m| QueryLoaderField { - id: LoaderFieldId(m.id), - field: m.field, - field_type: m.field_type, - enum_type: m.enum_type.map(LoaderFieldEnumId), - min_val: m.min_val, - max_val: m.max_val, - optional: m.optional, - }) - .try_collect() - .await?; - - let loader_field_enum_values: Vec = sqlx::query!( + .fetch(&mut *exec) + .map_ok(|m| QueryLoaderField { + id: LoaderFieldId(m.id), + field: m.field, + field_type: m.field_type, + enum_type: m.enum_type.map(LoaderFieldEnumId), + min_val: m.min_val, + max_val: m.max_val, + optional: m.optional, + }) + .try_collect() + .await?; + + let loader_field_enum_values: Vec = sqlx::query!( " SELECT DISTINCT id, enum_id, value, ordering, 
created, metadata FROM loader_field_enum_values lfev @@ -632,67 +597,66 @@ impl Version { .map(|x| x.0) .collect::>() ) - .fetch(&mut *exec) - .map_ok(|m| QueryLoaderFieldEnumValue { - id: LoaderFieldEnumValueId(m.id), - enum_id: LoaderFieldEnumId(m.enum_id), - value: m.value, - ordering: m.ordering, - created: m.created, - metadata: m.metadata, - }) - .try_collect() - .await?; - - #[derive(Deserialize)] - struct Hash { - pub file_id: FileId, - pub algorithm: String, - pub hash: String, - } - - #[derive(Deserialize)] - struct File { - pub id: FileId, - pub url: String, - pub filename: String, - pub primary: bool, - pub size: u32, - pub file_type: Option, - } - - let file_ids = DashSet::new(); - let reverse_file_map = DashMap::new(); - let files : DashMap> = sqlx::query!( + .fetch(&mut *exec) + .map_ok(|m| QueryLoaderFieldEnumValue { + id: LoaderFieldEnumValueId(m.id), + enum_id: LoaderFieldEnumId(m.enum_id), + value: m.value, + ordering: m.ordering, + created: m.created, + metadata: m.metadata, + }) + .try_collect() + .await?; + + #[derive(Deserialize)] + struct Hash { + pub file_id: FileId, + pub algorithm: String, + pub hash: String, + } + + #[derive(Deserialize)] + struct File { + pub id: FileId, + pub url: String, + pub filename: String, + pub primary: bool, + pub size: u32, + pub file_type: Option, + } + + let file_ids = DashSet::new(); + let reverse_file_map = DashMap::new(); + let files : DashMap> = sqlx::query!( " SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type FROM files f WHERE f.version_id = ANY($1) ", - &version_ids_parsed - ) - .fetch(&mut *exec) - .try_fold(DashMap::new(), |acc : DashMap>, m| { - let file = File { - id: FileId(m.id), - url: m.url, - filename: m.filename, - primary: m.is_primary, - size: m.size as u32, - file_type: m.file_type.map(|x| FileType::from_string(&x)), - }; - - file_ids.insert(FileId(m.id)); - reverse_file_map.insert(FileId(m.id), VersionId(m.version_id)); - - 
acc.entry(VersionId(m.version_id)) - .or_default() - .push(file); - async move { Ok(acc) } - } - ).await?; + &version_ids + ).fetch(&mut *exec) + .try_fold(DashMap::new(), |acc : DashMap>, m| { + let file = File { + id: FileId(m.id), + url: m.url, + filename: m.filename, + primary: m.is_primary, + size: m.size as u32, + file_type: m.file_type.map(|x| FileType::from_string(&x)), + }; + + file_ids.insert(FileId(m.id)); + reverse_file_map.insert(FileId(m.id), VersionId(m.version_id)); + + acc.entry(VersionId(m.version_id)) + .or_default() + .push(file); + async move { Ok(acc) } + } + ).await?; - let hashes: DashMap> = sqlx::query!( + let hashes: DashMap> = sqlx::query!( " SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash FROM hashes @@ -700,157 +664,146 @@ impl Version { ", &file_ids.iter().map(|x| x.0).collect::>() ) - .fetch(&mut *exec) - .try_fold(DashMap::new(), |acc: DashMap>, m| { - if let Some(found_hash) = m.hash { - let hash = Hash { - file_id: FileId(m.file_id), - algorithm: m.algorithm, - hash: found_hash, - }; - - if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) { - acc.entry(*version_id).or_default().push(hash); - } - } - async move { Ok(acc) } - }) - .await?; + .fetch(&mut *exec) + .try_fold(DashMap::new(), |acc: DashMap>, m| { + if let Some(found_hash) = m.hash { + let hash = Hash { + file_id: FileId(m.file_id), + algorithm: m.algorithm, + hash: found_hash, + }; + + if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) { + acc.entry(*version_id).or_default().push(hash); + } + } + async move { Ok(acc) } + }) + .await?; - let dependencies : DashMap> = sqlx::query!( + let dependencies : DashMap> = sqlx::query!( " SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type FROM dependencies d WHERE dependent_id = ANY($1) ", - &version_ids_parsed - ) - .fetch(&mut *exec) 
- .try_fold(DashMap::new(), |acc : DashMap<_,Vec>, m| { - let dependency = QueryDependency { - project_id: m.dependency_project_id.map(ProjectId), - version_id: m.dependency_version_id.map(VersionId), - file_name: m.file_name, - dependency_type: m.dependency_type, - }; - - acc.entry(VersionId(m.version_id)) - .or_default() - .push(dependency); - async move { Ok(acc) } - } - ).await?; + &version_ids + ).fetch(&mut *exec) + .try_fold(DashMap::new(), |acc : DashMap<_,Vec>, m| { + let dependency = QueryDependency { + project_id: m.dependency_project_id.map(ProjectId), + version_id: m.dependency_version_id.map(VersionId), + file_name: m.file_name, + dependency_type: m.dependency_type, + }; + + acc.entry(VersionId(m.version_id)) + .or_default() + .push(dependency); + async move { Ok(acc) } + } + ).await?; - let db_versions: Vec = sqlx::query!( + let res = sqlx::query!( " SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number, v.changelog changelog, v.date_published date_published, v.downloads downloads, v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering FROM versions v - WHERE v.id = ANY($1) - ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC; + WHERE v.id = ANY($1); ", - &version_ids_parsed + &version_ids ) - .fetch_many(&mut *exec) - .try_filter_map(|e| async { - Ok(e.right().map(|v| - { - let version_id = VersionId(v.id); - let VersionLoaderData { - loaders, - project_types, - games, - loader_loader_field_ids, - } = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default(); - let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default(); - let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default(); - let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default(); - let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default(); - - let loader_fields = 
loader_fields.iter() - .filter(|x| loader_loader_field_ids.contains(&x.id)) - .collect::>(); - - QueryVersion { - inner: Version { - id: VersionId(v.id), - project_id: ProjectId(v.mod_id), - author_id: UserId(v.author_id), - name: v.version_name, - version_number: v.version_number, - changelog: v.changelog, - date_published: v.date_published, - downloads: v.downloads, - version_type: v.version_type, - featured: v.featured, - status: VersionStatus::from_string(&v.status), - requested_status: v.requested_status - .map(|x| VersionStatus::from_string(&x)), - ordering: v.ordering, - }, - files: { - let mut files = files.into_iter().map(|x| { - let mut file_hashes = HashMap::new(); - - for hash in hashes.iter() { - if hash.file_id == x.id { - file_hashes.insert( - hash.algorithm.clone(), - hash.hash.clone(), - ); - } - } - - QueryFile { - id: x.id, - url: x.url.clone(), - filename: x.filename.clone(), - hashes: file_hashes, - primary: x.primary, - size: x.size, - file_type: x.file_type, - } - }).collect::>(); - - files.sort_by(|a, b| { - if a.primary { - Ordering::Less - } else if b.primary { - Ordering::Greater - } else { - a.filename.cmp(&b.filename) - } - }); - - files - }, - version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, false), - loaders, - project_types, - games, - dependencies, - } - })) + .fetch(&mut *exec) + .try_fold(DashMap::new(), |acc, v| { + let version_id = VersionId(v.id); + let VersionLoaderData { + loaders, + project_types, + games, + loader_loader_field_ids, + } = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default(); + let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default(); + let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default(); + let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default(); + let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default(); + + let loader_fields = 
loader_fields.iter() + .filter(|x| loader_loader_field_ids.contains(&x.id)) + .collect::>(); + + let query_version = QueryVersion { + inner: Version { + id: VersionId(v.id), + project_id: ProjectId(v.mod_id), + author_id: UserId(v.author_id), + name: v.version_name, + version_number: v.version_number, + changelog: v.changelog, + date_published: v.date_published, + downloads: v.downloads, + version_type: v.version_type, + featured: v.featured, + status: VersionStatus::from_string(&v.status), + requested_status: v.requested_status + .map(|x| VersionStatus::from_string(&x)), + ordering: v.ordering, + }, + files: { + let mut files = files.into_iter().map(|x| { + let mut file_hashes = HashMap::new(); + + for hash in hashes.iter() { + if hash.file_id == x.id { + file_hashes.insert( + hash.algorithm.clone(), + hash.hash.clone(), + ); + } + } + + QueryFile { + id: x.id, + url: x.url.clone(), + filename: x.filename.clone(), + hashes: file_hashes, + primary: x.primary, + size: x.size, + file_type: x.file_type, + } + }).collect::>(); + + files.sort_by(|a, b| { + if a.primary { + Ordering::Less + } else if b.primary { + Ordering::Greater + } else { + a.filename.cmp(&b.filename) + } + }); + + files + }, + version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, false), + loaders, + project_types, + games, + dependencies, + }; + + acc.insert(v.id, query_version); + async move { Ok(acc) } }) - .try_collect::>() .await?; - for version in db_versions { - redis - .set_serialized_to_json( - VERSIONS_NAMESPACE, - version.inner.id.0, - &version, - None, - ) - .await?; + Ok(res) + }, + ).await?; - found_versions.push(version); - } - } + val.sort(); - Ok(found_versions) + Ok(val) } } @@ -862,7 +815,7 @@ impl Version { redis: &RedisPool, ) -> Result, DatabaseError> where - E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, + E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync, { Self::get_files_from_hash(algo, 
&[hash], executor, redis) .await @@ -879,112 +832,68 @@ impl Version { redis: &RedisPool, ) -> Result, DatabaseError> where - E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, + E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy + Send + Sync, { - use futures::stream::TryStreamExt; - - let mut redis = redis.connect().await?; - - if hashes.is_empty() { - return Ok(Vec::new()); - } - - let mut file_ids_parsed = hashes.to_vec(); - - let mut found_files = Vec::new(); - - let files = redis - .multi_get::( - VERSION_FILES_NAMESPACE, - file_ids_parsed - .iter() - .map(|hash| format!("{}_{}", algorithm, hash)) - .collect::>(), - ) - .await?; - for file in files { - if let Some(mut file) = - file.and_then(|x| serde_json::from_str::>(&x).ok()) - { - file_ids_parsed.retain(|x| { - !file - .iter() - .any(|y| y.hashes.iter().any(|z| z.0 == &algorithm && z.1 == x)) - }); - found_files.append(&mut file); - continue; - } - } - - if !file_ids_parsed.is_empty() { - let db_files: Vec = sqlx::query!( - " - SELECT f.id, f.version_id, v.mod_id, f.url, f.filename, f.is_primary, f.size, f.file_type, - JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'))) filter (where h.hash is not null) hashes - FROM files f - INNER JOIN versions v on v.id = f.version_id - INNER JOIN hashes h on h.file_id = f.id - WHERE h.algorithm = $1 AND h.hash = ANY($2) - GROUP BY f.id, v.mod_id, v.date_published - ORDER BY v.date_published - ", - algorithm, - &file_ids_parsed.into_iter().map(|x| x.as_bytes().to_vec()).collect::>(), - ) - .fetch_many(executor) - .try_filter_map(|e| async { - Ok(e.right().map(|f| { + let val = redis.get_cached_keys( + VERSION_FILES_NAMESPACE, + &hashes.iter().map(|x| format!("{algorithm}_{x}")).collect::>(), + |file_ids| async move { + let files = sqlx::query!( + " + SELECT f.id, f.version_id, v.mod_id, f.url, f.filename, f.is_primary, f.size, f.file_type, + JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', 
encode(h.hash, 'escape'))) filter (where h.hash is not null) hashes + FROM files f + INNER JOIN versions v on v.id = f.version_id + INNER JOIN hashes h on h.file_id = f.id + WHERE h.algorithm = $1 AND h.hash = ANY($2) + GROUP BY f.id, v.mod_id, v.date_published + ORDER BY v.date_published + ", + algorithm, + &file_ids.into_iter().flat_map(|x| x.split('_').last().map(|x| x.as_bytes().to_vec())).collect::>(), + ) + .fetch(executor) + .try_fold(DashMap::new(), |acc, f| { #[derive(Deserialize)] struct Hash { pub algorithm: String, pub hash: String, } - SingleFile { - id: FileId(f.id), - version_id: VersionId(f.version_id), - project_id: ProjectId(f.mod_id), - url: f.url, - filename: f.filename, - hashes: serde_json::from_value::>( - f.hashes.unwrap_or_default(), - ) - .ok() - .unwrap_or_default().into_iter().map(|x| (x.algorithm, x.hash)).collect(), - primary: f.is_primary, - size: f.size as u32, - file_type: f.file_type.map(|x| FileType::from_string(&x)), + let hashes = serde_json::from_value::>( + f.hashes.unwrap_or_default(), + ) + .ok() + .unwrap_or_default().into_iter().map(|x| (x.algorithm, x.hash)) + .collect::>(); + + if let Some(hash) = hashes.get(&algorithm) { + let key = format!("{algorithm}_{hash}"); + + let file = SingleFile { + id: FileId(f.id), + version_id: VersionId(f.version_id), + project_id: ProjectId(f.mod_id), + url: f.url, + filename: f.filename, + hashes, + primary: f.is_primary, + size: f.size as u32, + file_type: f.file_type.map(|x| FileType::from_string(&x)), + }; + + acc.insert(key, file); } - } - )) - }) - .try_collect::>() - .await?; - - let mut save_files: HashMap> = HashMap::new(); - - for file in db_files { - for (algo, hash) in &file.hashes { - let key = format!("{}_{}", algo, hash); - if let Some(files) = save_files.get_mut(&key) { - files.push(file.clone()); - } else { - save_files.insert(key, vec![file.clone()]); - } - } - } - - for (key, mut files) in save_files { - redis - .set_serialized_to_json(VERSION_FILES_NAMESPACE, key, 
&files, None) + async move { Ok(acc) } + }) .await?; - found_files.append(&mut files); + Ok(files) } - } + ).await?; - Ok(found_files) + Ok(val) } pub async fn clear_cache( diff --git a/src/database/redis.rs b/src/database/redis.rs index c80450cd..35f6f5a8 100644 --- a/src/database/redis.rs +++ b/src/database/redis.rs @@ -1,10 +1,20 @@ use super::models::DatabaseError; +use crate::models::ids::base62_impl::{parse_base62, to_base62}; +use chrono::{TimeZone, Utc}; +use dashmap::DashMap; use deadpool_redis::{Config, Runtime}; -use itertools::Itertools; -use redis::{cmd, Cmd, FromRedisValue}; -use std::fmt::Display; +use redis::{cmd, Cmd, ExistenceCheck, SetExpiry, SetOptions}; +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::fmt::{Debug, Display}; +use std::future::Future; +use std::hash::Hash; +use std::pin::Pin; +use std::time::Duration; -const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes +const DEFAULT_EXPIRY: i64 = 60 * 60 * 12; // 12 hours +const ACTUAL_EXPIRY: i64 = 60 * 30; // 30 minutes #[derive(Clone)] pub struct RedisPool { @@ -47,6 +57,364 @@ impl RedisPool { meta_namespace: self.meta_namespace.clone(), }) } + + pub async fn get_cached_keys( + &self, + namespace: &str, + keys: &[K], + closure: F, + ) -> Result, DatabaseError> + where + F: FnOnce(Vec) -> Fut + Send, + Fut: Future, DatabaseError>> + Send, + T: Serialize + DeserializeOwned + Send, + K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Debug + Send, + { + Ok(self + .get_cached_keys_raw(namespace, keys, closure) + .await? 
+ .into_iter() + .map(|x| x.1) + .collect()) + } + + pub async fn get_cached_keys_raw( + &self, + namespace: &str, + keys: &[K], + closure: F, + ) -> Result, DatabaseError> + where + F: FnOnce(Vec) -> Fut + Send, + Fut: Future, DatabaseError>> + Send, + T: Serialize + DeserializeOwned + Send, + K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Debug + Send, + { + self.get_cached_keys_raw_with_slug(namespace, None, false, keys, |ids| async move { + Ok(closure(ids) + .await? + .into_iter() + .map(|(key, val)| (key, (None::, val))) + .collect()) + }) + .await + } + + pub async fn get_cached_keys_with_slug( + &self, + namespace: &str, + slug_namespace: &str, + case_sensitive: bool, + keys: &[I], + closure: F, + ) -> Result, DatabaseError> + where + F: FnOnce(Vec) -> Fut + Send, + Fut: Future, T)>, DatabaseError>> + Send, + T: Serialize + DeserializeOwned + Send, + I: Display + Hash + Eq + PartialEq + Clone + Debug + Send, + K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Send, + S: Display + Clone + DeserializeOwned + Serialize + Debug + Send, + { + Ok(self + .get_cached_keys_raw_with_slug( + namespace, + Some(slug_namespace), + case_sensitive, + keys, + closure, + ) + .await? 
+ .into_iter() + .map(|x| x.1) + .collect()) + } + + pub async fn get_cached_keys_raw_with_slug( + &self, + namespace: &str, + slug_namespace: Option<&str>, + case_sensitive: bool, + keys: &[I], + closure: F, + ) -> Result, DatabaseError> + where + F: FnOnce(Vec) -> Fut + Send, + Fut: Future, T)>, DatabaseError>> + Send, + T: Serialize + DeserializeOwned + Send, + I: Display + Hash + Eq + PartialEq + Clone + Debug + Send, + K: Display + Hash + Eq + PartialEq + Clone + DeserializeOwned + Serialize + Send, + S: Display + Clone + DeserializeOwned + Serialize + Debug + Send, + { + let connection = self.connect().await?.connection; + + let ids = keys + .iter() + .map(|x| (x.to_string(), x.clone())) + .collect::>(); + + if ids.is_empty() { + return Ok(HashMap::new()); + } + + let get_cached_values = + |ids: DashMap, mut connection: deadpool_redis::Connection| async move { + let slug_ids = if let Some(slug_namespace) = slug_namespace { + cmd("MGET") + .arg( + ids.iter() + .map(|x| { + format!( + "{}_{slug_namespace}:{}", + self.meta_namespace, + if case_sensitive { + x.value().to_string() + } else { + x.value().to_string().to_lowercase() + } + ) + }) + .collect::>(), + ) + .query_async::<_, Vec>>(&mut connection) + .await? + .into_iter() + .flatten() + .collect::>() + } else { + Vec::new() + }; + + let cached_values = cmd("MGET") + .arg( + ids.iter() + .map(|x| x.value().to_string()) + .chain(ids.iter().filter_map(|x| { + parse_base62(&x.value().to_string()) + .ok() + .map(|x| x.to_string()) + })) + .chain(slug_ids) + .map(|x| format!("{}_{namespace}:{x}", self.meta_namespace)) + .collect::>(), + ) + .query_async::<_, Vec>>(&mut connection) + .await? 
+ .into_iter() + .filter_map(|x| { + x.and_then(|val| serde_json::from_str::>(&val).ok()) + .map(|val| (val.key.clone(), val)) + }) + .collect::>(); + + Ok::<_, DatabaseError>((cached_values, connection, ids)) + }; + + let current_time = Utc::now(); + let mut expired_values = HashMap::new(); + + let (cached_values_raw, mut connection, ids) = get_cached_values(ids, connection).await?; + let mut cached_values = cached_values_raw + .into_iter() + .filter_map(|(key, val)| { + if Utc.timestamp(val.iat + ACTUAL_EXPIRY, 0) < current_time { + expired_values.insert(val.key.to_string(), val); + + None + } else { + let key_str = val.key.to_string(); + ids.remove(&key_str); + + if let Ok(value) = key_str.parse::() { + let base62 = to_base62(value); + ids.remove(&base62); + } + + if let Some(ref alias) = val.alias { + ids.remove(&alias.to_string()); + } + + Some((key, val)) + } + }) + .collect::>(); + + let subscribe_ids = DashMap::new(); + + if !ids.is_empty() { + let mut pipe = redis::pipe(); + + let fetch_ids = ids.iter().map(|x| x.key().clone()).collect::>(); + + fetch_ids.iter().for_each(|key| { + pipe.atomic().set_options( + format!("{}_{namespace}:{}/lock", self.meta_namespace, key), + 100, + SetOptions::default() + .get(true) + .conditional_set(ExistenceCheck::NX) + .with_expiration(SetExpiry::EX(60)), + ); + }); + let results = pipe + .query_async::<_, Vec>>(&mut connection) + .await?; + + for (idx, key) in fetch_ids.into_iter().enumerate() { + if let Some(locked) = results.get(idx) { + if locked.is_none() { + continue; + } + } + + if let Some((key, raw_key)) = ids.remove(&key) { + if let Some(val) = expired_values.remove(&key) { + if let Some(ref alias) = val.alias { + ids.remove(&alias.to_string()); + } + + if let Ok(value) = val.key.to_string().parse::() { + let base62 = to_base62(value); + ids.remove(&base62); + } + + cached_values.insert(val.key.clone(), val); + } else { + subscribe_ids.insert(key, raw_key); + } + } + } + } + + #[allow(clippy::type_complexity)] + 
let mut fetch_tasks: Vec< + Pin>, DatabaseError>> + Send>>, + > = Vec::new(); + + if !ids.is_empty() { + fetch_tasks.push(Box::pin(async { + let fetch_ids = ids.iter().map(|x| x.value().clone()).collect::>(); + + let vals = closure(fetch_ids).await?; + let mut return_values = HashMap::new(); + + let mut pipe = redis::pipe(); + if !vals.is_empty() { + for (key, (slug, value)) in vals { + let value = RedisValue { + key: key.clone(), + iat: Utc::now().timestamp(), + val: value, + alias: slug.clone(), + }; + + pipe.atomic().set_ex( + format!("{}_{namespace}:{key}", self.meta_namespace), + serde_json::to_string(&value)?, + DEFAULT_EXPIRY as u64, + ); + + if let Some(slug) = slug { + ids.remove(&slug.to_string()); + + if let Some(slug_namespace) = slug_namespace { + let actual_slug = if case_sensitive { + slug.to_string() + } else { + slug.to_string().to_lowercase() + }; + + pipe.atomic().set_ex( + format!( + "{}_{slug_namespace}:{}", + self.meta_namespace, actual_slug + ), + key.to_string(), + DEFAULT_EXPIRY as u64, + ); + + pipe.atomic().del(format!( + "{}_{namespace}:{}/lock", + self.meta_namespace, actual_slug + )); + } + } + + let key_str = key.to_string(); + ids.remove(&key_str); + + if let Ok(value) = key_str.parse::() { + let base62 = to_base62(value); + ids.remove(&base62); + + pipe.atomic() + .del(format!("{}_{namespace}:{base62}/lock", self.meta_namespace)); + } + + pipe.atomic() + .del(format!("{}_{namespace}:{key}/lock", self.meta_namespace)); + + return_values.insert(key, value); + } + } + + for (key, _) in ids { + pipe.atomic() + .del(format!("{}_{namespace}:{key}/lock", self.meta_namespace)); + } + + pipe.query_async(&mut connection).await?; + + Ok(return_values) + })); + } + + if !subscribe_ids.is_empty() { + fetch_tasks.push(Box::pin(async { + let mut connection = self.pool.get().await?; + + let mut interval = tokio::time::interval(Duration::from_millis(100)); + let start = Utc::now(); + loop { + let results = cmd("MGET") + .arg( + subscribe_ids + 
.iter() + .map(|x| { + format!("{}_{namespace}:{}/lock", self.meta_namespace, x.key()) + }) + .collect::>(), + ) + .query_async::<_, Vec>>(&mut connection) + .await?; + + if results.into_iter().all(|x| x.is_none()) { + break; + } + + if (Utc::now() - start) > chrono::Duration::seconds(5) { + return Err(DatabaseError::CacheTimeout); + } + + interval.tick().await; + } + + let (return_values, _, _) = get_cached_values(subscribe_ids, connection).await?; + + Ok(return_values) + })); + } + + if !fetch_tasks.is_empty() { + for map in futures::future::try_join_all(fetch_tasks).await? { + for (key, value) in map { + cached_values.insert(key, value); + } + } + } + + Ok(cached_values.into_iter().map(|x| (x.0, x.1.val)).collect()) + } } impl RedisConnection { @@ -120,26 +488,6 @@ impl RedisConnection { .and_then(|x| serde_json::from_str(&x).ok())) } - pub async fn multi_get( - &mut self, - namespace: &str, - ids: impl IntoIterator, - ) -> Result>, DatabaseError> - where - R: FromRedisValue, - { - let mut cmd = cmd("MGET"); - - let ids = ids.into_iter().map(|x| x.to_string()).collect_vec(); - redis_args( - &mut cmd, - &ids.into_iter() - .map(|x| format!("{}_{}:{}", self.meta_namespace, namespace, x)) - .collect_vec(), - ); - Ok(redis_execute(&mut cmd, &mut self.connection).await?) 
- } - pub async fn delete(&mut self, namespace: &str, id: T1) -> Result<(), DatabaseError> where T1: Display, @@ -177,6 +525,15 @@ impl RedisConnection { } } +#[derive(Serialize, Deserialize)] +pub struct RedisValue { + key: K, + #[serde(skip_serializing_if = "Option::is_none")] + alias: Option, + iat: i64, + val: T, +} + pub fn redis_args(cmd: &mut Cmd, args: &[String]) { for arg in args { cmd.arg(arg); diff --git a/src/lib.rs b/src/lib.rs index d2345d85..fb54b54b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,6 +18,7 @@ use clickhouse_crate::Client; use crate::routes::not_found; use crate::scheduler::schedule; use crate::util::cors::default_cors; +use crate::queue::moderation::AutomatedModerationQueue; use crate::{ queue::payouts::process_payout, search::indexing::index_projects, @@ -54,6 +55,7 @@ pub struct LabrinthConfig { pub payouts_queue: Arc, pub analytics_queue: Arc, pub active_sockets: Arc, + pub automated_moderation_queue: Arc, } pub fn app_setup( @@ -70,6 +72,17 @@ pub fn app_setup( dotenvy::var("BIND_ADDR").unwrap() ); + let automated_moderation_queue = Arc::new(AutomatedModerationQueue::default()); + + let automated_moderation_queue_ref = automated_moderation_queue.clone(); + let pool_ref = pool.clone(); + let redis_pool_ref = redis_pool.clone(); + tokio::spawn(async move { + automated_moderation_queue_ref + .task(pool_ref, redis_pool_ref) + .await; + }); + // The interval in seconds at which the local database is indexed // for searching. Defaults to 1 hour if unset. 
let local_index_interval = @@ -245,6 +258,7 @@ pub fn app_setup( payouts_queue, analytics_queue, active_sockets, + automated_moderation_queue, } } @@ -266,6 +280,7 @@ pub fn app_config(labrinth_config: LabrinthConfig) -> Router { .layer(Extension(labrinth_config.clickhouse.clone())) .layer(Extension(labrinth_config.maxmind.clone())) .layer(Extension(labrinth_config.active_sockets.clone())) + .layer(Extension(labrinth_config.automated_moderation_queue.clone())) .layer(DefaultBodyLimit::max(5 * 1024 * 1024)) } @@ -387,5 +402,7 @@ pub fn check_env_vars() -> bool { failed |= check_var::("PAYOUTS_BUDGET"); + failed |= check_var::("FLAME_ANVIL_URL"); + failed } diff --git a/src/main.rs b/src/main.rs index ae48b92d..eef0ec98 100644 --- a/src/main.rs +++ b/src/main.rs @@ -110,7 +110,6 @@ async fn main() -> std::io::Result<()> { let maxmind_reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap()); let search_config = search::SearchConfig::new(None); - info!("Starting Actix HTTP server!"); let labrinth_config = labrinth::app_setup( pool.clone(), @@ -160,8 +159,8 @@ async fn main() -> std::io::Result<()> { AUTHORIZATION, ))) .layer(TraceLayer::new_for_http()) - .layer(sentry_tower::NewSentryLayer::new_from_top()) - .layer(sentry_tower::SentryHttpLayer::with_transaction()) + // .layer(sentry::NewSentryLayer::new_from_top()) + // .layer(sentry::SentryHttpLayer::with_transaction()) .into_make_service_with_connect_info::(); // run our app with hyper, listening globally on port 3000 diff --git a/src/models/error.rs b/src/models/error.rs index 5ac3c607..28f737c1 100644 --- a/src/models/error.rs +++ b/src/models/error.rs @@ -4,5 +4,5 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize)] pub struct ApiError<'a> { pub error: &'a str, - pub description: &'a str, + pub description: String, } diff --git a/src/models/v3/analytics.rs b/src/models/v3/analytics.rs index 669175ad..b59254a7 100644 --- a/src/models/v3/analytics.rs +++ 
b/src/models/v3/analytics.rs @@ -34,6 +34,8 @@ pub struct PageView { pub user_id: u64, // Modrinth Project ID (used for payouts) pub project_id: u64, + // whether this view will be monetized / counted for payouts + pub monetized: bool, // The below information is used exclusively for data aggregation and fraud detection // (ex: page view botting). diff --git a/src/models/v3/ids.rs b/src/models/v3/ids.rs index e8cecae8..0e795a78 100644 --- a/src/models/v3/ids.rs +++ b/src/models/v3/ids.rs @@ -40,11 +40,15 @@ pub fn random_base62(n: usize) -> u64 { /// This method panics if `n` is 0 or greater than 11, since a `u64` /// can only represent up to 11 character base62 strings pub fn random_base62_rng(rng: &mut R, n: usize) -> u64 { + random_base62_rng_range(rng, n, n) +} + +pub fn random_base62_rng_range(rng: &mut R, n_min: usize, n_max: usize) -> u64 { use rand::Rng; - assert!(n > 0 && n <= 11); + assert!(n_min > 0 && n_max <= 11 && n_min <= n_max); // gen_range is [low, high): max value is `MULTIPLES[n] - 1`, // which is n characters long when encoded - rng.gen_range(MULTIPLES[n - 1]..MULTIPLES[n]) + rng.gen_range(MULTIPLES[n_min - 1]..MULTIPLES[n_max]) } const MULTIPLES: [u64; 12] = [ diff --git a/src/models/v3/organizations.rs b/src/models/v3/organizations.rs index 11a0f72d..f2817e36 100644 --- a/src/models/v3/organizations.rs +++ b/src/models/v3/organizations.rs @@ -5,7 +5,7 @@ use super::{ use serde::{Deserialize, Serialize}; /// The ID of a team -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct OrganizationId(pub u64); diff --git a/src/models/v3/pack.rs b/src/models/v3/pack.rs index c73def00..49e22ca3 100644 --- a/src/models/v3/pack.rs +++ b/src/models/v3/pack.rs @@ -18,7 +18,7 @@ pub struct PackFormat { pub dependencies: std::collections::HashMap, } -#[derive(Serialize, Deserialize, Validate, Eq, PartialEq, 
Debug)] +#[derive(Serialize, Deserialize, Validate, Eq, PartialEq, Debug, Clone)] #[serde(rename_all = "camelCase")] pub struct PackFile { pub path: String, @@ -54,7 +54,7 @@ fn validate_download_url(values: &[String]) -> Result<(), validator::ValidationE Ok(()) } -#[derive(Serialize, Deserialize, Eq, PartialEq, Hash, Debug)] +#[derive(Serialize, Deserialize, Eq, PartialEq, Hash, Debug, Clone)] #[serde(rename_all = "camelCase", from = "String")] pub enum PackFileHash { Sha1, @@ -72,7 +72,7 @@ impl From for PackFileHash { } } -#[derive(Serialize, Deserialize, Eq, PartialEq, Hash, Debug)] +#[derive(Serialize, Deserialize, Eq, PartialEq, Hash, Debug, Clone)] #[serde(rename_all = "camelCase")] pub enum EnvType { Client, diff --git a/src/models/v3/pats.rs b/src/models/v3/pats.rs index d4ef6e28..4de7e7c8 100644 --- a/src/models/v3/pats.rs +++ b/src/models/v3/pats.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; /// The ID of a team -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct PatId(pub u64); diff --git a/src/models/v3/projects.rs b/src/models/v3/projects.rs index 5bb0710b..8e75d079 100644 --- a/src/models/v3/projects.rs +++ b/src/models/v3/projects.rs @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; use validator::Validate; /// The ID of a specific project, encoded as base62 for usage in the API -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct ProjectId(pub u64); diff --git a/src/models/v3/sessions.rs b/src/models/v3/sessions.rs index 9cfb6d50..46a8a69a 100644 --- a/src/models/v3/sessions.rs +++ b/src/models/v3/sessions.rs @@ -3,7 +3,7 @@ use crate::models::users::UserId; use chrono::{DateTime, Utc}; use 
serde::{Deserialize, Serialize}; -#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Debug)] #[serde(from = "Base62Id")] #[serde(into = "Base62Id")] pub struct SessionId(pub u64); diff --git a/src/models/v3/threads.rs b/src/models/v3/threads.rs index 5a18de7f..2a7436ab 100644 --- a/src/models/v3/threads.rs +++ b/src/models/v3/threads.rs @@ -32,6 +32,7 @@ pub struct ThreadMessage { pub author_id: Option, pub body: MessageBody, pub created: DateTime, + pub hide_identity: bool, } #[derive(Serialize, Deserialize, Clone)] @@ -114,24 +115,17 @@ impl Thread { }) .map(|x| ThreadMessage { id: x.id.into(), - author_id: if users - .iter() - .find(|y| x.author_id == Some(y.id.into())) - .map(|x| x.role.is_mod() && !user.role.is_mod()) - .unwrap_or(false) - { + author_id: if x.hide_identity && !user.role.is_mod() { None } else { x.author_id.map(|x| x.into()) }, body: x.body, created: x.created, + hide_identity: x.hide_identity, }) .collect(), - members: users - .into_iter() - .filter(|x| !x.role.is_mod() || user.role.is_mod()) - .collect(), + members: users, } } } diff --git a/src/queue/analytics.rs b/src/queue/analytics.rs index 0d0fa869..f1fd9121 100644 --- a/src/queue/analytics.rs +++ b/src/queue/analytics.rs @@ -5,12 +5,15 @@ use crate::routes::ApiError; use dashmap::{DashMap, DashSet}; use redis::cmd; use sqlx::PgPool; +use std::collections::HashMap; +use std::net::Ipv6Addr; const DOWNLOADS_NAMESPACE: &str = "downloads"; +const VIEWS_NAMESPACE: &str = "views"; pub struct AnalyticsQueue { - views_queue: DashSet, - downloads_queue: DashMap, + views_queue: DashMap<(u64, u64), Vec>, + downloads_queue: DashMap<(u64, u64), Download>, playtime_queue: DashSet, } @@ -24,26 +27,37 @@ impl Default for AnalyticsQueue { impl AnalyticsQueue { pub fn new() -> Self { AnalyticsQueue { - views_queue: DashSet::with_capacity(1000), + views_queue: DashMap::with_capacity(1000), downloads_queue: 
DashMap::with_capacity(1000), playtime_queue: DashSet::with_capacity(1000), } } - pub fn add_view(&self, page_view: PageView) { - self.views_queue.insert(page_view); + fn strip_ip(ip: Ipv6Addr) -> u64 { + if let Some(ip) = ip.to_ipv4_mapped() { + let octets = ip.octets(); + u64::from_be_bytes([octets[0], octets[1], octets[2], octets[3], 0, 0, 0, 0]) + } else { + let octets = ip.octets(); + u64::from_be_bytes([ + octets[0], octets[1], octets[2], octets[3], octets[4], octets[5], octets[6], + octets[7], + ]) + } } + pub fn add_view(&self, page_view: PageView) { + let ip_stripped = Self::strip_ip(page_view.ip); + + self.views_queue + .entry((ip_stripped, page_view.project_id)) + .or_default() + .push(page_view); + } pub fn add_download(&self, download: Download) { - let ip_stripped = if let Some(ip) = download.ip.to_ipv4_mapped() { - let octets = ip.octets(); - u64::from_be_bytes([0, 0, 0, 0, octets[0], octets[1], octets[2], octets[3]]) - } else { - let octets = download.ip.octets(); - u64::from_be_bytes([0, 0, 0, 0, octets[0], octets[1], octets[2], octets[3]]) - }; + let ip_stripped = Self::strip_ip(download.ip); self.downloads_queue - .insert(format!("{}-{}", ip_stripped, download.project_id), download); + .insert((ip_stripped, download.project_id), download); } pub fn add_playtime(&self, playtime: Playtime) { @@ -65,16 +79,6 @@ impl AnalyticsQueue { let playtime_queue = self.playtime_queue.clone(); self.playtime_queue.clear(); - if !views_queue.is_empty() { - let mut views = client.insert("views")?; - - for view in views_queue { - views.write(&view).await?; - } - - views.end().await?; - } - if !playtime_queue.is_empty() { let mut playtimes = client.insert("playtime")?; @@ -85,6 +89,76 @@ impl AnalyticsQueue { playtimes.end().await?; } + if !views_queue.is_empty() { + let mut views_keys = Vec::new(); + let mut raw_views = Vec::new(); + + for (key, views) in views_queue { + views_keys.push(key); + raw_views.push((views, true)); + } + + let mut redis = 
redis.pool.get().await.map_err(DatabaseError::RedisPool)?; + + let results = cmd("MGET") + .arg( + views_keys + .iter() + .map(|x| format!("{}:{}-{}", VIEWS_NAMESPACE, x.0, x.1)) + .collect::>(), + ) + .query_async::<_, Vec>>(&mut redis) + .await + .map_err(DatabaseError::CacheError)?; + + let mut pipe = redis::pipe(); + for (idx, count) in results.into_iter().enumerate() { + let key = &views_keys[idx]; + + let new_count = if let Some((views, monetized)) = raw_views.get_mut(idx) { + if let Some(count) = count { + if count > 3 { + *monetized = false; + continue; + } + + if (count + views.len() as u32) > 3 { + *monetized = false; + } + + count + (views.len() as u32) + } else { + views.len() as u32 + } + } else { + 1 + }; + + pipe.atomic().set_ex( + format!("{}:{}-{}", VIEWS_NAMESPACE, key.0, key.1), + new_count, + 6 * 60 * 60, + ); + } + pipe.query_async(&mut *redis) + .await + .map_err(DatabaseError::CacheError)?; + + let mut views = client.insert("views")?; + + for (all_views, monetized) in raw_views { + for (idx, mut view) in all_views.into_iter().enumerate() { + if idx != 0 || !monetized { + view.monetized = false; + } + + views.write(&view).await?; + } + } + + views.end().await?; + } + if !downloads_queue.is_empty() { let mut downloads_keys = Vec::new(); let raw_downloads = DashMap::new(); @@ -100,7 +174,7 @@ impl AnalyticsQueue { .arg( downloads_keys .iter() - .map(|x| format!("{}:{}", DOWNLOADS_NAMESPACE, x)) + .map(|x| format!("{}:{}-{}", DOWNLOADS_NAMESPACE, x.0, x.1)) .collect::>(), ) .query_async::<_, Vec>>(&mut redis) @@ -123,7 +197,7 @@ impl AnalyticsQueue { }; pipe.atomic().set_ex( - format!("{}:{}", DOWNLOADS_NAMESPACE, key), + format!("{}:{}-{}", DOWNLOADS_NAMESPACE, key.0, key.1), new_count, 6 * 60 * 60, ); @@ -132,37 +206,46 @@ impl AnalyticsQueue { .await .map_err(DatabaseError::CacheError)?; - let version_ids = raw_downloads - .iter() - .map(|x| x.version_id as i64) - .collect::>(); - let project_ids = raw_downloads - .iter() - .map(|x| 
x.project_id as i64) - .collect::>(); - let mut transaction = pool.begin().await?; let mut downloads = client.insert("downloads")?; + let mut version_downloads: HashMap = HashMap::new(); + let mut project_downloads: HashMap = HashMap::new(); + for (_, download) in raw_downloads { + *version_downloads + .entry(download.version_id as i64) + .or_default() += 1; + *project_downloads + .entry(download.project_id as i64) + .or_default() += 1; + downloads.write(&download).await?; } - sqlx::query!( - "UPDATE versions - SET downloads = downloads + 1 - WHERE id = ANY($1)", - &version_ids + sqlx::query( + " + UPDATE versions v + SET downloads = v.downloads + x.amount + FROM unnest($1::BIGINT[], $2::int[]) AS x(id, amount) + WHERE v.id = x.id + ", ) + .bind(version_downloads.keys().copied().collect::>()) + .bind(version_downloads.values().copied().collect::>()) .execute(&mut *transaction) .await?; - sqlx::query!( - "UPDATE mods - SET downloads = downloads + 1 - WHERE id = ANY($1)", - &project_ids + sqlx::query( + " + UPDATE mods m + SET downloads = m.downloads + x.amount + FROM unnest($1::BIGINT[], $2::int[]) AS x(id, amount) + WHERE m.id = x.id + ", ) + .bind(project_downloads.keys().copied().collect::>()) + .bind(project_downloads.values().copied().collect::>()) .execute(&mut *transaction) .await?; diff --git a/src/queue/mod.rs b/src/queue/mod.rs index 9501640b..7ccf81c0 100644 --- a/src/queue/mod.rs +++ b/src/queue/mod.rs @@ -1,5 +1,6 @@ pub mod analytics; pub mod maxmind; +pub mod moderation; pub mod payouts; pub mod session; pub mod socket; diff --git a/src/queue/moderation.rs b/src/queue/moderation.rs new file mode 100644 index 00000000..761b6f4f --- /dev/null +++ b/src/queue/moderation.rs @@ -0,0 +1,881 @@ +use crate::auth::checks::filter_visible_versions; +use crate::database; +use crate::database::models::notification_item::NotificationBuilder; +use crate::database::models::thread_item::ThreadMessageBuilder; +use crate::database::redis::RedisPool; +use 
crate::models::ids::ProjectId; +use crate::models::notifications::NotificationBody; +use crate::models::pack::{PackFile, PackFileHash, PackFormat}; +use crate::models::projects::ProjectStatus; +use crate::models::threads::MessageBody; +use crate::routes::ApiError; +use dashmap::DashSet; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use std::collections::HashMap; +use std::io::{Cursor, Read}; +use std::time::Duration; +use zip::ZipArchive; + +const AUTOMOD_ID: i64 = 0; + +pub struct ModerationMessages { + pub messages: Vec, + pub version_specific: HashMap>, +} + +impl ModerationMessages { + pub fn is_empty(&self) -> bool { + self.messages.is_empty() && self.version_specific.is_empty() + } + + pub fn markdown(&self, auto_mod: bool) -> String { + let mut str = "".to_string(); + + for message in &self.messages { + str.push_str(&format!("## {}\n", message.header())); + str.push_str(&format!("{}\n", message.body())); + str.push('\n'); + } + + for (version_num, messages) in &self.version_specific { + for message in messages { + str.push_str(&format!( + "## Version {}: {}\n", + version_num, + message.header() + )); + str.push_str(&format!("{}\n", message.body())); + str.push('\n'); + } + } + + if auto_mod { + str.push_str("
\n\n"); + str.push_str("🤖 This is an automated message generated by AutoMod (BETA). If you are facing issues, please [contact support](https://support.modrinth.com)."); + } + + str + } + + pub fn should_reject(&self, first_time: bool) -> bool { + self.messages.iter().any(|x| x.rejectable(first_time)) + || self + .version_specific + .values() + .any(|x| x.iter().any(|x| x.rejectable(first_time))) + } + + pub fn approvable(&self) -> bool { + self.messages.iter().all(|x| x.approvable()) + && self + .version_specific + .values() + .all(|x| x.iter().all(|x| x.approvable())) + } +} + +pub enum ModerationMessage { + MissingGalleryImage, + NoPrimaryFile, + NoSideTypes, + PackFilesNotAllowed { + files: HashMap, + incomplete: bool, + }, + MissingLicense, + MissingCustomLicenseUrl { + license: String, + }, +} + +impl ModerationMessage { + pub fn rejectable(&self, first_time: bool) -> bool { + match self { + ModerationMessage::NoPrimaryFile => true, + ModerationMessage::PackFilesNotAllowed { files, incomplete } => { + (!incomplete || first_time) + && files.values().any(|x| match x.status { + ApprovalType::Yes => false, + ApprovalType::WithAttributionAndSource => false, + ApprovalType::WithAttribution => false, + ApprovalType::No => first_time, + ApprovalType::PermanentNo => true, + ApprovalType::Unidentified => first_time, + }) + } + ModerationMessage::MissingGalleryImage => true, + ModerationMessage::MissingLicense => true, + ModerationMessage::MissingCustomLicenseUrl { .. } => true, + ModerationMessage::NoSideTypes => true, + } + } + + pub fn approvable(&self) -> bool { + match self { + ModerationMessage::NoPrimaryFile => false, + ModerationMessage::PackFilesNotAllowed { files, .. } => { + files.values().all(|x| x.status.approved()) + } + ModerationMessage::MissingGalleryImage => false, + ModerationMessage::MissingLicense => false, + ModerationMessage::MissingCustomLicenseUrl { .. 
} => false, + ModerationMessage::NoSideTypes => false, + } + } + + pub fn header(&self) -> &'static str { + match self { + ModerationMessage::NoPrimaryFile => "No primary files", + ModerationMessage::PackFilesNotAllowed { .. } => "Copyrighted Content", + ModerationMessage::MissingGalleryImage => "Missing Gallery Images", + ModerationMessage::MissingLicense => "Missing License", + ModerationMessage::MissingCustomLicenseUrl { .. } => "Missing License URL", + ModerationMessage::NoSideTypes => "Missing Environment Information", + } + } + + pub fn body(&self) -> String { + match self { + ModerationMessage::NoPrimaryFile => "Please attach a file to this version. All files on Modrinth must have files associated with their versions.\n".to_string(), + ModerationMessage::PackFilesNotAllowed { files, .. } => { + let mut str = "".to_string(); + str.push_str("This pack redistributes copyrighted material. Please refer to [Modrinth's guide on obtaining modpack permissions](https://docs.modrinth.com/modpacks/permissions) for more information.\n\n"); + + let mut attribute_mods = Vec::new(); + let mut no_mods = Vec::new(); + let mut permanent_no_mods = Vec::new(); + let mut unidentified_mods = Vec::new(); + for (_, approval) in files.iter() { + match approval.status { + ApprovalType::Yes | ApprovalType::WithAttributionAndSource => {} + ApprovalType::WithAttribution => attribute_mods.push(&approval.file_name), + ApprovalType::No => no_mods.push(&approval.file_name), + ApprovalType::PermanentNo => permanent_no_mods.push(&approval.file_name), + ApprovalType::Unidentified => unidentified_mods.push(&approval.file_name), + } + } + + fn print_mods(projects: Vec<&String>, headline: &str, val: &mut String) { + if projects.is_empty() { return } + + val.push_str(&format!("{headline}\n\n")); + + for project in &projects { + let additional_text = if project.contains("ftb-quests") { + Some("Heracles") + } else if project.contains("ftb-ranks") || project.contains("ftb-essentials") { + 
Some("Prometheus") + } else if project.contains("ftb-teams") { + Some("Argonauts") + } else if project.contains("ftb-chunks") { + Some("Cadmus") + } else { + None + }; + + val.push_str(&if let Some(additional_text) = additional_text { + format!("- {project}(consider using [{additional_text}](https://modrinth.com/mod/{}) instead)\n", additional_text.to_lowercase()) + } else { + format!("- {project}\n") + }) + } + + if !projects.is_empty() { + val.push('\n'); + } + } + + print_mods(attribute_mods, "The following content has attribution requirements, meaning that you must link back to the page where you originally found this content in your modpack description or version changelog (e.g. linking a mod's CurseForge page if you got it from CurseForge):", &mut str); + print_mods(no_mods, "The following content is not allowed in Modrinth modpacks due to licensing restrictions. Please contact the author(s) directly for permission or remove the content from your modpack:", &mut str); + print_mods(permanent_no_mods, "The following content is not allowed in Modrinth modpacks, regardless of permission obtained. This may be because it breaks Modrinth's content rules or because the authors, upon being contacted for permission, have declined. Please remove the content from your modpack:", &mut str); + print_mods(unidentified_mods, "The following content could not be identified. Please provide proof of its origin along with proof that you have permission to include it:", &mut str); + + str + }, + ModerationMessage::MissingGalleryImage => "We ask that resource packs like yours show off their content using images in the Gallery, or optionally in the Description, in order to effectively and clearly inform users of the content in your pack per section 2.1 of [Modrinth's content rules](https://modrinth.com/legal/rules#general-expectations).\n +Keep in mind that you should:\n +- Set a featured image that best represents your pack. 
+- Ensure all your images have titles that accurately label the image, and optionally, details on the contents of the image in the images Description. +- Upload any relevant images in your Description to your Gallery tab for best results.".to_string(), + ModerationMessage::MissingLicense => "You must select a License before your project can be published publicly, having a License associated with your project is important to protecting your rights and allowing others to use your content as you intend. For more information, you can see our [Guide to Licensing Mods]().".to_string(), + ModerationMessage::MissingCustomLicenseUrl { license } => format!("It looks like you've selected the License \"{license}\" without providing a valid License link. When using a custom License you must provide a link directly to the License in the License Link field."), + ModerationMessage::NoSideTypes => "Your project's side types are currently set to Unknown on both sides. Please set accurate side types!".to_string(), + } + } +} + +pub struct AutomatedModerationQueue { + pub projects: DashSet, +} + +impl Default for AutomatedModerationQueue { + fn default() -> Self { + Self { + projects: DashSet::new(), + } + } +} + +impl AutomatedModerationQueue { + pub async fn task(&self, pool: PgPool, redis: RedisPool) { + loop { + let projects = self.projects.clone(); + self.projects.clear(); + + for project in projects { + async { + let project = + database::Project::get_id((project).into(), &pool, &redis).await?; + + if let Some(project) = project { + let res = async { + let mut mod_messages = ModerationMessages { + messages: vec![], + version_specific: HashMap::new(), + }; + + if project.project_types.iter().any(|x| ["mod", "modpack"].contains(&&**x)) && !project.aggregate_version_fields.iter().any(|x| ["server_only", "client_only", "client_and_server", "singleplayer"].contains(&&*x.field_name)) { + mod_messages.messages.push(ModerationMessage::NoSideTypes); + } + + if project.inner.license == 
"LicenseRef-Unknown" || project.inner.license == "LicenseRef-" { + mod_messages.messages.push(ModerationMessage::MissingLicense); + } else if project.inner.license.starts_with("LicenseRef-") && project.inner.license != "LicenseRef-All-Rights-Reserved" && project.inner.license_url.is_none() { + mod_messages.messages.push(ModerationMessage::MissingCustomLicenseUrl { license: project.inner.license.clone() }); + } + + if (project.project_types.contains(&"resourcepack".to_string()) || project.project_types.contains(&"shader".to_string())) && project.gallery_items.is_empty() { + mod_messages.messages.push(ModerationMessage::MissingGalleryImage); + } + + let versions = + database::Version::get_many(&project.versions, &pool, &redis) + .await? + .into_iter() + // we only support modpacks at this time + .filter(|x| x.project_types.contains(&"modpack".to_string())) + .collect::>(); + + for version in versions { + let primary_file = version.files.iter().find_or_first(|x| x.primary); + + if let Some(primary_file) = primary_file { + let data = reqwest::get(&primary_file.url).await?.bytes().await?; + + let reader = Cursor::new(data); + let mut zip = ZipArchive::new(reader)?; + + let pack: PackFormat = { + let mut file = + if let Ok(file) = zip.by_name("modrinth.index.json") { + file + } else { + continue; + }; + + let mut contents = String::new(); + file.read_to_string(&mut contents)?; + + serde_json::from_str(&contents)? 
+ }; + + // sha1, pack file, file path, murmur + let mut hashes: Vec<( + String, + Option, + String, + Option, + )> = pack + .files + .clone() + .into_iter() + .flat_map(|x| { + let hash = x.hashes.get(&PackFileHash::Sha1); + + if let Some(hash) = hash { + let path = x.path.clone(); + Some((hash.clone(), Some(x), path, None)) + } else { + None + } + }) + .collect(); + + for i in 0..zip.len() { + let mut file = zip.by_index(i)?; + + if file.name().starts_with("overrides/mods") + || file.name().starts_with("client-overrides/mods") + || file.name().starts_with("server-overrides/mods") + || file.name().starts_with("overrides/shaderpacks") + || file.name().starts_with("client-overrides/shaderpacks") + || file.name().starts_with("overrides/resourcepacks") + || file.name().starts_with("client-overrides/resourcepacks") + { + if file.name().matches('/').count() > 2 || file.name().ends_with(".txt") { + continue; + } + + let mut contents = Vec::new(); + file.read_to_end(&mut contents)?; + + let hash = sha1::Sha1::from(&contents).hexdigest(); + let murmur = hash_flame_murmur32(contents); + + hashes.push(( + hash, + None, + file.name().to_string(), + Some(murmur), + )); + } + } + + let files = database::models::Version::get_files_from_hash( + "sha1".to_string(), + &hashes.iter().map(|x| x.0.clone()).collect::>(), + &pool, + &redis, + ) + .await?; + + let version_ids = + files.iter().map(|x| x.version_id).collect::>(); + let versions_data = filter_visible_versions( + database::models::Version::get_many( + &version_ids, + &pool, + &redis, + ) + .await?, + &None, + &pool, + &redis, + ) + .await?; + + let mut final_hashes = HashMap::new(); + + for version in versions_data { + for file in + files.iter().filter(|x| x.version_id == version.id.into()) + { + if let Some(hash) = file.hashes.get(&"sha1".to_string()) { + if let Some((index, (sha1, _, file_name, _))) = hashes + .iter() + .enumerate() + .find(|(_, (value, _, _, _))| value == hash) + { + final_hashes + .insert(sha1.clone(), 
IdentifiedFile { status: ApprovalType::Yes, file_name: file_name.clone() }); + + hashes.remove(index); + } + } + } + } + + // All files are on Modrinth, so we don't send any messages + if hashes.is_empty() { + sqlx::query!( + " + UPDATE files + SET metadata = $1 + WHERE id = $2 + ", + serde_json::to_value(&MissingMetadata { + identified: final_hashes, + flame_files: Default::default(), + unknown_files: Default::default(), + })?, + primary_file.id.0 + ) + .execute(&pool) + .await?; + + continue; + } + + let rows = sqlx::query!( + " + SELECT encode(mef.sha1, 'escape') sha1, mel.status status + FROM moderation_external_files mef + INNER JOIN moderation_external_licenses mel ON mef.external_license_id = mel.id + WHERE mef.sha1 = ANY($1) + ", + &hashes.iter().map(|x| x.0.as_bytes().to_vec()).collect::>() + ) + .fetch_all(&pool) + .await?; + + for row in rows { + if let Some(sha1) = row.sha1 { + if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) { + final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) }); + hashes.remove(index); + } + } + } + + if hashes.is_empty() { + let metadata = MissingMetadata { + identified: final_hashes, + flame_files: Default::default(), + unknown_files: Default::default(), + }; + + sqlx::query!( + " + UPDATE files + SET metadata = $1 + WHERE id = $2 + ", + serde_json::to_value(&metadata)?, + primary_file.id.0 + ) + .execute(&pool) + .await?; + + if metadata.identified.values().any(|x| x.status != ApprovalType::Yes && x.status != ApprovalType::WithAttributionAndSource) { + let val = mod_messages.version_specific.entry(version.inner.version_number).or_default(); + val.push(ModerationMessage::PackFilesNotAllowed {files: metadata.identified, incomplete: false }); + } + continue; + } + + let client = reqwest::Client::new(); + let res = client + 
.post(format!("{}/v1/fingerprints", dotenvy::var("FLAME_ANVIL_URL")?)) + .json(&serde_json::json!({ + "fingerprints": hashes.iter().filter_map(|x| x.3).collect::>() + })) + .send() + .await?.text() + .await?; + + let flame_hashes = serde_json::from_str::>(&res)? + .data + .exact_matches + .into_iter() + .map(|x| x.file) + .collect::>(); + + let mut flame_files = Vec::new(); + + for file in flame_hashes { + let hash = file + .hashes + .iter() + .find(|x| x.algo == 1) + .map(|x| x.value.clone()); + + if let Some(hash) = hash { + flame_files.push((hash, file.mod_id)) + } + } + + let rows = sqlx::query!( + " + SELECT mel.id, mel.flame_project_id, mel.status status + FROM moderation_external_licenses mel + WHERE mel.flame_project_id = ANY($1) + ", + &flame_files.iter().map(|x| x.1 as i32).collect::>() + ) + .fetch_all(&pool).await?; + + let mut insert_hashes = Vec::new(); + let mut insert_ids = Vec::new(); + + for row in rows { + if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) { + if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) { + final_hashes.insert(sha1.clone(), IdentifiedFile { + file_name: file_name.clone(), + status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified), + }); + + insert_hashes.push(hash.clone().as_bytes().to_vec()); + insert_ids.push(row.id); + + hashes.remove(index); + flame_files.remove(curse_index); + } + } + } + + if !insert_ids.is_empty() && !insert_hashes.is_empty() { + sqlx::query!( + " + INSERT INTO moderation_external_files (sha1, external_license_id) + SELECT * FROM UNNEST ($1::bytea[], $2::bigint[]) + ON CONFLICT (sha1) DO NOTHING + ", + &insert_hashes[..], + &insert_ids[..] 
+ ) + .execute(&pool) + .await?; + } + + if hashes.is_empty() { + let metadata = MissingMetadata { + identified: final_hashes, + flame_files: Default::default(), + unknown_files: Default::default(), + }; + + sqlx::query!( + " + UPDATE files + SET metadata = $1 + WHERE id = $2 + ", + serde_json::to_value(&metadata)?, + primary_file.id.0 + ) + .execute(&pool) + .await?; + + if metadata.identified.values().any(|x| x.status != ApprovalType::Yes && x.status != ApprovalType::WithAttributionAndSource) { + let val = mod_messages.version_specific.entry(version.inner.version_number).or_default(); + val.push(ModerationMessage::PackFilesNotAllowed {files: metadata.identified, incomplete: false }); + } + + continue; + } + + let flame_projects = if flame_files.is_empty() { + Vec::new() + } else { + let res = client + .post(format!("{}v1/mods", dotenvy::var("FLAME_ANVIL_URL")?)) + .json(&serde_json::json!({ + "modIds": flame_files.iter().map(|x| x.1).collect::>() + })) + .send() + .await? + .text() + .await?; + + serde_json::from_str::>>(&res)?.data + }; + + let mut missing_metadata = MissingMetadata { + identified: final_hashes, + flame_files: HashMap::new(), + unknown_files: HashMap::new(), + }; + + for (sha1, _pack_file, file_name, _mumur2) in hashes { + let flame_file = flame_files.iter().find(|x| x.0 == sha1); + + if let Some((_, flame_project_id)) = flame_file { + if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) { + missing_metadata.flame_files.insert(sha1, MissingMetadataFlame { + title: project.name.clone(), + file_name, + url: project.links.website_url.clone(), + id: *flame_project_id, + }); + + continue; + } + } + + missing_metadata.unknown_files.insert(sha1, file_name); + } + + sqlx::query!( + " + UPDATE files + SET metadata = $1 + WHERE id = $2 + ", + serde_json::to_value(&missing_metadata)?, + primary_file.id.0 + ) + .execute(&pool) + .await?; + + if missing_metadata.identified.values().any(|x| x.status != ApprovalType::Yes && x.status 
!= ApprovalType::WithAttributionAndSource) { + let val = mod_messages.version_specific.entry(version.inner.version_number).or_default(); + val.push(ModerationMessage::PackFilesNotAllowed {files: missing_metadata.identified, incomplete: true }); + } + } else { + let val = mod_messages.version_specific.entry(version.inner.version_number).or_default(); + val.push(ModerationMessage::NoPrimaryFile); + } + } + + if !mod_messages.is_empty() { + let first_time = database::models::Thread::get(project.thread_id, &pool).await? + .map(|x| x.messages.iter().all(|x| x.author_id == Some(database::models::UserId(AUTOMOD_ID)) || x.hide_identity)) + .unwrap_or(true); + + let mut transaction = pool.begin().await?; + let id = ThreadMessageBuilder { + author_id: Some(database::models::UserId(AUTOMOD_ID)), + body: MessageBody::Text { + body: mod_messages.markdown(true), + private: false, + replying_to: None, + associated_images: vec![], + }, + thread_id: project.thread_id, + hide_identity: false, + } + .insert(&mut transaction) + .await?; + + let members = database::models::TeamMember::get_from_team_full( + project.inner.team_id, + &pool, + &redis, + ) + .await?; + + if mod_messages.should_reject(first_time) { + ThreadMessageBuilder { + author_id: Some(database::models::UserId(AUTOMOD_ID)), + body: MessageBody::StatusChange { + new_status: ProjectStatus::Rejected, + old_status: project.inner.status, + }, + thread_id: project.thread_id, + hide_identity: false, + } + .insert(&mut transaction) + .await?; + + NotificationBuilder { + body: NotificationBody::StatusChange { + project_id: project.inner.id.into(), + old_status: project.inner.status, + new_status: ProjectStatus::Rejected, + }, + } + .insert_many(members.into_iter().map(|x| x.user_id).collect(), &mut transaction, &redis) + .await?; + + if let Ok(webhook_url) = dotenvy::var("MODERATION_DISCORD_WEBHOOK") { + crate::util::webhook::send_discord_webhook( + project.inner.id.into(), + &pool, + &redis, + webhook_url, + Some( + format!( + 
"**[AutoMod]({}/user/AutoMod)** changed project status from **{}** to **Rejected**", + dotenvy::var("SITE_URL")?, + &project.inner.status.as_friendly_str(), + ) + .to_string(), + ), + ) + .await + .ok(); + } + + sqlx::query!( + " + UPDATE mods + SET status = 'rejected' + WHERE id = $1 + ", + project.inner.id.0 + ) + .execute(&pool) + .await?; + + database::models::Project::clear_cache( + project.inner.id, + project.inner.slug.clone(), + None, + &redis, + ) + .await?; + } else { + NotificationBuilder { + body: NotificationBody::ModeratorMessage { + thread_id: project.thread_id.into(), + message_id: id.into(), + project_id: Some(project.inner.id.into()), + report_id: None, + }, + } + .insert_many( + members.into_iter().map(|x| x.user_id).collect(), + &mut transaction, + &redis, + ) + .await?; + } + + transaction.commit().await?; + } + + Ok::<(), ApiError>(()) + }.await; + + if let Err(err) = res { + let err = err.as_api_error(); + + let mut str = String::new(); + str.push_str("## Internal AutoMod Error\n\n"); + str.push_str(&format!("Error code: {}\n\n", err.error)); + str.push_str(&format!("Error description: {}\n\n", err.description)); + + let mut transaction = pool.begin().await?; + ThreadMessageBuilder { + author_id: Some(database::models::UserId(AUTOMOD_ID)), + body: MessageBody::Text { + body: str, + private: true, + replying_to: None, + associated_images: vec![], + }, + thread_id: project.thread_id, + hide_identity: false, + } + .insert(&mut transaction) + .await?; + transaction.commit().await?; + } + } + + Ok::<(), ApiError>(()) + }.await.ok(); + } + + tokio::time::sleep(Duration::from_secs(5)).await + } + } +} + +#[derive(Serialize, Deserialize)] +pub struct MissingMetadata { + pub identified: HashMap, + pub flame_files: HashMap, + pub unknown_files: HashMap, +} + +#[derive(Serialize, Deserialize)] +pub struct IdentifiedFile { + pub file_name: String, + pub status: ApprovalType, +} + +#[derive(Serialize, Deserialize)] +pub struct MissingMetadataFlame { + pub 
title: String, + pub file_name: String, + pub url: String, + pub id: u32, +} + +#[derive(Deserialize, Serialize, Copy, Clone, PartialEq, Eq, Debug)] +#[serde(rename_all = "kebab-case")] +pub enum ApprovalType { + Yes, + WithAttributionAndSource, + WithAttribution, + No, + PermanentNo, + Unidentified, +} + +impl ApprovalType { + fn approved(&self) -> bool { + match self { + ApprovalType::Yes => true, + ApprovalType::WithAttributionAndSource => true, + ApprovalType::WithAttribution => true, + ApprovalType::No => false, + ApprovalType::PermanentNo => false, + ApprovalType::Unidentified => false, + } + } + + pub fn from_string(string: &str) -> Option { + match string { + "yes" => Some(ApprovalType::Yes), + "with-attribution-and-source" => Some(ApprovalType::WithAttributionAndSource), + "with-attribution" => Some(ApprovalType::WithAttribution), + "no" => Some(ApprovalType::No), + "permanent-no" => Some(ApprovalType::PermanentNo), + "unidentified" => Some(ApprovalType::Unidentified), + _ => None, + } + } + + pub(crate) fn as_str(&self) -> &'static str { + match self { + ApprovalType::Yes => "yes", + ApprovalType::WithAttributionAndSource => "with-attribution-and-source", + ApprovalType::WithAttribution => "with-attribution", + ApprovalType::No => "no", + ApprovalType::PermanentNo => "permanent-no", + ApprovalType::Unidentified => "unidentified", + } + } +} + +#[derive(Deserialize, Serialize)] +pub struct FlameResponse { + pub data: T, +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FingerprintResponse { + pub exact_matches: Vec, +} + +#[derive(Deserialize, Serialize)] +pub struct FingerprintMatch { + pub id: u32, + pub file: FlameFile, +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FlameFile { + pub id: u32, + pub mod_id: u32, + pub hashes: Vec, + pub file_fingerprint: u32, +} + +#[derive(Deserialize, Serialize, Debug)] +pub struct FlameFileHash { + pub value: String, + pub algo: u32, +} + 
+#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FlameProject { + pub id: u32, + pub name: String, + pub slug: String, + pub links: FlameLinks, +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FlameLinks { + pub website_url: String, +} + +fn hash_flame_murmur32(input: Vec) -> u32 { + murmur2::murmur2( + &input + .into_iter() + .filter(|x| *x != 9 && *x != 10 && *x != 13 && *x != 32) + .collect::>(), + 1, + ) +} diff --git a/src/queue/payouts.rs b/src/queue/payouts.rs index fe341616..eed9079e 100644 --- a/src/queue/payouts.rs +++ b/src/queue/payouts.rs @@ -1,4 +1,3 @@ -use crate::models::ids::UserId; use crate::models::payouts::{ PayoutDecimal, PayoutInterval, PayoutMethod, PayoutMethodFee, PayoutMethodType, }; @@ -8,6 +7,7 @@ use crate::{database::redis::RedisPool, models::projects::MonetizationStatus}; use base64::Engine; use chrono::{DateTime, Datelike, Duration, Utc, Weekday}; use dashmap::DashMap; +use futures::TryStreamExt; use reqwest::Method; use rust_decimal::Decimal; use serde::de::DeserializeOwned; @@ -16,13 +16,12 @@ use serde_json::Value; use sqlx::postgres::PgQueryResult; use sqlx::PgPool; use std::collections::HashMap; -use std::sync::Arc; use tokio::sync::{Mutex, RwLock}; pub struct PayoutsQueue { credential: RwLock>, payout_options: RwLock>, - payouts_locks: DashMap>>, + pub payouts_locks: Mutex<()>, } #[derive(Clone)] @@ -49,7 +48,7 @@ impl PayoutsQueue { PayoutsQueue { credential: RwLock::new(None), payout_options: RwLock::new(None), - payouts_locks: DashMap::new(), + payouts_locks: Mutex::new(()), } } @@ -346,8 +345,14 @@ impl PayoutsQueue { "OEFTMSBA5ELH", "A3CQK6UHNV27", ]; - const SUPPORTED_METHODS: &[&str] = - &["merchant_cards", "visa", "bank", "ach", "visa_card"]; + const SUPPORTED_METHODS: &[&str] = &[ + "merchant_cards", + "merchant_card", + "visa", + "bank", + "ach", + "visa_card", + ]; if !SUPPORTED_METHODS.contains(&&*product.category) || 
BLACKLISTED_IDS.contains(&&*product.id) @@ -506,13 +511,6 @@ impl PayoutsQueue { Ok(options.options) } - - pub fn lock_user_payouts(&self, user_id: UserId) -> Arc> { - self.payouts_locks - .entry(user_id) - .or_insert_with(|| Arc::new(Mutex::new(()))) - .clone() - } } pub async fn process_payout( @@ -552,7 +550,7 @@ pub async fn process_payout( r#" SELECT COUNT(1) page_views, project_id FROM views - WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0) + WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0) AND (monetized = TRUE) GROUP BY project_id ORDER BY page_views DESC "#, @@ -561,7 +559,7 @@ pub async fn process_payout( .bind(end.timestamp()) .fetch_all::(), client - .query("SELECT COUNT(1) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)") + .query("SELECT COUNT(1) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0) AND (monetized = TRUE)") .bind(start.timestamp()) .bind(end.timestamp()) .fetch_one::(), @@ -631,12 +629,22 @@ pub async fn process_payout( FROM mods m INNER JOIN organizations o ON m.organization_id = o.id INNER JOIN team_members tm on o.team_id = tm.team_id AND tm.accepted = TRUE - WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.organization_id IS NOT NULL + WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.status = ANY($3) AND m.organization_id IS NOT NULL ", &project_ids, MonetizationStatus::Monetized.as_str(), + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| !x.is_hidden()) + .map(|x| x.to_string()) + .collect::>(), ) - .fetch_all(&mut *transaction) + .fetch(&mut *transaction) + .try_fold(DashMap::new(), |acc: DashMap>, r| { + acc.entry(r.id) + .or_default() + .insert(r.user_id, r.payouts_split); + async move { Ok(acc) } + }) .await?; let project_team_members = sqlx::query!( @@ -644,25 +652,36 @@ pub async fn process_payout( SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split FROM mods m INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = 
TRUE - WHERE m.id = ANY($1) AND m.monetization_status = $2 + WHERE m.id = ANY($1) AND m.monetization_status = $2 AND m.status = ANY($3) ", &project_ids, MonetizationStatus::Monetized.as_str(), + &*crate::models::projects::ProjectStatus::iterator() + .filter(|x| !x.is_hidden()) + .map(|x| x.to_string()) + .collect::>(), + ) + .fetch(&mut *transaction) + .try_fold( + DashMap::new(), + |acc: DashMap>, r| { + acc.entry(r.id) + .or_default() + .insert(r.user_id, r.payouts_split); + async move { Ok(acc) } + }, ) - .fetch_all(&mut *transaction) .await?; for project_id in project_ids { let team_members: HashMap = project_team_members - .iter() - .filter(|r| r.id == project_id) - .map(|r| (r.user_id, r.payouts_split)) - .collect(); + .remove(&project_id) + .unwrap_or((0, HashMap::new())) + .1; let org_team_members: HashMap = project_org_members - .iter() - .filter(|r| r.id == project_id) - .map(|r| (r.user_id, r.payouts_split)) - .collect(); + .remove(&project_id) + .unwrap_or((0, HashMap::new())) + .1; let mut all_team_members = vec![]; @@ -707,6 +726,7 @@ pub async fn process_payout( let mut clear_cache_users = Vec::new(); let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) = (Vec::new(), Vec::new(), Vec::new(), Vec::new()); + let mut update_user_balance: HashMap = HashMap::new(); for (id, project) in projects_map { if let Some(value) = &multipliers.values.get(&(id as u64)) { let project_multiplier: Decimal = @@ -724,17 +744,7 @@ pub async fn process_payout( insert_payouts.push(payout); insert_starts.push(start); - sqlx::query!( - " - UPDATE users - SET balance = balance + $1 - WHERE id = $2 - ", - payout, - user_id - ) - .execute(&mut *transaction) - .await?; + *update_user_balance.entry(user_id).or_default() += payout; clear_cache_users.push(user_id); } @@ -743,6 +753,26 @@ pub async fn process_payout( } } + let (mut update_user_ids, mut update_user_balances) = (Vec::new(), Vec::new()); + + for (user_id, payout) in 
update_user_balance { + update_user_ids.push(user_id); + update_user_balances.push(payout); + } + + sqlx::query!( + " + UPDATE users u + SET balance = u.balance + v.amount + FROM unnest($1::BIGINT[], $2::NUMERIC[]) AS v(id, amount) + WHERE u.id = v.id + ", + &update_user_ids, + &update_user_balances + ) + .execute(&mut *transaction) + .await?; + sqlx::query!( " INSERT INTO payouts_values (user_id, mod_id, amount, created) diff --git a/src/routes/analytics.rs b/src/routes/analytics.rs index e63e3f11..41fa4244 100644 --- a/src/routes/analytics.rs +++ b/src/routes/analytics.rs @@ -56,6 +56,7 @@ pub struct UrlInput { } //this route should be behind the cloudflare WAF to prevent non-browsers from calling it +#[axum::debug_handler] pub async fn page_view_ingest( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, @@ -115,6 +116,7 @@ pub async fn page_view_ingest( .into_iter() .filter(|x| !FILTERED_HEADERS.contains(&&*x.0)) .collect(), + monetized: true, }; if let Some(segments) = url.path_segments() { diff --git a/src/routes/internal/mod.rs b/src/routes/internal/mod.rs index 67f59f30..60105654 100644 --- a/src/routes/internal/mod.rs +++ b/src/routes/internal/mod.rs @@ -1,5 +1,6 @@ pub(crate) mod admin; pub mod flows; +pub mod moderation; pub mod pats; pub mod session; @@ -18,6 +19,7 @@ pub fn config() -> Router { .merge(session::config()) .merge(flows::config()) .merge(pats::config()) + .merge(moderation::config()) .layer(default_cors()), ) } diff --git a/src/routes/internal/moderation.rs b/src/routes/internal/moderation.rs new file mode 100644 index 00000000..d6324a52 --- /dev/null +++ b/src/routes/internal/moderation.rs @@ -0,0 +1,329 @@ +use super::ApiError; +use crate::database; +use crate::database::redis::RedisPool; +use crate::models::ids::random_base62; +use crate::models::projects::{Project, ProjectStatus}; +use crate::queue::moderation::{ApprovalType, IdentifiedFile, MissingMetadata}; +use crate::queue::session::AuthQueue; +use 
crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; +use serde::Deserialize; +use sqlx::PgPool; +use std::collections::HashMap; +use std::net::SocketAddr; +use std::sync::Arc; +use axum::http::{HeaderMap, StatusCode}; +use axum::Router; +use axum::routing::{get, post}; +use crate::util::extract::{ConnectInfo, Extension, Json, Path, Query}; + + +pub fn config() -> Router { + Router::new() + .nest( + "/moderation", + Router::new() + .route("/projects", get(get_projects)) + .route("/project/:id", get(get_project_meta)) + .route("/project", post(set_project_meta)), + ) +} + +#[derive(Deserialize)] +pub struct ResultCount { + #[serde(default = "default_count")] + pub count: i16, +} + +fn default_count() -> i16 { + 100 +} + +pub async fn get_projects( + ConnectInfo(addr): ConnectInfo, + headers: HeaderMap, + Extension(pool): Extension, + Extension(redis): Extension, + Query(count): Query, + Extension(session_queue): Extension>, +) -> Result>, ApiError> { + check_is_moderator_from_headers( + &addr, + &headers, + &pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await?; + + use futures::stream::TryStreamExt; + + let project_ids = sqlx::query!( + " + SELECT id FROM mods + WHERE status = $1 + ORDER BY queued ASC + LIMIT $2; + ", + ProjectStatus::Processing.as_str(), + count.count as i64 + ) + .fetch_many(&pool) + .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) }) + .try_collect::>() + .await?; + + let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &pool, &redis) + .await? 
+ .into_iter() + .map(crate::models::projects::Project::from) + .collect(); + + Ok(Json(projects)) +} + +pub async fn get_project_meta( + ConnectInfo(addr): ConnectInfo, + headers: HeaderMap, + Extension(pool): Extension, + Extension(redis): Extension, + Extension(session_queue): Extension>, + Path(project_id): Path, +) -> Result, ApiError> { + check_is_moderator_from_headers( + &addr, + &headers, + &pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_READ]), + ) + .await?; + + let project = database::models::Project::get(&project_id, &pool, &redis).await?; + + if let Some(project) = project { + let rows = sqlx::query!( + " + SELECT + f.metadata, v.id version_id + FROM versions v + INNER JOIN files f ON f.version_id = v.id + WHERE v.mod_id = $1 + ", + project.inner.id.0 + ) + .fetch_all(&pool) + .await?; + + let mut merged = MissingMetadata { + identified: HashMap::new(), + flame_files: HashMap::new(), + unknown_files: HashMap::new(), + }; + + let mut check_hashes = Vec::new(); + let mut check_flames = Vec::new(); + + for row in rows { + if let Some(metadata) = row + .metadata + .and_then(|x| serde_json::from_value::(x).ok()) + { + merged.identified.extend(metadata.identified); + merged.flame_files.extend(metadata.flame_files); + merged.unknown_files.extend(metadata.unknown_files); + + check_hashes.extend(merged.flame_files.keys().cloned()); + check_hashes.extend(merged.unknown_files.keys().cloned()); + check_flames.extend(merged.flame_files.values().map(|x| x.id as i32)); + } + } + + let rows = sqlx::query!( + " + SELECT encode(mef.sha1, 'escape') sha1, mel.status status + FROM moderation_external_files mef + INNER JOIN moderation_external_licenses mel ON mef.external_license_id = mel.id + WHERE mef.sha1 = ANY($1) + ", + &check_hashes + .iter() + .map(|x| x.as_bytes().to_vec()) + .collect::>() + ) + .fetch_all(&pool) + .await?; + + for row in rows { + if let Some(sha1) = row.sha1 { + if let Some(val) = merged.flame_files.remove(&sha1) { + 
merged.identified.insert( + sha1, + IdentifiedFile { + file_name: val.file_name, + status: ApprovalType::from_string(&row.status) + .unwrap_or(ApprovalType::Unidentified), + }, + ); + } else if let Some(val) = merged.unknown_files.remove(&sha1) { + merged.identified.insert( + sha1, + IdentifiedFile { + file_name: val, + status: ApprovalType::from_string(&row.status) + .unwrap_or(ApprovalType::Unidentified), + }, + ); + } + } + } + + let rows = sqlx::query!( + " + SELECT mel.id, mel.flame_project_id, mel.status status + FROM moderation_external_licenses mel + WHERE mel.flame_project_id = ANY($1) + ", + &check_flames, + ) + .fetch_all(&pool) + .await?; + + for row in rows { + if let Some(sha1) = merged + .flame_files + .iter() + .find(|x| Some(x.1.id as i32) == row.flame_project_id) + .map(|x| x.0.clone()) + { + if let Some(val) = merged.flame_files.remove(&sha1) { + merged.identified.insert( + sha1, + IdentifiedFile { + file_name: val.file_name.clone(), + status: ApprovalType::from_string(&row.status) + .unwrap_or(ApprovalType::Unidentified), + }, + ); + } + } + } + + Ok(Json(merged)) + } else { + Err(ApiError::NotFound) + } +} + +#[derive(Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum Judgement { + Flame { + id: i32, + status: ApprovalType, + link: String, + title: String, + }, + Unknown { + status: ApprovalType, + proof: Option, + link: Option, + title: Option, + }, +} + +pub async fn set_project_meta( + ConnectInfo(addr): ConnectInfo, + headers: HeaderMap, + Extension(pool): Extension, + Extension(redis): Extension, + Extension(session_queue): Extension>, + Json(judgements): Json>, +) -> Result { + check_is_moderator_from_headers( + &addr, + &headers, + &pool, + &redis, + &session_queue, + Some(&[Scopes::PROJECT_WRITE]), + ) + .await?; + + let mut transaction = pool.begin().await?; + + let mut ids = Vec::new(); + let mut titles = Vec::new(); + let mut statuses = Vec::new(); + let mut links = Vec::new(); + let mut proofs = Vec::new(); + 
let mut flame_ids = Vec::new(); + + let mut file_hashes = Vec::new(); + + for (hash, judgement) in judgements { + let id = random_base62(8); + + let (title, status, link, proof, flame_id) = match judgement { + Judgement::Flame { + id, + status, + link, + title, + } => ( + Some(title), + status, + Some(link), + Some("See Flame page/license for permission".to_string()), + Some(id), + ), + Judgement::Unknown { + status, + proof, + link, + title, + } => (title, status, link, proof, None), + }; + + ids.push(id as i64); + titles.push(title); + statuses.push(status.as_str()); + links.push(link); + proofs.push(proof); + flame_ids.push(flame_id); + file_hashes.push(hash); + } + + sqlx::query( + " + INSERT INTO moderation_external_licenses (id, title, status, link, proof, flame_project_id) + SELECT * FROM UNNEST ($1::bigint[], $2::varchar[], $3::varchar[], $4::varchar[], $5::varchar[], $6::integer[]) + " + ) + .bind(&ids[..]) + .bind(&titles[..]) + .bind(&statuses[..]) + .bind(&links[..]) + .bind(&proofs[..]) + .bind(&flame_ids[..]) + .execute(&mut *transaction) + .await?; + + sqlx::query( + " + INSERT INTO moderation_external_files (sha1, external_license_id) + SELECT * FROM UNNEST ($1::bytea[], $2::bigint[]) + ON CONFLICT (sha1) + DO NOTHING + ", + ) + .bind(&file_hashes[..]) + .bind(&ids[..]) + .execute(&mut *transaction) + .await?; + + transaction.commit().await?; + + Ok(StatusCode::NO_CONTENT) +} diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 496921da..11b3f822 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -98,6 +98,10 @@ pub enum ApiError { Mail(#[from] crate::auth::email::MailError), #[error("Error while rerouting request: {0}")] Reroute(#[from] reqwest::Error), + #[error("Unable to read Zip Archive: {0}")] + Zip(#[from] zip::result::ZipError), + #[error("IO Error: {0}")] + Io(#[from] std::io::Error), #[error("Resource not found")] NotFound, #[error("Could not read JSON body: {0}")] @@ -122,6 +126,51 @@ pub enum ApiError { RateLimitError(u128, 
u32), } +impl ApiError { + pub fn as_api_error(&self) -> crate::models::error::ApiError { + crate::models::error::ApiError { + error: match &self { + ApiError::Env(..) => "environment_error", + ApiError::SqlxDatabase(..) => "database_error", + ApiError::Database(..) => "database_error", + ApiError::Authentication(..) => "unauthorized", + ApiError::CustomAuthentication(..) => "unauthorized", + ApiError::Xml(..) => "xml_error", + ApiError::Json(..) => "json_error", + ApiError::Search(..) => "search_error", + ApiError::Indexing(..) => "indexing_error", + ApiError::FileHosting(..) => "file_hosting_error", + ApiError::InvalidInput(..) => "invalid_input", + ApiError::Validation(..) => "invalid_input", + ApiError::Payments(..) => "payments_error", + ApiError::Discord(..) => "discord_error", + ApiError::Turnstile => "turnstile_error", + ApiError::Decoding(..) => "decoding_error", + ApiError::ImageParse(..) => "invalid_image", + ApiError::PasswordHashing(..) => "password_hashing_error", + ApiError::PasswordStrengthCheck(..) => "strength_check_error", + ApiError::Mail(..) => "mail_error", + ApiError::Clickhouse(..) => "clickhouse_error", + ApiError::Reroute(..) => "reroute_error", + ApiError::NotFound => "not_found", + ApiError::JsonBody(..) => "invalid_json_body", + ApiError::Form(..) => "invalid_form_body", + ApiError::Query(..) => "invalid_query", + ApiError::Path(..) => "invalid_path", + ApiError::Extension(..) => "extension_error", + ApiError::String(..) => "invalid_body", + ApiError::Bytes(..) => "invalid_body", + ApiError::WebSocket(..) => "websocket_error", + ApiError::Multipart(..) => "invalid_multipart_body", + ApiError::RateLimitError(..) => "ratelimit_error", + ApiError::Zip(..) => "zip_error", + ApiError::Io(..) => "io_error", + }, + description: self.to_string(), + } + } +} + impl IntoResponse for ApiError { fn into_response(self) -> Response { let status_code = match &self { @@ -158,46 +207,11 @@ impl IntoResponse for ApiError { ApiError::WebSocket(..) 
=> StatusCode::BAD_REQUEST, ApiError::Multipart(..) => StatusCode::BAD_REQUEST, ApiError::RateLimitError(..) => StatusCode::TOO_MANY_REQUESTS, + ApiError::Zip(..) => StatusCode::BAD_REQUEST, + ApiError::Io(..) => StatusCode::BAD_REQUEST, }; - let error_message = crate::models::error::ApiError { - error: match &self { - ApiError::Env(..) => "environment_error", - ApiError::SqlxDatabase(..) => "database_error", - ApiError::Database(..) => "database_error", - ApiError::Authentication(..) => "unauthorized", - ApiError::CustomAuthentication(..) => "unauthorized", - ApiError::Xml(..) => "xml_error", - ApiError::Json(..) => "json_error", - ApiError::Search(..) => "search_error", - ApiError::Indexing(..) => "indexing_error", - ApiError::FileHosting(..) => "file_hosting_error", - ApiError::InvalidInput(..) => "invalid_input", - ApiError::Validation(..) => "invalid_input", - ApiError::Payments(..) => "payments_error", - ApiError::Discord(..) => "discord_error", - ApiError::Turnstile => "turnstile_error", - ApiError::Decoding(..) => "decoding_error", - ApiError::ImageParse(..) => "invalid_image", - ApiError::PasswordHashing(..) => "password_hashing_error", - ApiError::PasswordStrengthCheck(..) => "strength_check_error", - ApiError::Mail(..) => "mail_error", - ApiError::Clickhouse(..) => "clickhouse_error", - ApiError::Reroute(..) => "reroute_error", - ApiError::NotFound => "not_found", - ApiError::JsonBody(..) => "invalid_json_body", - ApiError::Form(..) => "invalid_form_body", - ApiError::Query(..) => "invalid_query", - ApiError::Path(..) => "invalid_path", - ApiError::Extension(..) => "extension_error", - ApiError::String(..) => "invalid_body", - ApiError::Bytes(..) => "invalid_body", - ApiError::WebSocket(..) => "websocket_error", - ApiError::Multipart(..) => "invalid_multipart_body", - ApiError::RateLimitError(..) 
=> "ratelimit_error", - }, - description: &self.to_string(), - }; + let error_message = self.as_api_error(); (status_code, Json(error_message)).into_response() } diff --git a/src/routes/not_found.rs b/src/routes/not_found.rs index 1dc7f914..f056be66 100644 --- a/src/routes/not_found.rs +++ b/src/routes/not_found.rs @@ -8,7 +8,7 @@ pub async fn not_found() -> (StatusCode, Json>) { StatusCode::NOT_FOUND, Json(ApiError { error: "not_found", - description: "the requested route does not exist", + description: "the requested route does not exist".to_string(), }), ) } @@ -19,7 +19,7 @@ pub async fn api_v1_gone() -> impl IntoResponse { Json( ApiError { error:"api_deprecated", - description: "You are using an application that uses an outdated version of Modrinth's API. Please either update it or switch to another application. For developers: https://docs.modrinth.com/docs/migrations/v1-to-v2/" + description: "You are using an application that uses an outdated version of Modrinth's API. Please either update it or switch to another application. 
For developers: https://docs.modrinth.com/docs/migrations/v1-to-v2/".to_string() } ) ) diff --git a/src/routes/v2/mod.rs b/src/routes/v2/mod.rs index 3b91849f..365ca5aa 100644 --- a/src/routes/v2/mod.rs +++ b/src/routes/v2/mod.rs @@ -1,4 +1,3 @@ -mod moderation; mod notifications; pub(crate) mod project_creation; mod projects; @@ -24,7 +23,6 @@ pub fn config() -> Router { .merge(super::internal::session::config()) .merge(super::internal::flows::config()) .merge(super::internal::pats::config()) - .merge(moderation::config()) .merge(notifications::config()) .merge(project_creation::config()) .merge(projects::config()) diff --git a/src/routes/v2/moderation.rs b/src/routes/v2/moderation.rs deleted file mode 100644 index b5a3b889..00000000 --- a/src/routes/v2/moderation.rs +++ /dev/null @@ -1,52 +0,0 @@ -use std::net::SocketAddr; -use std::sync::Arc; - -use crate::database::redis::RedisPool; -use crate::models::v2::projects::LegacyProject; -use crate::queue::session::AuthQueue; -use crate::routes::{v3, ApiErrorV2}; -use crate::util::extract::{ConnectInfo, Extension, Json, Query}; -use axum::http::HeaderMap; -use axum::routing::get; -use axum::Router; -use serde::Deserialize; -use sqlx::PgPool; -use v3::ApiError; - -pub fn config() -> Router { - Router::new().route("/moderation/projects", get(get_projects)) -} - -#[derive(Deserialize)] -pub struct ResultCount { - #[serde(default = "default_count")] - pub count: i16, -} - -fn default_count() -> i16 { - 100 -} - -pub async fn get_projects( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Extension(pool): Extension, - Extension(redis): Extension, - Query(count): Query, - Extension(session_queue): Extension>, -) -> Result>, ApiErrorV2> { - let Json(response) = v3::moderation::get_projects( - ConnectInfo(addr), - headers, - Extension(pool.clone()), - Extension(redis.clone()), - Query(v3::moderation::ResultCount { count: count.count }), - Extension(session_queue), - ) - .await?; - - let legacy_projects = 
LegacyProject::from_many(response, &pool, &redis) - .await - .map_err(ApiError::from)?; - Ok(Json(legacy_projects)) -} diff --git a/src/routes/v2/projects.rs b/src/routes/v2/projects.rs index 01612e6b..1afe9fcc 100644 --- a/src/routes/v2/projects.rs +++ b/src/routes/v2/projects.rs @@ -5,6 +5,7 @@ use crate::file_hosting::FileHost; use crate::models::projects::{Link, MonetizationStatus, ProjectStatus, SearchRequest, Version}; use crate::models::v2::projects::{DonationLink, LegacyProject, LegacySideType, LegacyVersion}; use crate::models::v2::search::LegacySearchResults; +use crate::queue::moderation::AutomatedModerationQueue; use crate::queue::session::AuthQueue; use crate::routes::v3::projects::ProjectIds; use crate::routes::{v2_reroute, v3, ApiErrorV2}; @@ -378,6 +379,7 @@ pub struct EditProject { pub monetization_status: Option, } +#[axum::debug_handler] pub async fn project_edit( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, @@ -386,6 +388,7 @@ pub async fn project_edit( Extension(search_config): Extension, Extension(redis): Extension, Extension(session_queue): Extension>, + Extension(moderation_queue): Extension>, Json(v2_new_project): Json, ) -> Result { let client_side = v2_new_project.client_side; @@ -494,6 +497,7 @@ pub async fn project_edit( Extension(search_config.clone()), Extension(redis.clone()), Extension(session_queue.clone()), + Extension(moderation_queue.clone()), Json(new_project), ) .await?; diff --git a/src/routes/v2/teams.rs b/src/routes/v2/teams.rs index e84c12f5..7a1c5076 100644 --- a/src/routes/v2/teams.rs +++ b/src/routes/v2/teams.rs @@ -167,6 +167,7 @@ pub struct NewTeamMember { pub ordering: i64, } +#[axum::debug_handler] pub async fn add_team_member( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, diff --git a/src/routes/v2/threads.rs b/src/routes/v2/threads.rs index 52854050..63e79f91 100644 --- a/src/routes/v2/threads.rs +++ b/src/routes/v2/threads.rs @@ -10,7 +10,7 @@ use crate::queue::session::AuthQueue; use 
crate::routes::{v3, ApiErrorV2}; use crate::util::extract::{ConnectInfo, Extension, Json, Path, Query}; use axum::http::{HeaderMap, StatusCode}; -use axum::routing::{delete, get, post}; +use axum::routing::{delete, get}; use axum::Router; use serde::Deserialize; use sqlx::PgPool; @@ -20,9 +20,7 @@ pub fn config() -> Router { .nest( "/thread", Router::new() - .route("/inbox", get(moderation_inbox)) .route("/:id", get(thread_get).post(thread_send_message)) - .route("/:id/read", post(thread_read)), ) .nest( "/message", @@ -31,6 +29,7 @@ pub fn config() -> Router { .route("/threads", get(threads_get)) } +#[axum::debug_handler] pub async fn thread_get( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, @@ -113,49 +112,6 @@ pub async fn thread_send_message( .await?) } -pub async fn moderation_inbox( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, -) -> Result>, ApiErrorV2> { - let Json(threads) = v3::threads::moderation_inbox( - ConnectInfo(addr), - headers, - Extension(pool), - Extension(redis), - Extension(session_queue), - ) - .await?; - - // Convert response to V2 format - let threads = threads - .into_iter() - .map(LegacyThread::from) - .collect::>(); - Ok(Json(threads)) -} - -pub async fn thread_read( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Path(info): Path, - Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, -) -> Result { - Ok(v3::threads::thread_read( - ConnectInfo(addr), - headers, - Path(info), - Extension(pool), - Extension(redis), - Extension(session_queue), - ) - .await?) 
-} - pub async fn message_delete( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, diff --git a/src/routes/v2/version_file.rs b/src/routes/v2/version_file.rs index fed7ba14..7da59338 100644 --- a/src/routes/v2/version_file.rs +++ b/src/routes/v2/version_file.rs @@ -251,35 +251,21 @@ pub struct ManyUpdateData { } pub async fn update_files( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, Extension(pool): Extension, Extension(redis): Extension, - Extension(session_queue): Extension>, Json(update_data): Json, ) -> Result>, ApiErrorV2> { - let mut loader_fields = HashMap::new(); - let mut game_versions = vec![]; - for gv in update_data.game_versions.into_iter().flatten() { - game_versions.push(serde_json::json!(gv.clone())); - } - if !game_versions.is_empty() { - loader_fields.insert("game_versions".to_string(), game_versions); - } let update_data = v3::version_file::ManyUpdateData { loaders: update_data.loaders.clone(), version_types: update_data.version_types.clone(), - loader_fields: Some(loader_fields), + game_versions: update_data.game_versions.clone(), algorithm: update_data.algorithm, hashes: update_data.hashes, }; let Json(map) = v3::version_file::update_files( - ConnectInfo(addr), - headers, Extension(pool), Extension(redis), - Extension(session_queue), Json(update_data), ) .await?; diff --git a/src/routes/v3/mod.rs b/src/routes/v3/mod.rs index e762c64f..372a4f6c 100644 --- a/src/routes/v3/mod.rs +++ b/src/routes/v3/mod.rs @@ -5,7 +5,6 @@ use axum::Router; pub mod analytics_get; pub mod collections; pub mod images; -pub mod moderation; pub mod notifications; pub mod organizations; pub mod payouts; @@ -30,7 +29,6 @@ pub fn config() -> Router { .merge(analytics_get::config()) .merge(collections::config()) .merge(images::config()) - .merge(moderation::config()) .merge(notifications::config()) .merge(organizations::config()) .merge(project_creation::config()) diff --git a/src/routes/v3/moderation.rs b/src/routes/v3/moderation.rs deleted file mode 100644 
index ded461d5..00000000 --- a/src/routes/v3/moderation.rs +++ /dev/null @@ -1,72 +0,0 @@ -use super::ApiError; -use crate::database; -use crate::database::redis::RedisPool; -use crate::models::projects::ProjectStatus; -use crate::queue::session::AuthQueue; -use crate::util::extract::{ConnectInfo, Extension, Json, Query}; -use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes}; -use axum::http::HeaderMap; -use axum::routing::get; -use axum::Router; -use serde::Deserialize; -use sqlx::PgPool; -use std::net::SocketAddr; -use std::sync::Arc; - -pub fn config() -> Router { - Router::new().route("/moderation/projects", get(get_projects)) -} - -#[derive(Deserialize)] -pub struct ResultCount { - #[serde(default = "default_count")] - pub count: i16, -} - -fn default_count() -> i16 { - 100 -} - -pub async fn get_projects( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Extension(pool): Extension, - Extension(redis): Extension, - Query(count): Query, - Extension(session_queue): Extension>, -) -> Result>, ApiError> { - check_is_moderator_from_headers( - &addr, - &headers, - &pool, - &redis, - &session_queue, - Some(&[Scopes::PROJECT_READ]), - ) - .await?; - - use futures::stream::TryStreamExt; - - let project_ids = sqlx::query!( - " - SELECT id FROM mods - WHERE status = $1 - ORDER BY queued ASC - LIMIT $2; - ", - ProjectStatus::Processing.as_str(), - count.count as i64 - ) - .fetch_many(&pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) }) - .try_collect::>() - .await?; - - let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &pool, &redis) - .await? 
- .into_iter() - .map(crate::models::projects::Project::from) - .collect(); - - Ok(Json(projects)) -} diff --git a/src/routes/v3/oauth_clients.rs b/src/routes/v3/oauth_clients.rs index 21cd3109..bc7a471b 100644 --- a/src/routes/v3/oauth_clients.rs +++ b/src/routes/v3/oauth_clients.rs @@ -14,12 +14,6 @@ use sqlx::PgPool; use validator::Validate; use super::ApiError; -use crate::{ - auth::checks::ValidateAllAuthorized, - file_hosting::FileHost, - models::{ids::base62_impl::parse_base62, oauth_clients::DeleteOAuthClientQueryParam}, - util::routes::read_from_payload, -}; use crate::{ auth::{checks::ValidateAuthorized, get_user_from_headers}, database::{ @@ -40,6 +34,11 @@ use crate::{ routes::v3::project_creation::CreateError, util::validate::validation_errors_to_string, }; +use crate::{ + file_hosting::FileHost, + models::{ids::base62_impl::parse_base62, oauth_clients::DeleteOAuthClientQueryParam}, + util::routes::read_from_payload, +}; use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient; use crate::models::ids::OAuthClientId as ApiOAuthClientId; @@ -90,10 +89,13 @@ pub async fn get_user_clients( let target_user = User::get(&info, &pool, &redis).await?; if let Some(target_user) = target_user { + if target_user.id != current_user.id.into() && !current_user.role.is_admin() { + return Err(ApiError::CustomAuthentication( + "You do not have permission to see the OAuth clients of this user!".to_string(), + )); + } + let clients = OAuthClient::get_all_user_clients(target_user.id, &pool).await?; - clients - .iter() - .validate_all_authorized(Some(¤t_user))?; let response = clients .into_iter() @@ -107,14 +109,11 @@ pub async fn get_user_clients( } pub async fn get_client( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, Path(id): Path, Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, ) -> Result, ApiError> { - let clients = get_clients_inner(&[id], addr, headers, pool, redis, 
session_queue).await?; + let clients = get_clients_inner(&[id], pool).await?; + if let Some(client) = clients.into_iter().next() { Ok(Json(client)) } else { @@ -123,12 +122,8 @@ pub async fn get_client( } pub async fn get_clients( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, Query(info): Query, Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, ) -> Result>, ApiError> { let ids: Vec<_> = info .ids @@ -136,7 +131,7 @@ pub async fn get_clients( .map(|id| parse_base62(id).map(ApiOAuthClientId)) .collect::>()?; - let clients = get_clients_inner(&ids, addr, headers, pool, redis, session_queue).await?; + let clients = get_clients_inner(&ids, pool).await?; Ok(Json(clients)) } @@ -597,28 +592,10 @@ async fn edit_redirects( pub async fn get_clients_inner( ids: &[ApiOAuthClientId], - addr: SocketAddr, - headers: HeaderMap, pool: PgPool, - redis: RedisPool, - session_queue: Arc, ) -> Result, ApiError> { - let current_user = get_user_from_headers( - &addr, - &headers, - &pool, - &redis, - &session_queue, - Some(&[Scopes::SESSION_ACCESS]), - ) - .await? 
- .1; - let ids: Vec = ids.iter().map(|i| (*i).into()).collect(); let clients = OAuthClient::get_many(&ids, &pool).await?; - clients - .iter() - .validate_all_authorized(Some(¤t_user))?; Ok(clients.into_iter().map(|c| c.into()).collect_vec()) } diff --git a/src/routes/v3/payouts.rs b/src/routes/v3/payouts.rs index 95e5ee4d..55038d51 100644 --- a/src/routes/v3/payouts.rs +++ b/src/routes/v3/payouts.rs @@ -129,9 +129,7 @@ pub async fn paypal_webhook( .await?; if let Some(result) = result { - let mtx = - payouts.lock_user_payouts(crate::models::ids::UserId(result.user_id as u64)); - let _guard = mtx.lock().await; + let _guard = payouts.payouts_locks.lock().await; sqlx::query!( " @@ -248,9 +246,7 @@ pub async fn tremendous_webhook( .await?; if let Some(result) = result { - let mtx = - payouts.lock_user_payouts(crate::models::ids::UserId(result.user_id as u64)); - let _guard = mtx.lock().await; + let _guard = payouts.payouts_locks.lock().await; sqlx::query!( " @@ -371,8 +367,7 @@ pub async fn create_payout( )); } - let mtx = payouts_queue.lock_user_payouts(user.id.into()); - let _guard = mtx.lock().await; + let _guard = payouts_queue.payouts_locks.lock().await; if user.balance < body.amount || body.amount < Decimal::ZERO { return Err(ApiError::InvalidInput( diff --git a/src/routes/v3/project_creation.rs b/src/routes/v3/project_creation.rs index d40fe466..6825d81d 100644 --- a/src/routes/v3/project_creation.rs +++ b/src/routes/v3/project_creation.rs @@ -12,7 +12,7 @@ use crate::models::pats::Scopes; use crate::models::projects::{ License, Link, MonetizationStatus, Project, ProjectId, ProjectStatus, VersionId, VersionStatus, }; -use crate::models::teams::ProjectPermissions; +use crate::models::teams::{OrganizationPermissions, ProjectPermissions}; use crate::models::threads::ThreadType; use crate::models::users::UserId; use crate::queue::session::AuthQueue; @@ -142,7 +142,7 @@ impl IntoResponse for CreateError { CreateError::ImageError(..) 
=> "invalid_image", CreateError::RerouteError(..) => "reroute_error", }, - description: &self.to_string(), + description: self.to_string(), }; (status_code, Json(error)).into_response() @@ -619,7 +619,30 @@ async fn project_create_inner( let mut members = vec![]; - if project_create_data.organization_id.is_none() { + if let Some(organization_id) = project_create_data.organization_id { + let org = models::Organization::get_id(organization_id.into(), pool, redis) + .await? + .ok_or_else(|| { + CreateError::InvalidInput("Invalid organization ID specified!".to_string()) + })?; + + let team_member = + models::TeamMember::get_from_user_id(org.team_id, current_user.id.into(), pool) + .await?; + + let perms = + OrganizationPermissions::get_permissions_by_role(¤t_user.role, &team_member); + + if !perms + .map(|x| x.contains(OrganizationPermissions::ADD_PROJECT)) + .unwrap_or(false) + { + return Err(CreateError::CustomAuthenticationError( + "You do not have the permissions to create projects in this organization!" 
+ .to_string(), + )); + } + } else { members.push(models::team_item::TeamMemberBuilder { user_id: current_user.id.into(), role: crate::models::teams::DEFAULT_ROLE.to_owned(), @@ -631,7 +654,6 @@ async fn project_create_inner( ordering: 0, }) } - let team = models::team_item::TeamBuilder { members }; let team_id = team.insert(&mut *transaction).await?; diff --git a/src/routes/v3/projects.rs b/src/routes/v3/projects.rs index 3e9a4244..646940c4 100644 --- a/src/routes/v3/projects.rs +++ b/src/routes/v3/projects.rs @@ -6,7 +6,7 @@ use std::collections::HashMap; use std::net::SocketAddr; use std::sync::Arc; -use crate::auth::checks::is_visible_project; +use crate::auth::checks::{filter_visible_versions, is_visible_project}; use crate::auth::{filter_visible_projects, get_user_from_headers}; use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::project_item::{GalleryItem, ModCategory}; @@ -25,6 +25,7 @@ use crate::models::projects::{ }; use crate::models::teams::ProjectPermissions; use crate::models::threads::MessageBody; +use crate::queue::moderation::AutomatedModerationQueue; use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::search::indexing::remove_documents; @@ -234,6 +235,7 @@ pub struct EditProject { pub monetization_status: Option, } +#[allow(clippy::too_many_arguments)] pub async fn project_edit( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, @@ -242,6 +244,7 @@ pub async fn project_edit( Extension(search_config): Extension, Extension(redis): Extension, Extension(session_queue): Extension>, + Extension(moderation_queue): Extension>, Json(new_project): Json, ) -> Result { let user = get_user_from_headers( @@ -358,16 +361,9 @@ pub async fn project_edit( .execute(&mut *transaction) .await?; - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = FALSE - WHERE id = $1 - ", - project_item.thread_id as db_ids::ThreadId, - ) - .execute(&mut *transaction) - .await?; + moderation_queue + 
.projects + .insert(project_item.inner.id.into()); } if status.is_approved() && !project_item.inner.status.is_approved() { @@ -463,6 +459,7 @@ pub async fn project_edit( old_status: project_item.inner.status, }, thread_id: project_item.thread_id, + hide_identity: true, } .insert(&mut transaction) .await?; @@ -1008,14 +1005,10 @@ pub async fn dependency_list( ) .await?; - let mut projects = projects_result - .into_iter() - .map(models::projects::Project::from) - .collect::>(); - let mut versions = versions_result - .into_iter() - .map(models::projects::Version::from) - .collect::>(); + let mut projects = + filter_visible_projects(projects_result, &user_option, &pool, false).await?; + let mut versions = + filter_visible_versions(versions_result, &user_option, &pool, &redis).await?; projects.sort_by(|a, b| b.published.cmp(&a.published)); projects.dedup_by(|a, b| a.id == b.id); diff --git a/src/routes/v3/reports.rs b/src/routes/v3/reports.rs index 8b0c862b..3485d9fd 100644 --- a/src/routes/v3/reports.rs +++ b/src/routes/v3/reports.rs @@ -441,6 +441,7 @@ pub async fn report_edit( MessageBody::ThreadClosure }, thread_id: report.thread_id, + hide_identity: user.role.is_mod(), } .insert(&mut transaction) .await?; @@ -456,18 +457,6 @@ pub async fn report_edit( ) .execute(&mut *transaction) .await?; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = $1 - WHERE id = $2 - ", - !(edit_closed || report.closed), - report.thread_id.0, - ) - .execute(&mut *transaction) - .await?; } // delete any images no longer in the body diff --git a/src/routes/v3/threads.rs b/src/routes/v3/threads.rs index 606bca1a..8f1ca939 100644 --- a/src/routes/v3/threads.rs +++ b/src/routes/v3/threads.rs @@ -1,10 +1,10 @@ use axum::http::HeaderMap; -use axum::routing::{delete, get, post}; +use axum::routing::{delete, get}; use axum::Router; use std::net::SocketAddr; use std::sync::Arc; -use crate::auth::{check_is_moderator_from_headers, get_user_from_headers}; +use 
crate::auth::get_user_from_headers; use crate::database; use crate::database::models::image_item; use crate::database::models::notification_item::NotificationBuilder; @@ -30,9 +30,7 @@ pub fn config() -> Router { Router::new() .route("/threads", get(threads_get)) .route("/message/:id", delete(message_delete)) - .route("/thread/inbox", get(moderation_inbox)) .route("/thread/:id", get(thread_get).post(thread_send_message)) - .route("/thread/:id/read", post(thread_read)) } pub async fn is_authorized_thread( @@ -254,7 +252,13 @@ pub async fn filter_authorized_threads( &mut thread .messages .iter() - .filter_map(|x| x.author_id) + .filter_map(|x| { + if x.hide_identity && !user.role.is_mod() { + None + } else { + x.author_id + } + }) .collect::>(), ); @@ -303,7 +307,13 @@ pub async fn thread_get( &mut data .messages .iter() - .filter_map(|x| x.author_id) + .filter_map(|x| { + if x.hide_identity && !user.role.is_mod() { + None + } else { + x.author_id + } + }) .collect::>(), ); @@ -437,11 +447,12 @@ pub async fn thread_send_message( author_id: Some(user.id.into()), body: new_message.body.clone(), thread_id: thread.id, + hide_identity: user.role.is_mod(), } .insert(&mut transaction) .await?; - let mod_notif = if let Some(project_id) = thread.project_id { + if let Some(project_id) = thread.project_id { let project = database::models::Project::get_id(project_id, &pool, &redis).await?; if let Some(project) = project { @@ -469,8 +480,6 @@ pub async fn thread_send_message( .await?; } } - - !user.role.is_mod() } else if let Some(report_id) = thread.report_id { let report = database::models::report_item::Report::get(report_id, &pool).await?; @@ -494,23 +503,7 @@ pub async fn thread_send_message( .await?; } } - - !user.role.is_mod() - } else { - false - }; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = $1 - WHERE id = $2 - ", - mod_notif, - thread.id.0, - ) - .execute(&mut *transaction) - .await?; + } if let MessageBody::Text { associated_images, .. 
@@ -560,75 +553,6 @@ pub async fn thread_send_message( } } -pub async fn moderation_inbox( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, -) -> Result>, ApiError> { - let user = check_is_moderator_from_headers( - &addr, - &headers, - &pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), - ) - .await?; - let ids = sqlx::query!( - " - SELECT id - FROM threads - WHERE show_in_mod_inbox = TRUE - " - ) - .fetch_many(&pool) - .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ThreadId(m.id))) }) - .try_collect::>() - .await?; - - let threads_data = database::models::Thread::get_many(&ids, &pool).await?; - let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?; - Ok(Json(threads)) -} - -pub async fn thread_read( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, - Path(info): Path, - Extension(pool): Extension, - Extension(redis): Extension, - Extension(session_queue): Extension>, -) -> Result { - check_is_moderator_from_headers( - &addr, - &headers, - &pool, - &redis, - &session_queue, - Some(&[Scopes::THREAD_READ]), - ) - .await?; - - let mut transaction = pool.begin().await?; - - sqlx::query!( - " - UPDATE threads - SET show_in_mod_inbox = FALSE - WHERE id = $1 - ", - info.0 as i64, - ) - .execute(&mut *transaction) - .await?; - - transaction.commit().await?; - - Ok(StatusCode::NO_CONTENT) -} - pub async fn message_delete( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, diff --git a/src/routes/v3/version_file.rs b/src/routes/v3/version_file.rs index daebdf62..24240886 100644 --- a/src/routes/v3/version_file.rs +++ b/src/routes/v3/version_file.rs @@ -15,6 +15,8 @@ use axum::http::StatusCode; use axum::response::IntoResponse; use axum::routing::{get, post}; use axum::Router; +use dashmap::DashMap; +use futures::TryStreamExt; use itertools::Itertools; use serde::{Deserialize, Serialize}; use 
sqlx::PgPool; @@ -39,6 +41,7 @@ pub fn config() -> Router { ) } +#[axum::debug_handler] pub async fn get_version_from_hash( ConnectInfo(addr): ConnectInfo, headers: HeaderMap, @@ -321,29 +324,14 @@ pub struct ManyUpdateData { pub algorithm: Option, // Defaults to calculation based on size of hash pub hashes: Vec, pub loaders: Option>, - pub loader_fields: Option>>, + pub game_versions: Option>, pub version_types: Option>, } pub async fn update_files( - ConnectInfo(addr): ConnectInfo, - headers: HeaderMap, Extension(pool): Extension, Extension(redis): Extension, - Extension(session_queue): Extension>, Json(update_data): Json, ) -> Result>, ApiError> { - let user_option = get_user_from_headers( - &addr, - &headers, - &pool, - &redis, - &session_queue, - Some(&[Scopes::VERSION_READ]), - ) - .await - .map(|x| x.1) - .ok(); - let algorithm = update_data .algorithm .clone() @@ -356,16 +344,36 @@ pub async fn update_files( ) .await?; - let projects = database::models::Project::get_many_ids( - &files.iter().map(|x| x.project_id).collect::>(), - &pool, - &redis, + // TODO: de-hardcode this and actually use version fields system + let update_version_ids = sqlx::query!( + " + SELECT v.id version_id, v.mod_id mod_id + FROM versions v + INNER JOIN version_fields vf ON vf.field_id = 3 AND v.id = vf.version_id + INNER JOIN loader_field_enum_values lfev ON vf.enum_value = lfev.id AND (cardinality($2::varchar[]) = 0 OR lfev.value = ANY($2::varchar[])) + INNER JOIN loaders_versions lv ON lv.version_id = v.id + INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[])) + WHERE v.mod_id = ANY($1) AND (cardinality($4::varchar[]) = 0 OR v.version_type = ANY($4)) + ORDER BY v.date_published ASC + ", + &files.iter().map(|x| x.project_id.0).collect::>(), + &update_data.game_versions.clone().unwrap_or_default(), + &update_data.loaders.clone().unwrap_or_default(), + &update_data.version_types.clone().unwrap_or_default().iter().map(|x| 
x.to_string()).collect::>(), ) - .await?; - let all_versions = database::models::Version::get_many( - &projects - .iter() - .flat_map(|x| x.versions.clone()) + .fetch(&pool) + .try_fold(DashMap::new(), |acc : DashMap<_,Vec>, m| { + acc.entry(database::models::ProjectId(m.mod_id)) + .or_default() + .push(database::models::VersionId(m.version_id)); + async move { Ok(acc) } + }) + .await?; + + let versions = database::models::Version::get_many( + &update_version_ids + .into_iter() + .filter_map(|x| x.1.last().copied()) .collect::>(), &pool, &redis, @@ -373,50 +381,16 @@ pub async fn update_files( .await?; let mut response = HashMap::new(); - - for project in projects { - for file in files.iter().filter(|x| x.project_id == project.inner.id) { - let version = all_versions - .iter() - .filter(|x| x.inner.project_id == file.project_id) - .filter(|x| { - // TODO: Behaviour here is repeated in a few other filtering places, should be abstracted - let mut bool = true; - - if let Some(version_types) = &update_data.version_types { - bool &= version_types - .iter() - .any(|y| y.as_str() == x.inner.version_type); - } - if let Some(loaders) = &update_data.loaders { - bool &= x.loaders.iter().any(|y| loaders.contains(y)); - } - if let Some(loader_fields) = &update_data.loader_fields { - for (key, values) in loader_fields { - bool &= if let Some(x_vf) = - x.version_fields.iter().find(|y| y.field_name == *key) - { - values.iter().any(|v| x_vf.value.contains_json_value(v)) - } else { - true - }; - } - } - - bool - }) - .sorted() - .last(); - - if let Some(version) = version { - if is_visible_version(&version.inner, &user_option, &pool, &redis).await? 
{ - if let Some(hash) = file.hashes.get(&algorithm) { - response.insert( - hash.clone(), - models::projects::Version::from(version.clone()), - ); - } - } + for file in files { + if let Some(version) = versions + .iter() + .find(|x| x.inner.project_id == file.project_id) + { + if let Some(hash) = file.hashes.get(&algorithm) { + response.insert( + hash.clone(), + models::projects::Version::from(version.clone()), + ); } } } diff --git a/src/routes/v3/versions.rs b/src/routes/v3/versions.rs index 9c64c780..698ff7ec 100644 --- a/src/routes/v3/versions.rs +++ b/src/routes/v3/versions.rs @@ -328,13 +328,10 @@ pub async fn version_edit( } if let Some(dependencies) = &new_version.dependencies { - // TODO: Re-add this exclusions when modpack also has separate dependency retrieval that was removed from validators - // if let Some(project) = project_item { - // if project.project_type != "modpack" { sqlx::query!( " - DELETE FROM dependencies WHERE dependent_id = $1 - ", + DELETE FROM dependencies WHERE dependent_id = $1 + ", id as database::models::ids::VersionId, ) .execute(&mut *transaction) @@ -352,8 +349,6 @@ pub async fn version_edit( DependencyBuilder::insert_many(builders, version_item.inner.id, &mut transaction) .await?; - // } - // } } if !new_version.fields.is_empty() { diff --git a/src/search/mod.rs b/src/search/mod.rs index 3e0fe726..8ecabab0 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -50,7 +50,7 @@ impl IntoResponse for SearchError { SearchError::InvalidIndex(..) => "invalid_input", SearchError::FormatError(..) 
=> "invalid_input", }, - description: &self.to_string(), + description: self.to_string(), }; (status_code, Json(error_message)).into_response() diff --git a/src/util/extract.rs b/src/util/extract.rs index 671f6e23..f33544ae 100644 --- a/src/util/extract.rs +++ b/src/util/extract.rs @@ -2,6 +2,7 @@ use axum::extract::FromRequest; use axum::extract::FromRequestParts; use axum::response::{IntoResponse, Response}; use serde::Serialize; +pub use axum::extract::ConnectInfo; #[derive(FromRequest, FromRequestParts)] #[from_request(via(axum::Json), rejection(crate::routes::ApiError))] @@ -32,9 +33,9 @@ pub struct Query(pub T); #[from_request(via(axum::Extension), rejection(crate::routes::ApiError))] pub struct Extension(pub T); -#[derive(FromRequest, FromRequestParts)] -#[from_request(via(axum::extract::ConnectInfo), rejection(crate::routes::ApiError))] -pub struct ConnectInfo(pub T); +// #[derive(FromRequest, FromRequestParts)] +// #[from_request(via(axum::extract::ConnectInfo), rejection(crate::routes::ApiError))] +// pub struct ConnectInfo(pub T); #[derive(FromRequest)] #[from_request(rejection(crate::routes::ApiError))] diff --git a/tests/common/api_v3/version.rs b/tests/common/api_v3/version.rs index c487ecd0..c2dae9b8 100644 --- a/tests/common/api_v3/version.rs +++ b/tests/common/api_v3/version.rs @@ -320,9 +320,7 @@ impl ApiVersion for ApiV3 { json["loaders"] = serde_json::to_value(loaders).unwrap(); } if let Some(game_versions) = game_versions { - json["loader_fields"] = json!({ - "game_versions": game_versions, - }); + json["game_versions"] = serde_json::to_value(game_versions).unwrap(); } if let Some(version_types) = version_types { json["version_types"] = serde_json::to_value(version_types).unwrap(); diff --git a/tests/oauth_clients.rs b/tests/oauth_clients.rs index 945299a5..9c71e67e 100644 --- a/tests/oauth_clients.rs +++ b/tests/oauth_clients.rs @@ -112,21 +112,6 @@ async fn get_oauth_client_for_client_creator_succeeds() { .await; } -#[tokio::test] -async fn 
get_oauth_client_for_unrelated_user_fails() { - with_test_environment(None, |env: TestEnvironment| async move { - let DummyOAuthClientAlpha { client_id, .. } = env.dummy.oauth_client_alpha.clone(); - - let resp = env - .api - .get_oauth_client(client_id.clone(), FRIEND_USER_PAT) - .await; - - assert_status!(&resp, StatusCode::UNAUTHORIZED); - }) - .await; -} - #[tokio::test] async fn can_delete_oauth_client() { with_test_environment(None, |env: TestEnvironment| async move { diff --git a/tests/project.rs b/tests/project.rs index cd750735..1e84fadb 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -69,7 +69,10 @@ async fn test_get_project() { .unwrap() .unwrap(); let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap(); - assert_eq!(cached_project["inner"]["slug"], json!(alpha_project_slug)); + assert_eq!( + cached_project["val"]["inner"]["slug"], + json!(alpha_project_slug) + ); // Make the request again, this time it should be cached let resp = api.get_project(alpha_project_id, USER_USER_PAT).await; diff --git a/tests/scopes.rs b/tests/scopes.rs index d368642c..91c5b3e9 100644 --- a/tests/scopes.rs +++ b/tests/scopes.rs @@ -838,43 +838,6 @@ pub async fn thread_scopes() { .test(req_gen, thread_write) .await .unwrap(); - - // Check moderation inbox - // Uses moderator PAT, as only moderators can see the moderation inbox - let req_gen = - |pat: Option| async move { api.get_moderation_inbox(pat.as_deref()).await }; - let (_, success) = ScopeTest::new(&test_env) - .with_user_id(MOD_USER_ID_PARSED) - .test(req_gen, thread_read) - .await - .unwrap(); - let thread_id: &str = success[0]["id"].as_str().unwrap(); - - // Moderator 'read' thread - // Uses moderator PAT, as only moderators can see the moderation inbox - let req_gen = - |pat: Option| async move { api.read_thread(thread_id, pat.as_deref()).await }; - ScopeTest::new(&test_env) - .with_user_id(MOD_USER_ID_PARSED) - .test(req_gen, thread_read) - .await - .unwrap(); - - // Delete 
that message - // First, get message id - let resp = api.get_thread(thread_id, USER_USER_PAT).await; - let success: serde_json::Value = resp.json(); - let thread_message_id = success["messages"][0]["id"].as_str().unwrap(); - - let req_gen = |pat: Option| async move { - api.delete_thread_message(thread_message_id, pat.as_deref()) - .await - }; - ScopeTest::new(&test_env) - .with_user_id(MOD_USER_ID_PARSED) - .test(req_gen, thread_write) - .await - .unwrap(); }) .await; } diff --git a/tests/version.rs b/tests/version.rs index b841088b..fe7309e7 100644 --- a/tests/version.rs +++ b/tests/version.rs @@ -55,7 +55,7 @@ async fn test_get_version() { .unwrap(); let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap(); assert_eq!( - cached_project["inner"]["project_id"], + cached_project["val"]["inner"]["project_id"], json!(parse_base62(alpha_project_id).unwrap()) ); @@ -617,6 +617,7 @@ async fn version_ordering_for_specified_orderings_orders_lower_order_first() { USER_USER_PAT, ) .await; + assert_common_version_ids(&versions, vec![new_version_id, alpha_version_id]); }) .await;