Local development fixes (lint, compose, deps) (#8486)
phelpsdb authored May 17, 2024
1 parent 0f0bb34 commit c7ec583
Showing 20 changed files with 159 additions and 105 deletions.
4 changes: 4 additions & 0 deletions dev-tools/compose/docker-compose.test.yml
@@ -511,6 +511,8 @@ services:
       DDEX_CHOREOGRAPHY: "ERNReleaseByRelease"
     networks:
       - ddex-network
+    profiles:
+      - ddex
     depends_on:
       ddex-ingester:
         condition: service_healthy
@@ -531,6 +533,8 @@ services:
       DDEX_CHOREOGRAPHY: "ERNBatched"
     networks:
       - ddex-network
+    profiles:
+      - ddex
     depends_on:
       ddex-ingester:
         condition: service_healthy
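These two DDEX services are now behind a ddex Compose profile, so a plain docker compose up skips them and they start only when the profile is requested explicitly. A minimal sketch of the opt-in workflow, assuming the Compose v2 CLI and this compose file path:

    # Default bring-up ignores profile-gated services
    docker compose -f dev-tools/compose/docker-compose.test.yml up -d

    # Opt in to the DDEX services for a ddex test run
    docker compose -f dev-tools/compose/docker-compose.test.yml --profile ddex up -d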
6 changes: 3 additions & 3 deletions dev-tools/setup.sh
@@ -35,11 +35,11 @@ debian | ubuntu)
     git \
     python3 \
     python3-pip \
-    docker-ce='5:24.0.5-1~ubuntu.22.04~jammy' \
-    docker-ce-cli='5:24.0.5-1~ubuntu.22.04~jammy' \
+    docker-ce \
+    docker-ce-cli \
     containerd.io \
     docker-buildx-plugin \
-    docker-compose-plugin='2.21.0-1~ubuntu.22.04~jammy'
+    docker-compose-plugin

 curl -fsSL https://raw.githubusercontent.com/tj/n/master/bin/n | sudo bash -s lts

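Dropping the version pins matters because apt repositories eventually remove old package builds; a pin like 5:24.0.5-1~ubuntu.22.04~jammy fails the whole setup script once that exact build disappears from the Docker repository. Unpinned, apt resolves the current candidate. A quick way to see what that would be before running setup.sh:

    # Show installed/candidate versions apt would resolve for each package
    apt-cache policy docker-ce docker-ce-cli docker-compose-plugin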
11 changes: 9 additions & 2 deletions eth-contracts/scripts/setup-predeployed-ganache.sh
@@ -1,7 +1,6 @@
 #!/usr/bin/env sh

 set -e
-set -x

cd $(dirname "$(readlink -f "$0")")/..

@@ -18,7 +17,15 @@ if [ -z "networkId" ]; then
fi

mkdir -p $dbPath
-npx ganache --wallet.deterministic --wallet.totalAccounts 50 --database.dbPath "$dbPath" --miner.blockTime 1 --chain.networkId "$networkId" &
+npx ganache \
+  --server.host "0.0.0.0" \
+  --wallet.deterministic \
+  --wallet.totalAccounts 50 \
+  --database.dbPath "$dbPath" \
+  --miner.blockTime 1 \
+  --chain.networkId "$networkId" \
+  &
+
ganache_pid=$!

npx truffle migrate --network predeploy
2 changes: 1 addition & 1 deletion eth-contracts/truffle-config.js
@@ -83,7 +83,7 @@ module.exports = {
       disableConfirmationListener: true
     },
     predeploy: {
-      host: 'localhost',
+      host: '0.0.0.0',
       port: 8545,
       network_id: '*',
       verify: {
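The ganache change and the truffle-config.js change work together: ganache previously bound to localhost only, so the predeploy migration (or anything outside the process's network namespace, such as another container) could not reach it; --server.host "0.0.0.0" exposes it on all interfaces, and the predeploy network now points at the same address. (Incidentally, the if [ -z "networkId" ] context line tests a non-empty literal and is therefore always false; -z "$networkId" was presumably intended.) A quick smoke test for the listener, assuming the default port 8545:

    # A healthy ganache answers JSON-RPC with its configured chain id
    curl -s -X POST http://localhost:8545 \
      -H 'Content-Type: application/json' \
      -d '{"jsonrpc":"2.0","id":1,"method":"eth_chainId","params":[]}'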
38 changes: 22 additions & 16 deletions monitoring/scripts/track-cid-backfill/python/main.py
@@ -1,62 +1,68 @@
-
 import os
 import pandas as pd

-csv_path = os.path.abspath(os.path.join(os.getcwd(), '..', 'local-csv'))
+csv_path = os.path.abspath(os.path.join(os.getcwd(), "..", "local-csv"))
 # csv_path = os.path.abspath(os.path.join(os.getcwd(), '..', 'production-csv'))


 def get_all_csvs():
     # Get the list of all files and directories
     dir_list = os.listdir(csv_path)
-    print('Num files and directories in', csv_path, 'is', len(dir_list))
+    print("Num files and directories in", csv_path, "is", len(dir_list))
     return dir_list


 def generate_aggregate_csv():
     csvs = get_all_csvs()
     # print(csvs)
-    dfs = [pd.read_csv('../local-csv/' + csv) for csv in csvs]
+    dfs = [pd.read_csv("../local-csv/" + csv) for csv in csvs]
     # dfs = [pd.read_csv('../production-csv/' + csv) for csv in csvs]
     # print(dfs)
     final_result = pd.concat(dfs)
     final_result = final_result.groupby(final_result.track_id).first()
     # print(final_result)
     print(final_result.size)

-    try:
-        final_result.pop('Unnamed: 0')
+    try:
+        final_result.pop("Unnamed: 0")
     except:
         pass

     try:
-        final_result.pop('Unnamed: 0.1')
+        final_result.pop("Unnamed: 0.1")
     except:
         pass

     try:
-        final_result.pop('Unnamed: 0.2')
+        final_result.pop("Unnamed: 0.2")
     except:
         pass

-    final_result = final_result.sort_values(by=['track_id'])
-    final_result.to_csv('../track_cids.csv')
+    final_result = final_result.sort_values(by=["track_id"])
+    final_result.to_csv("../track_cids.csv")
     # print(final_result)


 def get_missing_track_cids_csv():
-    final_result_read = pd.read_csv('../track_cids.csv')
+    final_result_read = pd.read_csv("../track_cids.csv")
     # print(final_result_read)

-    all_track_ids_read = pd.read_csv('../all_track_ids.csv').sort_values(by=['TrackId'])
+    all_track_ids_read = pd.read_csv("../all_track_ids.csv").sort_values(by=["TrackId"])
     # print(all_track_ids_read)

     # print(final_result_read[['TrackId']].compare(all_track_ids_read[['TrackId']]))
     # print(final_result_read['TrackId'].isnotin(all_track_ids_read['TrackId']).value_counts())

-    the_missing = all_track_ids_read[~all_track_ids_read['TrackId'].isin(final_result_read['track_id'])].sort_values(by=['TrackId'])
-    the_missing.to_csv('../missing_cids.csv', index=False, header=False)
+    the_missing = all_track_ids_read[
+        ~all_track_ids_read["TrackId"].isin(final_result_read["track_id"])
+    ].sort_values(by=["TrackId"])
+    the_missing.to_csv("../missing_cids.csv", index=False, header=False)


 def main():
     generate_aggregate_csv()
     get_missing_track_cids_csv()

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
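Every hunk in this script is mechanical reformatting (double quotes, wrapped long expressions, two blank lines between top-level definitions), which is what black produces. A sketch for reproducing or checking it locally; running black directly on this path is an assumption, since the script sits outside the discovery-provider hook config added below:

    cd monitoring/scripts/track-cid-backfill/python
    black --diff main.py   # preview the changes
    black main.py          # apply them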
20 changes: 20 additions & 0 deletions packages/discovery-provider/.hooks/pre-commit.json
@@ -0,0 +1,20 @@
+{
+  "steps": [
+    {
+      "name": "Sort imports",
+      "command": "isort --diff --check ."
+    },
+    {
+      "name": "Flake8",
+      "command": "flake8 ."
+    },
+    {
+      "name": "Format",
+      "command": "black --diff --check ."
+    },
+    {
+      "name": "Typecheck",
+      "command": "mypy ."
+    }
+  ]
+}
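The new pre-commit config runs all four tools in check/diff mode, so a non-conforming commit is rejected rather than silently rewritten. The same gate can be run by hand before committing; only the working directory is assumed here:

    cd packages/discovery-provider
    isort --diff --check .   # import ordering (report only)
    flake8 .                 # lint
    black --diff --check .   # formatting (report only)
    mypy .                   # type check, configured in setup.cfg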
(changed file: name not captured; hunks below are from the get_managed_users / get_user_managers_with_grants tests)
@@ -87,7 +87,7 @@
         "grantee_address": "0x60",
         "is_approved": False,
         "is_revoked": True,
-    }
+    },
 ]

 # ### get_managed_users ### #

@@ -131,7 +131,10 @@ def test_get_managed_users_grants_without_users(app):
     with app.app_context():
         db = get_db()

-        entities = ({"users": copy.deepcopy(test_users), "grants": copy.deepcopy(test_managed_user_grants)})
+        entities = {
+            "users": copy.deepcopy(test_users),
+            "grants": copy.deepcopy(test_managed_user_grants),
+        }
         # Record for a user which won't be found
         entities["grants"].append(
             {

@@ -184,9 +187,7 @@ def test_get_user_managers_default(app):
         db = get_db()
         populate_mock_db(db, {"users": test_users, "grants": test_user_manager_grants})

-        user_managers = get_user_managers_with_grants(
-            {"user_id": 10}
-        )
+        user_managers = get_user_managers_with_grants({"user_id": 10})

         # return all non-revoked records by default
         assert len(user_managers) == 3, "Expected exactly 3 records"

@@ -216,7 +217,10 @@ def test_get_user_managers_grants_without_users(app):
     with app.app_context():
         db = get_db()

-        entities = {"users": copy.deepcopy(test_users), "grants": copy.deepcopy(test_user_manager_grants)}
+        entities = {
+            "users": copy.deepcopy(test_users),
+            "grants": copy.deepcopy(test_user_manager_grants),
+        }
         # Record for a user which won't be found
         entities["grants"].append(
             {

@@ -228,9 +232,7 @@ def test_get_user_managers_grants_without_users(app):
         )
         populate_mock_db(db, entities)

-        user_managers = get_user_managers_with_grants(
-            {"user_id": 10}
-        )
+        user_managers = get_user_managers_with_grants({"user_id": 10})

         # return all non-revoked records by default
         assert len(user_managers) == 3, "Expected exactly 3 records"

@@ -245,9 +247,7 @@ def test_get_user_managers_invalid_parameters(app):
         populate_mock_db(db, {"users": test_users, "grants": test_user_manager_grants})

         try:
-            get_user_managers_with_grants(
-                {}
-            )
+            get_user_managers_with_grants({})
             assert False, "Should have thrown an error for missing user id"
         except ValueError as e:
             assert str(e) == "user_id is required"
(changed file: name not captured)
@@ -1,6 +1,6 @@
-from datetime import datetime
 import json
 import logging
+from datetime import datetime
 from typing import List
 from unittest import mock

(changed file: name not captured)
@@ -192,7 +192,7 @@ def test_valid_parse_metadata(app):
             "rights_controller": None,
             "copyright_line": None,
             "producer_copyright_line": None,
-            "parental_warning_type": None
+            "parental_warning_type": None,
         },
         "QmUpdatePlaylist1": {
             "playlist_id": 1,
(changed file: name not captured)
@@ -1,5 +1,5 @@
-from datetime import datetime
 import logging
+from datetime import datetime
 from unittest import mock

 from integration_tests.utils import populate_mock_db
2 changes: 1 addition & 1 deletion packages/discovery-provider/setup.cfg
@@ -18,4 +18,4 @@ paths = ./
 ignore_missing_imports = True
 show_column_numbers = True
 disable_error_code = annotation-unchecked, var-annotated, literal-required
-plugins = sqlmypy
\ No newline at end of file
+plugins = sqlmypy
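The plugins = sqlmypy line is removed and re-added unchanged, which is the signature of a missing final newline being fixed (marked above with the standard no-newline indicator, an inference from the identical pair). A quick way to verify, assuming a POSIX shell:

    # Print the file's last byte; a well-formed text file ends in 0a (\n)
    tail -c 1 packages/discovery-provider/setup.cfg | od -An -tx1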
(changed file: name not captured; the challenge event processor)
@@ -167,9 +167,9 @@ def process_events(self, session: Session, max_events=1000) -> Tuple[int, bool]:
         events_dicts = list(map(self._json_to_event, events_json))
         # Consolidate event types for processing
         # map of {"event_type": [{ user_id: number, block_number: number, extra: {} }]}}
-        event_user_dict: DefaultDict[ChallengeEvent, List[EventMetadata]] = (
-            defaultdict(lambda: [])
-        )
+        event_user_dict: DefaultDict[
+            ChallengeEvent, List[EventMetadata]
+        ] = defaultdict(lambda: [])
         for event_dict in events_dicts:
             event_type = event_dict["event"]
             event_user_dict[event_type].append(
24 changes: 12 additions & 12 deletions packages/discovery-provider/src/queries/get_feed.py
@@ -192,13 +192,13 @@ def get_feed_sql(args):
         oldest_followee_repost_timestamp,
     ) in followee_reposts:
         if repost_type == RepostType.track:
-            track_repost_timestamp_dict[repost_item_id] = (
-                oldest_followee_repost_timestamp
-            )
+            track_repost_timestamp_dict[
+                repost_item_id
+            ] = oldest_followee_repost_timestamp
         elif repost_type in (RepostType.playlist, RepostType.album):
-            playlist_repost_timestamp_dict[repost_item_id] = (
-                oldest_followee_repost_timestamp
-            )
+            playlist_repost_timestamp_dict[
+                repost_item_id
+            ] = oldest_followee_repost_timestamp

     # extract reposted_track_ids and reposted_playlist_ids
     reposted_track_ids = list(track_repost_timestamp_dict.keys())

@@ -278,18 +278,18 @@ def get_feed_sql(args):
         if track["owner_id"] in followee_user_ids:
             track[response_name_constants.activity_timestamp] = track["created_at"]
         else:
-            track[response_name_constants.activity_timestamp] = (
-                track_repost_timestamp_dict[track["track_id"]]
-            )
+            track[
+                response_name_constants.activity_timestamp
+            ] = track_repost_timestamp_dict[track["track_id"]]
     for playlist in playlists:
         if playlist["playlist_owner_id"] in followee_user_ids:
             playlist[response_name_constants.activity_timestamp] = playlist[
                 "created_at"
             ]
         else:
-            playlist[response_name_constants.activity_timestamp] = (
-                playlist_repost_timestamp_dict[playlist["playlist_id"]]
-            )
+            playlist[
+                response_name_constants.activity_timestamp
+            ] = playlist_repost_timestamp_dict[playlist["playlist_id"]]

     # bundle peripheral info into track and playlist objects
     track_ids = list(map(lambda track: track["track_id"], tracks))
(changed file: name not captured; the get_undisbursed_challenges query)
@@ -108,9 +108,9 @@ def get_undisbursed_challenges(
         args["offset"]
     )

-    undisbursed_challenges: List[Tuple[UserChallenge, Challenge, str, str]] = (
-        undisbursed_challenges_query.all()
-    )
+    undisbursed_challenges: List[
+        Tuple[UserChallenge, Challenge, str, str]
+    ] = undisbursed_challenges_query.all()

    undisbursed_challenges_response: List[UndisbursedChallengeResponse] = [
         to_challenge_response(user_challenge, challenge, handle, wallet)
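This hunk, like the ones in the challenge event processor and get_feed.py above, rewrites the name: Annotation = (value) shape into the older style that splits the annotation's subscript across lines instead of parenthesizing the right-hand side. The parenthesized form is what newer black releases emit, so these hunks are consistent with the repo's checks pinning an older black; the exact pin below is illustrative, not taken from this commit:

    # Match the formatter version the repo's checks expect (version is an assumption)
    pip install 'black==22.12.0'
    black --diff --check packages/discovery-provider/src/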
(Diffs for the remaining changed files were not loaded.)