[BugFix] Clear Linting Items (#6423)
* fix some linting items

* pylint stuff

* black

* index_snapshots

* ruff

* forgot to add this file to commit

* black
deeleeramone authored May 17, 2024
1 parent 4c4f57f commit e124457
Showing 12 changed files with 36 additions and 23 deletions.
```diff
@@ -117,7 +117,7 @@ async def response_callback(response: ClientResponse, _: ClientSession):
             target = (
                 "annualEarnings" if query.period == "annual" else "quarterlyEarnings"
             )
-            message = data.get("Information", "")
+            message = data.get("Information", "")  # type: ignore
             if message:
                 messages.append(message)
                 warn(f"Symbol Error for {symbol}: {message}")
```
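A note on the `# type: ignore` above, which recurs throughout this commit: the `amake_request` helper is annotated as returning `Union[dict, List[dict]]`, so calling a `dict`-only method such as `.get()` on its result trips the union check, because `List[dict]` has no `.get`. A minimal sketch of the pattern (the helper body here is a stand-in, not the real implementation):

```python
import asyncio
from typing import Dict, List, Union


async def amake_request(url: str) -> Union[Dict, List[Dict]]:
    """Stand-in for the request helper; returns a dict or a list of dicts."""
    return {"Information": "", "data": [{"symbol": "SPX"}]}


async def extract(url: str) -> str:
    data = await amake_request(url)
    # Without the ignore, mypy reports something like:
    #   Item "List[Dict]" of "Union[Dict, List[Dict]]" has no attribute "get"
    message = data.get("Information", "")  # type: ignore
    # An isinstance() narrowing would satisfy the checker without the comment:
    # message = data.get("Information", "") if isinstance(data, dict) else ""
    return message


print(asyncio.run(extract("https://example.com/quotes.json")))
```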
```diff
@@ -1,5 +1,7 @@
 """CBOE Index Snapshots Model."""
 
+# pylint: disable=unused-argument
+
 from datetime import datetime
 from typing import Any, Dict, List, Literal, Optional
 
@@ -27,9 +29,9 @@ class CboeIndexSnapshotsQueryParams(IndexSnapshotsQueryParams):
 
     @field_validator("region", mode="after", check_fields=False)
     @classmethod
-    def validate_region(cls, v: str):
+    def validate_region(cls, v):
         """Validate region."""
-        return "us" if v is None else v
+        return v if v else "us"
 
 
 class CboeIndexSnapshotsData(IndexSnapshotsData):
```
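Two things change in this validator: the `v: str` annotation goes away (the incoming value can be `None`, so the annotation was inaccurate), and the falsy check `v if v else "us"` now also catches the empty string, which `"us" if v is None else v` passed through. A quick illustration of the behavioral difference:

```python
def old_validate_region(v):
    """Pre-commit behavior: only None falls back to 'us'."""
    return "us" if v is None else v


def new_validate_region(v):
    """Post-commit behavior: any falsy value falls back to 'us'."""
    return v if v else "us"


print(old_validate_region(""))  # '' -- the empty string slips through
print(new_validate_region(""))  # 'us' -- any falsy value now defaults
print(new_validate_region("eu"))  # 'eu' -- real values pass unchanged
```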
```diff
@@ -89,24 +91,24 @@ def transform_query(params: Dict[str, Any]) -> CboeIndexSnapshotsQueryParams:
     @staticmethod
     async def aextract_data(
         query: CboeIndexSnapshotsQueryParams,
-        credentials: Optional[Dict[str, str]],  # pylint: disable=unused-argument
+        credentials: Optional[Dict[str, str]],
         **kwargs: Any,
     ) -> List[Dict]:
         """Return the raw data from the Cboe endpoint"""
 
         url: str = ""
         if query.region == "us":
             url = "https://cdn.cboe.com/api/global/delayed_quotes/quotes/all_us_indices.json"
         if query.region == "eu":
             url = "https://cdn.cboe.com/api/global/european_indices/index_quotes/all-indices.json"
 
         data = await amake_request(url, **kwargs)
-        return data.get("data")
+        return data.get("data")  # type: ignore
 
     @staticmethod
     def transform_data(
-        query: CboeIndexSnapshotsQueryParams,  # pylint: disable=unused-argument
-        data: dict,
-        **kwargs: Any,  # pylint: disable=unused-argument
+        query: CboeIndexSnapshotsQueryParams,
+        data: List[Dict],
+        **kwargs: Any,
     ) -> List[CboeIndexSnapshotsData]:
         """Transform the data to the standard format"""
         if not data:
```
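Note the trade made in this file: one module-level `# pylint: disable=unused-argument` at the top replaces three inline disables. In the provider-fetcher interface, `query`, `credentials`, and `**kwargs` must appear in the signature even when a given provider ignores them, so the blanket disable keeps the signatures clean. A sketch of the new style (a hypothetical minimal fetcher, not the OpenBB class itself):

```python
"""Hypothetical provider module."""

# pylint: disable=unused-argument
# One module-level disable covers every deliberately-unused parameter below.

from typing import Any, Dict, List, Optional


def transform_data(
    query: Optional[str],  # required by the fetcher interface, unused here
    data: List[Dict],
    **kwargs: Any,  # ditto -- no inline "# pylint: disable" needed anymore
) -> List[Dict]:
    """Pass records through unchanged."""
    return data


print(transform_data(None, [{"symbol": "SPX"}]))
```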
```diff
@@ -92,6 +92,7 @@ async def response_callback(response: ClientResponse, _: Any):
 
 async def get_cboe_data(url, use_cache: bool = True, **kwargs) -> Any:
     """Use the generic Cboe HTTP request."""
+    data: Any = None
     if use_cache is True:
         async with CachedSession(cache=backend) as cached_session:
             try:
```
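The added `data: Any = None` is the template for most of the remaining diffs in this commit: a name assigned only inside the `if use_cache is True:` / `else:` branches gets an unconditional initializer first. Newer pylint releases warn when a name might be consumed on a path where no branch assigned it (plausibly `possibly-used-before-assignment`; treat the exact check name and version as an assumption), and the annotation doubles as a type hint for mypy. A condensed before/after sketch with stand-in fetch functions:

```python
import asyncio
from typing import Any, Optional


async def fetch_cached(url: str) -> Optional[dict]:
    """Stand-in for the CachedSession branch."""
    return {"url": url, "cached": True}


async def fetch_direct(url: str) -> Optional[dict]:
    """Stand-in for the plain-session branch."""
    return {"url": url, "cached": False}


async def get_data(url: str, use_cache: bool = True) -> Any:
    data: Any = None  # bound up front, so every path below sees an initialized name
    if use_cache is True:
        data = await fetch_cached(url)
    else:
        data = await fetch_direct(url)
    if data is None:
        raise RuntimeError(f"No data returned from {url}")
    return data


print(asyncio.run(get_data("https://example.com/static.json")))
```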
```diff
@@ -1,7 +1,7 @@
 """Main Indicators"""
 
 from datetime import datetime, timedelta
-from typing import Dict, List, Literal
+from typing import Dict, List, Literal, Union
 
 from aiohttp_client_cache import SQLiteBackend
 from aiohttp_client_cache.session import CachedSession
@@ -72,6 +72,7 @@
 
 async def fetch_data(url, use_cache: bool = True):
     """Fetch the data with or without the cached session object."""
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/econdb_main_indicators"
         async with CachedSession(
```
4 changes: 2 additions & 2 deletions openbb_platform/providers/fred/openbb_fred/models/search.py
```diff
@@ -121,10 +121,10 @@ async def aextract_data(
         api_key = credentials.get("fred_api_key") if credentials else ""
 
         if query.series_id is not None:
-            results = []
+            results: List = []
 
             async def get_one(_id: str):
-                data = {}
+                data: Dict = {}
                 url = f"https://api.stlouisfed.org/geofred/series/group?series_id={_id}&api_key={api_key}&file_type=json"
                 response = await amake_request(url)
                 data = response.get("series_group")  # type: ignore
```
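`results: List = []` and `data: Dict = {}` are the other recurring fix: when mypy cannot infer an element type for an empty literal it asks for an annotation, and a bare `List`/`Dict` (equivalent to `List[Any]`/`Dict[Any, Any]`) is enough to satisfy it while staying honest about the payload's unknown shape. For example:

```python
from typing import Dict, List

results: List = []  # bare List == List[Any]; heads off "Need type annotation for 'results'"
data: Dict = {}     # starts empty, later reassigned from an API response

data = {"series_group": {"title": "Unemployment Rate"}}
results.append(data["series_group"])
print(results)
```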
```diff
@@ -445,12 +445,12 @@ async def aextract_data(
 
             async def callback(response: ClientResponse, _: Any) -> Dict:
                 """Return the response."""
-                statement_data = await response.json()
+                statement_data = await response.json()  # type: ignore
                 return {
-                    "period_ending": statement_data["fundamental"]["end_date"],
-                    "fiscal_year": statement_data["fundamental"]["fiscal_year"],
-                    "fiscal_period": statement_data["fundamental"]["fiscal_period"],
-                    "financials": statement_data["standardized_financials"],
+                    "period_ending": statement_data["fundamental"]["end_date"],  # type: ignore
+                    "fiscal_year": statement_data["fundamental"]["fiscal_year"],  # type: ignore
+                    "fiscal_period": statement_data["fundamental"]["fiscal_period"],  # type: ignore
+                    "financials": statement_data["standardized_financials"],  # type: ignore
                 }
 
             urls = [
```
```diff
@@ -213,7 +213,7 @@ async def callback(response, session):
                     : query.limit
                 ]
 
-        return await amake_request(url, response_callback=callback, **kwargs)
+        return await amake_request(url, response_callback=callback, **kwargs)  # type: ignore
 
     # pylint: disable=unused-argument
     @staticmethod
```
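Here the ignore comment papers over the gap between `amake_request`'s `Union[dict, List[dict]]` result and the enclosing method's narrower declared return type. `typing.cast` is the alternative that keeps the checker engaged at the boundary; a sketch under the same `Union` assumption about the helper:

```python
import asyncio
from typing import Dict, List, Union, cast


async def amake_request(url: str) -> Union[Dict, List[Dict]]:
    """Stand-in for the real helper."""
    return [{"symbol": "AAPL"}]


async def aextract_data(url: str) -> List[Dict]:
    """cast() narrows the union explicitly instead of silencing the line."""
    return cast(List[Dict], await amake_request(url))


print(asyncio.run(aextract_data("https://example.com/screener.json")))
```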
```diff
@@ -171,7 +171,7 @@ async def aextract_data(
             query.cik = cik_ + str(query.cik)  # type: ignore
 
         url = f"https://data.sec.gov/submissions/CIK{query.cik}.json"
-
+        data: Union[dict, List[dict]] = []
         if query.use_cache is True:
             cache_dir = f"{get_user_cache_directory()}/http/sec_company_filings"
             async with CachedSession(
@@ -206,7 +206,7 @@ async def callback(response, session):
             new_data = DataFrame.from_records(result)
             results.extend(new_data.to_dict("records"))
 
-        urls = []
+        urls: List = []
         new_urls = (
             DataFrame(data["filings"].get("files"))  # type: ignore
             if "filings" in data
```
```diff
@@ -371,6 +371,7 @@ async def callback(response, session):
             """Response callback for the request."""
             return await response.read()
 
+        response: Union[dict, List[dict]] = []
         if query.use_cache is True:
             cache_dir = f"{get_user_cache_directory()}/http/sec_etf"
             async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
@@ -747,6 +748,9 @@ def transform_data(  # noqa: PLR0912
         )
         # Extract additional information from the form that doesn't belong in the holdings table.
         metadata = {}
+        month_1: str = ""
+        month_2: str = ""
+        month_3: str = ""
         try:
             gen_info = response["edgarSubmission"]["formData"].get("genInfo", {})  # type: ignore
             if gen_info:
```
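The three `month_N` initializers are the same used-before-assignment cure in a different spot: the months are presumably assigned inside the `try:` block while parsing `genInfo` and read after it, so a parse failure would otherwise leave them unbound. Giving them empty-string defaults keeps every path safe; a condensed sketch with hypothetical form data:

```python
metadata: dict = {}
month_1: str = ""  # bound before the try, so a failed parse cannot leave it undefined
month_2: str = ""
month_3: str = ""
try:
    gen_info = {"repPdDate": "2024-03-31"}  # hypothetical parsed N-PORT genInfo
    month_1 = gen_info["repPdDate"]
    month_2 = "2024-02-29"  # placeholders for the real derived values
    month_3 = "2024-01-31"
except (KeyError, TypeError):
    pass  # month_1..month_3 still hold their safe defaults
metadata["periods"] = [m for m in (month_1, month_2, month_3) if m]
print(metadata)
```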
```diff
@@ -2,7 +2,7 @@
 
 # pylint: disable=unused-argument
 
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Union
 
 import pandas as pd
 from aiohttp_client_cache import SQLiteBackend
@@ -62,6 +62,7 @@ async def aextract_data(
             "https://www.sec.gov/corpfin/"
             "division-of-corporation-finance-standard-industrial-classification-sic-code-list"
         )
+        response: Union[dict, List[dict], str] = {}
        if query.use_cache is True:
            cache_dir = f"{get_user_cache_directory()}/http/sec_sic"
            async with CachedSession(
```
7 changes: 5 additions & 2 deletions openbb_platform/providers/sec/openbb_sec/utils/helpers.py
```diff
@@ -38,7 +38,7 @@ async def get_all_companies(use_cache: bool = True) -> pd.DataFrame:
     >>> tickers = get_all_companies()
     """
     url = "https://www.sec.gov/files/company_tickers.json"
-
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_companies"
         async with CachedSession(
@@ -65,6 +65,7 @@ async def callback(response, session):
         """Response callback for CIK lookup data."""
         return await response.text(encoding="latin-1")
 
+    response: Union[dict, List[dict], str] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_ciks"
         async with CachedSession(
@@ -97,6 +98,7 @@ async def get_mf_and_etf_map(use_cache: bool = True) -> pd.DataFrame:
     symbols = pd.DataFrame()
 
     url = "https://www.sec.gov/files/company_tickers_mf.json"
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_mf_etf_map"
         async with CachedSession(
@@ -189,6 +191,7 @@ async def callback(response, session):
         """Response callback for ZIP file downloads."""
         return await response.read()
 
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_ftd"
         async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
@@ -315,7 +318,7 @@ async def get_nport_candidates(symbol: str, use_cache: bool = True) -> List[Dict]:
         raise ValueError("Fund not found for, the symbol: " + symbol)
 
     url = f"https://efts.sec.gov/LATEST/search-index?q={series_id}&dateRange=all&forms=NPORT-P"
-
+    response: Union[dict, List[dict]] = {}
     if use_cache is True:
         cache_dir = f"{get_user_cache_directory()}/http/sec_etf"
         async with CachedSession(cache=SQLiteBackend(cache_dir)) as session:
```
1 change: 1 addition & 0 deletions openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py
```diff
@@ -281,6 +281,7 @@ async def get_data_from_url(
     **kwargs: Any,
 ) -> Any:
     """Make an asynchronous HTTP request to a static file."""
+    data: Any = None
     if use_cache is True:
         async with CachedSession(cache=backend) as cached_session:
             try:
```
