fix(typing): update types to be compatible with latest mypy (#4234)
* Initial mypy fixes for CI

* Ignored mypy error with type and type constructors

* Fix formatting

* Fixed incorrect typing

* Undo initial change

Co-authored-by: Kyle Verhoog <kyle@verhoog.ca>
Yun-Kim and Kyle-Verhoog authored Sep 27, 2022
1 parent ec473f8 commit 5140de4
Showing 12 changed files with 34 additions and 24 deletions.
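Most of the deletions in this commit remove # type: ignore[override] comments that the latest mypy evidently no longer needs. Once an ignore comment stops suppressing an error, a mypy run with warn_unused_ignores enabled fails on the stale comment itself, which is a plausible reason these comments are dropped here. The snippet below is a minimal, hypothetical illustration of that failure mode; the class names are invented and are not taken from the ddtrace codebase.

# Hypothetical example: with warn_unused_ignores = True in the mypy config,
# an ignore comment that no longer suppresses anything is reported as
# something like: error: Unused "type: ignore" comment
class Service(object):
    def _stop_service(self):
        # type: () -> None
        pass


class Worker(Service):
    # The override is compatible, so a recent mypy would flag the ignore
    # below as unused, which is why such comments are removed in this commit.
    def _stop_service(self):  # type: ignore[override]
        # type: () -> None
        super(Worker, self)._stop_service()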
4 changes: 2 additions & 2 deletions ddtrace/debugging/_debugger.py
@@ -596,14 +596,14 @@ def _on_configuration(self, event, probes):
else:
raise ValueError("Unknown probe poller event %r" % event)

- def _stop_service(self): # type: ignore[override]
+ def _stop_service(self):
# type: () -> None
self._function_store.restore_all()
for service in self._services:
service.stop()
service.join()

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: () -> None
for service in self._services:
service.start()
2 changes: 1 addition & 1 deletion ddtrace/internal/atexit.py
@@ -45,7 +45,7 @@ def register(
return func

def unregister(func):
- # type: (typing.Callable[..., None]) -> None
+ # type: (typing.Callable[..., typing.Any]) -> None
"""
Unregister an exit function which was previously registered using
atexit.register.
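A short sketch of why the parameter above is widened from Callable[..., None] to Callable[..., Any]: under mypy, a callback parameter annotated as returning None rejects exit functions that happen to return a value, even though their return value is simply ignored. The cleanup function below is invented for illustration.

import typing


def unregister(func):
    # type: (typing.Callable[..., typing.Any]) -> None
    """Hypothetical stand-in for the unregister helper above."""


def cleanup():
    # type: () -> int
    return 0


# Fine with the widened annotation; with the old Callable[..., None] parameter,
# mypy rejects this call because cleanup's int return type is incompatible with None.
unregister(cleanup)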
2 changes: 1 addition & 1 deletion ddtrace/internal/runtime/runtime_metrics.py
@@ -148,7 +148,7 @@ def flush(self):
log.debug("Writing metric %s:%s", key, value)
self._dogstatsd_client.distribution(key, value)

- def _stop_service(self): # type: ignore[override]
+ def _stop_service(self):
# type: (...) -> None
# De-register span hook
super(RuntimeWorker, self)._stop_service()
9 changes: 4 additions & 5 deletions ddtrace/internal/utils/cache.py
@@ -9,9 +9,8 @@
miss = object()

T = TypeVar("T")
- S = TypeVar("S")
- F = Callable[[T], S]
- M = Callable[[Any, T], S]
+ F = Callable[[T], Any]
+ M = Callable[[Any, T], Any]


class LFUCache(dict):
@@ -29,7 +28,7 @@ def __init__(self, maxsize=256):
self.lock = RLock()

def get(self, key, f): # type: ignore[override]
- # type: (T, F) -> S
+ # type: (T, F) -> Any
"""Get a value from the cache.
If the value with the given key is not in the cache, the expensive
@@ -69,7 +68,7 @@ def cached_wrapper(f):
cache = LFUCache(maxsize)

def cached_f(key):
- # type: (T) -> S
+ # type: (T) -> Any
return cache.get(key, f)

cached_f.invalidate = cache.clear # type: ignore[attr-defined]
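The alias change above removes the return-type variable S, which could only be reached through the bare aliases F and M, a shape that evidently trips the latest mypy; relaxing the return types to Any sidesteps it. A rough, simplified reconstruction of the problematic pattern follows (not the exact ddtrace code):

from typing import Callable, TypeVar

T = TypeVar("T")
S = TypeVar("S")

F = Callable[[T], S]  # generic alias; when used bare, its parameters become Any


def get(key, f):
    # type: (T, F) -> S
    # S never appears in a position mypy can solve from the arguments, so a
    # recent mypy rejects this with an error along the lines of "a function
    # returning TypeVar should receive at least one argument containing the
    # same TypeVar"; returning Any, as the commit does, avoids the problem.
    return f(key)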
2 changes: 1 addition & 1 deletion ddtrace/internal/writer.py
@@ -587,7 +587,7 @@ def flush_queue(self, raise_exc=False):
def periodic(self):
self.flush_queue(raise_exc=False)

- def _stop_service( # type: ignore[override]
+ def _stop_service(
self,
timeout=None, # type: Optional[float]
):
2 changes: 1 addition & 1 deletion ddtrace/profiling/collector/__init__.py
@@ -77,5 +77,5 @@ def _create_capture_sampler(collector):

@attr.s
class CaptureSamplerCollector(Collector):
- capture_pct = attr.ib(factory=attr_utils.from_env("DD_PROFILING_CAPTURE_PCT", 1.0, float))
+ capture_pct = attr.ib(factory=attr_utils.from_env("DD_PROFILING_CAPTURE_PCT", 1.0, float)) # type: ignore[arg-type]
_capture_sampler = attr.ib(default=attr.Factory(_create_capture_sampler, takes_self=True), init=False, repr=False)
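For context on the # type: ignore[arg-type] comments added here and in the profiling collectors below: attr.ib(factory=...) expects a zero-argument callable, and attr_utils.from_env builds one that produces a value from an environment variable when the instance is created. Per the commit message, the latest mypy raises an arg-type error when a type constructor such as int or float is involved in these calls, and the commit silences it with targeted ignores. The sketch below assumes a simplified stand-in for from_env; it is not the real helper's signature.

import os
import typing

import attr

T = typing.TypeVar("T")


def from_env(name, default, value_type):
    # type: (str, T, typing.Callable[[typing.Any], T]) -> typing.Callable[[], T]
    # Hypothetical stand-in for attr_utils.from_env: return a zero-argument
    # factory that reads the environment variable at instantiation time.
    def _factory():
        # type: () -> T
        return value_type(os.environ.get(name, default))

    return _factory


@attr.s
class CaptureSamplerCollector(object):
    # Passing the class float as the converter is fine at runtime; with the
    # real helper's annotations, recent mypy reports an [arg-type] complaint
    # around calls like this, which the diff above suppresses line by line.
    capture_pct = attr.ib(factory=from_env("DD_PROFILING_CAPTURE_PCT", 1.0, float))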
6 changes: 3 additions & 3 deletions ddtrace/profiling/collector/_lock.py
@@ -183,7 +183,7 @@ def __get__(self, instance, owner=None):
class LockCollector(collector.CaptureSamplerCollector):
"""Record lock usage."""

- nframes = attr.ib(factory=attr_utils.from_env("DD_PROFILING_MAX_FRAMES", 64, int))
+ nframes = attr.ib(factory=attr_utils.from_env("DD_PROFILING_MAX_FRAMES", 64, int)) # type: ignore[arg-type]
endpoint_collection_enabled = attr.ib(
factory=attr_utils.from_env("DD_PROFILING_ENDPOINT_COLLECTION_ENABLED", True, formats.asbool)
)
@@ -203,13 +203,13 @@ def _set_original(
# type: (...) -> None
pass

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: (...) -> None
"""Start collecting lock usage."""
self.patch()
super(LockCollector, self)._start_service()

- def _stop_service(self): # type: ignore[override]
+ def _stop_service(self):
# type: (...) -> None
"""Stop collecting lock usage."""
super(LockCollector, self)._stop_service()
2 changes: 1 addition & 1 deletion ddtrace/profiling/collector/asyncio.py
@@ -29,7 +29,7 @@ class AsyncioLockCollector(_lock.LockCollector):

PROFILED_LOCK_CLASS = _ProfiledAsyncioLock

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: (...) -> None
"""Start collecting lock usage."""
try:
14 changes: 10 additions & 4 deletions ddtrace/profiling/collector/memalloc.py
@@ -88,12 +88,18 @@ class MemoryCollector(collector.PeriodicCollector):
_interval = attr.ib(default=_DEFAULT_INTERVAL, repr=False)

# TODO make this dynamic based on the 1. interval and 2. the max number of events allowed in the Recorder
- _max_events = attr.ib(factory=attr_utils.from_env("_DD_PROFILING_MEMORY_EVENTS_BUFFER", _DEFAULT_MAX_EVENTS, int))
- max_nframe = attr.ib(factory=attr_utils.from_env("DD_PROFILING_MAX_FRAMES", 64, int))
+ _max_events = attr.ib(
+ factory=attr_utils.from_env(
+ "_DD_PROFILING_MEMORY_EVENTS_BUFFER",
+ _DEFAULT_MAX_EVENTS,
+ int, # type: ignore[arg-type]
+ )
+ )
+ max_nframe = attr.ib(factory=attr_utils.from_env("DD_PROFILING_MAX_FRAMES", 64, int)) # type: ignore[arg-type]
heap_sample_size = attr.ib(type=int, factory=_get_default_heap_sample_size)
ignore_profiler = attr.ib(factory=attr_utils.from_env("DD_PROFILING_IGNORE_PROFILER", False, formats.asbool))

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: (...) -> None
"""Start collecting memory profiles."""
if _memalloc is None:
@@ -103,7 +109,7 @@ def _start_service(self): # type: ignore[override]

super(MemoryCollector, self)._start_service()

- def _stop_service(self): # type: ignore[override]
+ def _stop_service(self):
# type: (...) -> None
super(MemoryCollector, self)._stop_service()

5 changes: 4 additions & 1 deletion ddtrace/profiling/exporter/http.py
@@ -47,7 +47,10 @@ class PprofHTTPExporter(pprof.PprofExporter):
api_key = attr.ib(default=None, type=typing.Optional[str])
# Do not use the default agent timeout: it is too short, the agent is just a unbuffered proxy and the profiling
# backend is not as fast as the tracer one.
- timeout = attr.ib(factory=attr_utils.from_env("DD_PROFILING_API_TIMEOUT", 10.0, float), type=float)
+ timeout = attr.ib(
+ factory=attr_utils.from_env("DD_PROFILING_API_TIMEOUT", 10.0, float), # type: ignore[arg-type]
+ type=float,
+ )
service = attr.ib(default=None, type=typing.Optional[str])
env = attr.ib(default=None, type=typing.Optional[str])
version = attr.ib(default=None, type=typing.Optional[str])
4 changes: 2 additions & 2 deletions ddtrace/profiling/profiler.py
@@ -252,7 +252,7 @@ def copy(self):
}
)

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: (...) -> None
"""Start the profiler."""
collectors = []
@@ -270,7 +270,7 @@ if self._scheduler is not None:
if self._scheduler is not None:
self._scheduler.start()

- def _stop_service( # type: ignore[override]
+ def _stop_service(
self, flush=True # type: bool
):
# type: (...) -> None
6 changes: 4 additions & 2 deletions ddtrace/profiling/scheduler.py
@@ -20,15 +20,17 @@ class Scheduler(periodic.PeriodicService):
recorder = attr.ib()
exporters = attr.ib()
before_flush = attr.ib(default=None, eq=False)
- _interval = attr.ib(factory=attr_utils.from_env("DD_PROFILING_UPLOAD_INTERVAL", 60.0, float))
+ _interval = attr.ib(
+ factory=attr_utils.from_env("DD_PROFILING_UPLOAD_INTERVAL", 60.0, float) # type: ignore[arg-type]
+ )
_configured_interval = attr.ib(init=False)
_last_export = attr.ib(init=False, default=None, eq=False)

def __attrs_post_init__(self):
# Copy the value to use it later since we're going to adjust the real interval
self._configured_interval = self.interval

- def _start_service(self): # type: ignore[override]
+ def _start_service(self):
# type: (...) -> None
"""Start the scheduler."""
LOG.debug("Starting scheduler")
