Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add a linspace class method #44

Merged
merged 13 commits into from
Oct 22, 2024
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.5
rev: v0.6.7
hooks:
# Run the linter.
- id: ruff
Expand Down
1 change: 1 addition & 0 deletions cxotime/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from .convert import * # noqa: F401, F403
from .cxotime import * # noqa: F401, F403
from .utils import get_range_in_chunks
jeanconn marked this conversation as resolved.
Show resolved Hide resolved

__version__ = ska_helpers.get_version(__package__)

Expand Down
1 change: 1 addition & 0 deletions cxotime/tests/test_cxotime.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Simple test of CxoTime. The base Time object is extremely well
tested, so this simply confirms that the add-on in CxoTime works.
"""

import io
import time
from dataclasses import dataclass
Expand Down
89 changes: 89 additions & 0 deletions cxotime/tests/test_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
import astropy.units as u
import numpy as np
import pytest

from cxotime import CxoTime, get_range_in_chunks


@pytest.mark.parametrize(
    "start, stop, dt_max, expected_len, expected_values",
    [
        ("2000:001", "2000:001", 1 * u.day, 2, ["2000:001", "2000:001"]),
        (
            "2000:001",
            "2000:005",
            24 * u.hour,
            5,
            ["2000:001", "2000:002", "2000:003", "2000:004", "2000:005"],
        ),
        (
            "2000:001",
            "2000:005",
            2880 * u.minute,
            3,
            ["2000:001", "2000:003", "2000:005"],
        ),
        ("2000:001", "2000:005", 10 * 86400 * u.second, 2, ["2000:001", "2000:005"]),
    ],
)
def test_get_range_in_chunks(start, stop, dt_max, expected_len, expected_values):
    """Chunk edges are uniform, cover [start, stop], and match expected values."""
    t_start = CxoTime(start)
    t_stop = CxoTime(stop)
    edges = get_range_in_chunks(t_start, t_stop, dt_max)

    # With more than one interval, every interval should have the same length
    if len(edges) > 2:
        deltas = (edges[1:] - edges[:-1]).sec
        assert np.all(np.isclose(deltas, dt_max.to(u.second).value))

    # The edges must bracket the requested range exactly
    assert edges[0] == t_start
    assert edges[-1] == t_stop

    # The full edge list matches the expectation
    assert len(edges) == expected_len
    for edge, expected in zip(edges, expected_values, strict=False):
        assert edge == CxoTime(expected)


def test_get_range_in_chunks_negative():
    """A reversed range (start > stop) yields descending daily edges."""
    edges = get_range_in_chunks(CxoTime("2000:005"), CxoTime("2000:001"), 24 * u.hour)
    expected_values = ["2000:005", "2000:004", "2000:003", "2000:002", "2000:001"]
    assert len(edges) == 5
    for edge, expected in zip(edges, expected_values, strict=False):
        assert edge == CxoTime(expected)


def test_get_range_in_chunks_small():
    """A range shorter than dt_max is still covered by both endpoints."""
    edges = get_range_in_chunks(CxoTime("2020:001"), CxoTime("2020:005"), 30 * u.day)
    assert len(edges) == 2
    for edge, expected in zip(edges, ["2020:001", "2020:005"], strict=False):
        assert edge == CxoTime(expected)


def test_get_range_in_chunks_zero():
    """A zero dt_max raises ValueError with the expected message."""
    with pytest.raises(ValueError, match="dt_max must be positive nonzero"):
        get_range_in_chunks(CxoTime("2020:001"), CxoTime("2020:005"), 0 * u.day)


def test_get_range_in_chunks_negative_dt():
    """A negative dt_max raises ValueError with the expected message."""
    with pytest.raises(ValueError, match="dt_max must be positive nonzero"):
        get_range_in_chunks(CxoTime("2020:001"), CxoTime("2020:005"), -1 * u.day)
44 changes: 44 additions & 0 deletions cxotime/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import astropy.units as u
import numpy as np

from cxotime import CxoTime, CxoTimeLike


def get_range_in_chunks(start: CxoTimeLike, stop: CxoTimeLike, dt_max: u.Quantity):
    """
    Get uniform time chunks for a given time range.

    Output times are spaced uniformly by up to ``dt_max`` and cover the time
    range from ``start`` to ``stop``.

    Parameters
    ----------
    start : CxoTimeLike
        Start time of the time range.
    stop : CxoTimeLike
        Stop time of the time range.
    dt_max : u.Quantity (timelike)
        Maximum time interval for each chunk. Must be positive and nonzero.

    Returns
    -------
    CxoTime
        CxoTime with time bin edges for each chunk.

    Raises
    ------
    ValueError
        If ``dt_max`` is zero or negative.
    """
    start = CxoTime(start)
    stop = CxoTime(stop)

    # Require that dt_max is a positive nonzero quantity
    if dt_max <= 0 * u.s:
        raise ValueError("dt_max must be positive nonzero")

    # Let this work if start > stop, but flip the sign of dt_max so the
    # chunk count below comes out positive.
    if start > stop:
        dt_max = -dt_max

    # Number of chunks needed so that each interval is <= dt_max. Cast to int
    # so np.arange gets an integer count (avoids float-count fragility), and
    # use max(..., 1) to handle the edge case start == stop (still two edges).
    n_chunk = max(int(np.ceil(float((stop - start) / dt_max))), 1)

    # Uniform spacing that divides the range into exactly n_chunk intervals
    dt = (stop - start) / n_chunk
    times = start + np.arange(n_chunk + 1) * dt
    return times
59 changes: 59 additions & 0 deletions ruff-base.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Copied originally from pandas. This config requires ruff >= 0.2.
target-version = "py311"

# fix = true
lint.unfixable = []

lint.select = [
"I", # isort
"F", # pyflakes
"E", "W", # pycodestyle
"YTT", # flake8-2020
"B", # flake8-bugbear
"Q", # flake8-quotes
"T10", # flake8-debugger
"INT", # flake8-gettext
"PLC", "PLE", "PLR", "PLW", # pylint
"PIE", # misc lints
"PYI", # flake8-pyi
"TID", # tidy imports
"ISC", # implicit string concatenation
"TCH", # type-checking imports
"C4", # comprehensions
"PGH" # pygrep-hooks
]

# Some additional rules that are useful
lint.extend-select = [
"UP009", # UTF-8 encoding declaration is unnecessary
"SIM118", # Use `key in dict` instead of `key in dict.keys()`
"D205", # One blank line required between summary line and description
"ARG001", # Unused function argument
"RSE102", # Unnecessary parentheses on raised exception
"PERF401", # Use a list comprehension to create a transformed list
]

lint.ignore = [
"ISC001", # Disable this for compatibility with ruff format
"E402", # module level import not at top of file
"E731", # do not assign a lambda expression, use a def
"PLR2004", # Magic number
"B028", # No explicit `stacklevel` keyword argument found
"PLR0913", # Too many arguments to function call
"PLR1730", # Checks for if statements that can be replaced with min() or max() calls
]

extend-exclude = [
"docs",
]

[lint.pycodestyle]
max-line-length = 100 # E501 reports lines that exceed the length of 100.

[lint.extend-per-file-ignores]
"__init__.py" = ["E402", "F401", "F403"]
# For tests:
# - D205: Don't worry about test docstrings
# - ARG001: Unused function argument false positives for some fixtures
# - E501: Line-too-long
"**/tests/test_*.py" = ["D205", "ARG001", "E501"]
71 changes: 19 additions & 52 deletions ruff.toml
Original file line number Diff line number Diff line change
@@ -1,58 +1,25 @@
# Copied originally from pandas
target-version = "py310"
extend = "ruff-base.toml"

# fix = true
unfixable = []

select = [
"I", # isort
"F", # pyflakes
"E", "W", # pycodestyle
"YTT", # flake8-2020
"B", # flake8-bugbear
"Q", # flake8-quotes
"T10", # flake8-debugger
"INT", # flake8-gettext
"PLC", "PLE", "PLR", "PLW", # pylint
"PIE", # misc lints
"PYI", # flake8-pyi
"TID", # tidy imports
"ISC", # implicit string concatenation
"TCH", # type-checking imports
"C4", # comprehensions
"PGH" # pygrep-hooks
]

# Some additional rules that are useful
extend-select = [
"UP009", # UTF-8 encoding declaration is unnecessary
"SIM118", # Use `key in dict` instead of `key in dict.keys()`
"D205", # One blank line required between summary line and description
"ARG001", # Unused function argument
"RSE102", # Unnecessary parentheses on raised exception
"PERF401", # Use a list comprehension to create a transformed list
# These are files to exclude for this project.
extend-exclude = [
# "**/*.ipynb", # commonly not ruff-compliant
]

ignore = [
"ISC001", # Disable this for compatibility with ruff format
"B028", # No explicit `stacklevel` keyword argument found
# These are rules that commonly cause many ruff warnings. Code will be improved by
# incrementally fixing code to adhere to these rules, but for practical purposes they
# can be ignored by uncommenting each one. You can also add to this list as needed.
lint.extend-ignore = [
"B905", # `zip()` without an explicit `strict=` parameter
"E731", # do not assign a lambda expression, use a def
"PLC1901", # compare-to-empty-string
# "PLC1901", # compare-to-empty-string
# "PLR0911", # Too many returns
"PLR0912", # Too many branches
"PLR2004", # Magic number
# "PLR0915", # Too many statements
# "PGH004", # Use specific rule codes when using `noqa`
# "C401", # Unnecessary generator (rewrite as a `set` comprehension)
# "C402", # Unnecessary generator (rewrite as a dict comprehension)
# "C405", # Unnecessary `list` literal (rewrite as a `set` literal)
# "C408", # Unnecessary `dict` call (rewrite as a literal)
# "C416", # Unnecessary `dict` comprehension (rewrite using `dict()`)
# "G010", # warn is deprecated in favor of warning
# "PYI056", # Calling `.append()` on `__all__` may not be supported by all type checkers
]

extend-exclude = [
"docs",
]

[pycodestyle]
max-line-length = 100 # E501 reports lines that exceed the length of 100.

[lint.extend-per-file-ignores]
"__init__.py" = ["E402", "F401", "F403"]
# For tests:
# - D205: Don't worry about test docstrings
# - ARG001: Unused function argument false positives for some fixtures
"**/tests/test_*.py" = ["D205", "ARG001"]