Skip to content

Commit

Permalink
Merge branch 'master' into sty_precommit_hooks
Browse files Browse the repository at this point in the history
  • Loading branch information
matthewturk authored Jul 16, 2020
2 parents 084039c + 11758a5 commit eda82be
Show file tree
Hide file tree
Showing 41 changed files with 379 additions and 175 deletions.
4 changes: 3 additions & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@ detail. Why is this change required? What problem does it solve?-->

<!-- Note that some of these check boxes may not apply to all pull requests -->

- [ ] Code passes flake8 checker
- [ ] pass `flake8 yt/`
- [ ] pass `isort -rc . --check-only`
- [ ] pass `black --check yt/`
- [ ] New features are documented, with docstrings and narrative docs
- [ ] Adds a test for any bugs fixed. Adds tests for new features.

Expand Down
5 changes: 5 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,11 @@ jobs:
python: 3.6
script: flake8 yt/

- stage: Lint
python: 3.6
script:
- isort --check-only -rc yt/
- black --check yt/

- stage: tests
name: "Python: 3.6 Minimal Dependency Unit Tests"
python: 3.6
Expand Down
22 changes: 22 additions & 0 deletions CONTRIBUTING.rst
Original file line number Diff line number Diff line change
Expand Up @@ -754,6 +754,28 @@ blacklist a large number of the full list of rules that are checked by
for mistakes on every commit because only updated lines are inspected. Note that it
may take a little while the first time it is run.

Import Formatting
-----------------

We use ``isort`` to enforce PEP-8 guidelines for import ordering.
Import sections are sorted in decreasing order of priority:
FUTURE > STDLIB > THIRD PARTY > FIRST PARTY > EXPLICITLY LOCAL

``isort`` can be installed via ``pip``

.. code-block:: bash

    $ pip install isort

To validate import order, run ``isort`` recursively at the top level

.. code-block:: bash

    $ isort -rc . --check-only

If any errors are detected, rerun this command without the ``--check-only`` flag to fix them.


Source code style guide
-----------------------

Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
[![Data Hub](https://img.shields.io/badge/data-hub-orange.svg)](https://hub.yt/)
[![Powered by NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](http://numfocus.org)
[![Sponsor our Project](https://img.shields.io/badge/donate-to%20yt-blueviolet)](https://numfocus.salsalabs.org/donate-to-yt/index.html)

[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

<a href="http://yt-project.org"><img src="doc/source/_static/yt_logo.png" width="300"></a>

yt is an open-source, permissively-licensed python package for analyzing and
Expand Down
35 changes: 35 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
[build-system]
# See https://github.com/scipy/scipy/pull/10431 for the AIX issue.
requires = [
"setuptools>=19.6",
"wheel",

# keep in sync with travis.yml "minimal" specs (Cython and numpy for py36)
"Cython>=0.26.1",
"numpy==1.13.3; python_version=='3.6' and platform_system!='AIX'",
"numpy==1.18.1; python_version=='3.7' and platform_system!='AIX'",
"numpy==1.18.4; python_version=='3.8' and platform_system!='AIX'",
]

# To be kept consistent with "Code Style" section in CONTRIBUTING.rst
[tool.black]
line-length = 88
target-version = ['py36', 'py37', 'py38']
include = '\.pyi?$'
exclude = '''
/(
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
| yt/extern
| yt/frontends/stream/sample_data
)/
| yt/visualization/_colormap_data.py
'''
13 changes: 13 additions & 0 deletions scripts/strip_trailing_whitespace.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import re
from pathlib import Path

yt_dir = Path(__file__).parent.parent
for file in yt_dir.glob("yt/**/*.py"):
matches = None
with open(file, mode="r") as fileobj:
lines = fileobj.readlines()
with open(file, mode="w") as fileobj:
striped_lines = [L.rstrip() for L in lines]
fileobj.write("\n".join(striped_lines))
if striped_lines and striped_lines[-1]:
fileobj.write("\n")
49 changes: 46 additions & 3 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,56 @@
#tag_build = .dev
#tag_svn_revision = 1


# To be kept consistent with "Coding Style Guide" section in CONTRIBUTING.rst
[flake8]
# we exclude:
# api.py, mods.py, _mpl_imports.py, and __init__.py files to avoid spurious
# unused import errors
# autogenerated __config__.py files
# vendored libraries
exclude = doc,benchmarks,*/api.py,*/__init__.py,*/__config__.py,yt/visualization/_mpl_imports.py,yt/utilities/lodgeit.py,yt/utilities/lru_cache.py,yt/utilities/poster/*,yt/extern/*,yt/mods.py,yt/utilities/fits_image.py,yt/units/*
max-line-length=999
ignore = E111,E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E201,E202,E211,E221,E222,E227,E228,E241,E301,E203,E225,E226,E231,E251,E261,E262,E265,E266,E302,E303,E305,E306,E402,E502,E701,E703,E722,E741,E731,W291,W292,W293,W391,W503,W504,W605
max-line-length=88
exclude = doc,
benchmarks,
*/api.py, # avoid spurious "unused import"
*/__init__.py, # avoid spurious "unused import"
*/__config__.py, # autogenerated
yt/utilities/poster,
yt/extern, # vendored libraries
yt/units, # wrapper around unyt, avoid spurious "unused import"
yt/frontends/stream/sample_data, # autogenerated

# individual files
yt/visualization/_mpl_imports.py,
yt/utilities/fits_image.py,
yt/utilities/lodgeit.py,
yt/utilities/lru_cache.py,
yt/mods.py,
yt/visualization/_colormap_data.py,

ignore = E203, # Whitespace before ':' (black compatibility)
E231, # Missing whitespace after ',', ';', or ':'
E266, # Too many leading '#' for block comment
E302, # Expected 2 blank lines, found 0
E306, # Expected 1 blank line before a nested definition
E501, # Line too long (black compatibility)
E722, # Do not use bare except, specify exception instead TODO: handle
E731, # Do not assign a lambda expression, use a def TODO: add noqas in places this triggers instead of ignoring it everywhere
E741, # Do not use variables named 'I', 'O', or 'l'
W503, # Line break occurred before a binary operator (black compatibility)
W605, # Invalid escape sequence 'x'

jobs=8

# To be kept consistent with "Import Formatting" section in CONTRIBUTING.rst
[tool:isort]
multi_line_output=3
include_trailing_comma=True
force_grid_wrap=0
combine_as_imports=True
line_length=88
# isort can't be applied to yt/__init__.py because it creates circular imports
skip = venv, doc, benchmarks, yt/__init__.py, yt/extern
known_third_party = IPython, nose, numpy, sympy, matplotlib, unyt, git, yaml, dateutil, requests, coverage, pytest
known_first_party = yt
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
8 changes: 5 additions & 3 deletions tests/lint_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
flake8==3.6.0
flake8==3.8.1
mccabe==0.6.1
pycodestyle==2.4.0
pyflakes==2.0.0
pycodestyle==2.6.0
pyflakes==2.2.0
isort==4.3
black==19.10b0
2 changes: 1 addition & 1 deletion tests/test_requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ libconf==1.0.1
cartopy==0.17.0
pyaml==17.10.0
mpi4py==3.0.3
unyt==2.7.2
git+https://github.com/yt-project/unyt@e02254e3ecc11a84854a22109354fe3f47bd8985#egg=unyt
pyyaml>=4.2b1
xarray==0.12.3
firefly_api>=0.0.2
Expand Down
3 changes: 2 additions & 1 deletion yt/data_objects/derived_quantities.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,7 +449,8 @@ class AngularMomentumVector(DerivedQuantity):
>>> ds = load("FIRE_M12i_ref11/snapshot_600.hdf5")
>>> _, c = ds.find_max(('gas', 'density'))
>>> sp = ds.sphere(c, (10, 'kpc'))
>>> print(sp.quantities.angular_momentum_vector(use_gas=False, use_particles=True, particle_type='PartType0'))
>>> search_args = dict(use_gas=False, use_particles=True, particle_type='PartType0')
>>> print(sp.quantities.angular_momentum_vector(**search_args))
"""
def count_values(self, use_gas=True, use_particles=True, particle_type='all'):
Expand Down
15 changes: 9 additions & 6 deletions yt/data_objects/tests/test_fluxes.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,31 +69,34 @@ def test_export_obj(self):
rhos = [0.5, 0.25]
trans = [0.5, 1.0]
for i, r in enumerate(rhos):
basename = "my_galaxy_color"
surf = ds.surface(sp,'density',r)
surf.export_obj("my_galaxy_color".format(i),
surf.export_obj(basename,
transparency=trans[i],
color_field='temperature', dist_fac=1.0,
plot_index=i, color_field_max=ma,
color_field_min=mi)

assert os.path.exists('my_galaxy_color.obj')
assert os.path.exists('my_galaxy_color.mtl')
assert os.path.exists('%s.obj' % basename)
assert os.path.exists('%s.mtl' % basename)

def _Emissivity(field, data):
return (data['density']*data['density'] *
np.sqrt(data['temperature']))
ds.add_field("emissivity", sampling_type='cell', function=_Emissivity,
units=r"g**2*sqrt(K)/cm**6")
for i, r in enumerate(rhos):
basename = "my_galaxy_emis"
surf = ds.surface(sp,'density',r)
surf.export_obj("my_galaxy_emis".format(i),
surf.export_obj(basename,
transparency=trans[i],
color_field='temperature',
emit_field='emissivity',
dist_fac=1.0, plot_index=i)

assert os.path.exists('my_galaxy_emis.obj')
assert os.path.exists('my_galaxy_emis.mtl')
basename = "my_galaxy_emis"
assert os.path.exists('%s.obj' % basename)
assert os.path.exists('%s.mtl' % basename)

def test_correct_output_unit_fake_ds():
# see issue #1368
Expand Down
2 changes: 1 addition & 1 deletion yt/exthook.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
:license: BSD, see LICENSE for more details.
"""
# This source code was originally in flask/exthook.py
import sys
import os
import sys


class ExtensionImporter:
Expand Down
6 changes: 5 additions & 1 deletion yt/frontends/artio/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,11 @@ def _parse_parameter_file(self):
self.artio_parameters["length_unit"][0] * abox

if self.artio_parameters["DeltaDC"][0] != 0:
mylog.warning("DeltaDC != 0, which implies auni != abox. Be sure you understand which expansion parameter is appropriate for your use! (Gnedin, Kravtsov, & Rudd 2011)")
mylog.warning(
"DeltaDC != 0, which implies auni != abox. "
"Be sure you understand which expansion parameter "
"is appropriate for your use! (Gnedin, Kravtsov, & Rudd 2011)"
)
else:
self.cosmological_simulation = False

Expand Down
3 changes: 2 additions & 1 deletion yt/frontends/flash/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@

class FLASHGrid(AMRGridPatch):
_id_offset = 1
#__slots__ = ["_level_id", "stop_index"]
# __slots__ = ["_level_id", "stop_index"]

def __init__(self, id, index, level):
AMRGridPatch.__init__(self, id, filename = index.index_filename,
index = index)
Expand Down
12 changes: 7 additions & 5 deletions yt/frontends/open_pmd/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,14 +246,16 @@ def _count_grids(self):
def _parse_index(self):
"""Fills each grid with appropriate properties (extent, dimensions, ...)
This calculates the properties of every OpenPMDGrid based on the total number of grids in the simulation.
The domain is divided into ``self.num_grids`` (roughly) equally sized chunks along the x-axis.
``grid_levels`` is always equal to 0 since we only have one level of refinement in openPMD.
This calculates the properties of every OpenPMDGrid based on the total number of
grids in the simulation. The domain is divided into ``self.num_grids`` (roughly)
equally sized chunks along the x-axis. ``grid_levels`` is always equal to 0
since we only have one level of refinement in openPMD.
Notes
-----
``self.grid_dimensions`` is rounded to the nearest integer. Grid edges are calculated from this dimension.
Grids with dimensions [0, 0, 0] are particle only. The others do not have any particles affiliated with them.
``self.grid_dimensions`` is rounded to the nearest integer. Grid edges are
calculated from this dimension. Grids with dimensions [0, 0, 0] are particle
only. The others do not have any particles affiliated with them.
"""
f = self.dataset._handle
bp = self.dataset.base_path
Expand Down
31 changes: 17 additions & 14 deletions yt/frontends/open_pmd/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,28 +86,30 @@ class OpenPMDFieldInfo(FieldInfoContainer):
"""Specifies which fields from the dataset yt should know about.
``self.known_other_fields`` and ``self.known_particle_fields`` must be populated.
Entries for both of these lists must be tuples of the form
("name", ("units", ["fields", "to", "alias"], "display_name"))
These fields will be represented and handled in yt in the way you define them here.
The fields defined in both ``self.known_other_fields`` and ``self.known_particle_fields`` will only be added
to a dataset (with units, aliases, etc), if they match any entry in the ``OpenPMDHierarchy``'s ``self.field_list``.
Entries for both of these lists must be tuples of the form ("name", ("units",
["fields", "to", "alias"], "display_name")) These fields will be represented and
handled in yt in the way you define them here. The fields defined in both
``self.known_other_fields`` and ``self.known_particle_fields`` will only be added to
a dataset (with units, aliases, etc), if they match any entry in the
``OpenPMDHierarchy``'s ``self.field_list``.
Notes
-----
Contrary to many other frontends, we dynamically obtain the known fields from the simulation output.
The openPMD markup is extremely flexible - names, dimensions and the number of individual datasets
can (and very likely will) vary.
Contrary to many other frontends, we dynamically obtain the known fields from the
simulation output. The openPMD markup is extremely flexible - names, dimensions and
the number of individual datasets can (and very likely will) vary.
openPMD states that names of records and their components are only allowed to contain the
openPMD states that record names and their components are only allowed to contain
* characters a-Z,
* the numbers 0-9
* and the underscore _
* (equivalently, the regex \w).
Since yt widely uses the underscore in field names, openPMD's underscores (_) are replaced by hyphen (-).
Since yt widely uses the underscore in field names, openPMD's underscores (_) are
replaced by hyphen (-).
Derived fields will automatically be set up, if names and units of your known on-disk (or manually derived)
fields match the ones in [1].
Derived fields will automatically be set up, if names and units of your known
on-disk (or manually derived) fields match the ones in [1].
References
----------
Expand Down Expand Up @@ -166,8 +168,9 @@ def __init__(self, ds, field_list):
unit = str(YTQuantity(1, parsed).units)
ytattrib = str(recname).replace("_", "-")
if ytattrib == "position":
# Symbolically rename position to preserve yt's interpretation of the pfield
# particle_position is later derived in setup_absolute_positions in the way yt expects it
# Symbolically rename position to preserve yt's
# interpretation of the pfield particle_position is later
# derived in setup_absolute_positions in the way yt expects it
ytattrib = "positionCoarse"
if isinstance(record, h5.Dataset) or is_const_component(record):
name = ["particle", ytattrib]
Expand Down
3 changes: 2 additions & 1 deletion yt/frontends/open_pmd/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@ def _fill_cache(self, ptype, index=0, offset=None):
self.cache[i])
else:
# Pad accordingly with zeros to make 1D/2D datasets compatible
# These have to be the same shape as the existing axes since that equals the number of particles
# These have to be the same shape as the existing axes since that
# equals the number of particles
self.cache[i] = np.zeros(offset)

def _read_particle_selection(self, chunks, selector, fields):
Expand Down
10 changes: 8 additions & 2 deletions yt/frontends/open_pmd/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,11 +107,17 @@ def get_component(group, component_name, index=0, offset=None):
else:
shape[0] = offset
# component is constant, craft an array by hand
# mylog.debug("open_pmd - get_component: {}/{} [const {}]".format(group.name, component_name, shape))
# mylog.debug(
# "open_pmd - get_component: {}/{} [const {}]".format(group.name, component_name, shape)
# )
return np.full(shape, record_component.attrs["value"] * unit_si)
else:
if offset is not None:
offset += index
# component is a dataset, return it (possibly masked)
# mylog.debug("open_pmd - get_component: {}/{}[{}:{}]".format(group.name, component_name, index, offset))
# mylog.debug(
# "open_pmd - get_component: {}/{}[{}:{}]".format(
# group.name, component_name, index, offset
# )
# )
return np.multiply(record_component[index:offset], unit_si)
Loading

0 comments on commit eda82be

Please sign in to comment.