Skip to content

Commit

Permalink
Merge pull request #2640 from neutrinoceros/cleanup_object_inheritance
Browse files Browse the repository at this point in the history
Clean up some old Python 2 idioms
  • Loading branch information
munkm authored Jun 29, 2020
2 parents 41009b9 + 592a21e commit 6befef2
Show file tree
Hide file tree
Showing 109 changed files with 191 additions and 222 deletions.
2 changes: 1 addition & 1 deletion yt/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@
except IOError:
warnings.warn("unable to write new config file")

class YTConfigParser(configparser.ConfigParser, object):
class YTConfigParser(configparser.ConfigParser):
def __setitem__(self, key, val):
self.set(key[0], key[1], val)

Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/construction_data_containers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1073,7 +1073,7 @@ def _get_grid_bounds_size(self):

return bounds, size

class LevelState(object):
class LevelState:
current_dx = None
current_dims = None
current_level = None
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/derived_quantities.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def process_chunk(self, data, *args, **kwargs):
def reduce_intermediate(self, values):
raise NotImplementedError

class DerivedQuantityCollection(object):
class DerivedQuantityCollection:
def __new__(cls, data_source, *args, **kwargs):
inst = object.__new__(cls)
inst.data_source = data_source
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/level_sets/clump_info_items.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
def add_clump_info(name, function):
clump_info_registry[name] = ClumpInfoCallback(name, function)

class ClumpInfoCallback(object):
class ClumpInfoCallback:
r"""
A ClumpInfoCallback is a function that takes a clump, computes a
quantity, and returns a string to be printed out for writing clump info.
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/level_sets/clump_validators.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
def add_validator(name, function):
clump_validator_registry[name] = ClumpValidator(function)

class ClumpValidator(object):
class ClumpValidator:
r"""
A ClumpValidator is a function that takes a clump and returns
True or False as to whether the clump is valid and shall be kept.
Expand Down
4 changes: 2 additions & 2 deletions yt/data_objects/octree_subset.py
Original file line number Diff line number Diff line change
Expand Up @@ -445,7 +445,7 @@ def select_particles(self, selector, x, y, z):
mask = selector.select_points(x,y,z, 0.0)
return mask

class OctreeSubsetBlockSlicePosition(object):
class OctreeSubsetBlockSlicePosition:
def __init__(self, ind, block_slice):
self.ind = ind
self.block_slice = block_slice
Expand Down Expand Up @@ -496,7 +496,7 @@ def _field_parameter_state(self, field_parameters):
yield self.block_slice.octree_subset._field_parameter_state(
field_parameters)

class OctreeSubsetBlockSlice(object):
class OctreeSubsetBlockSlice:
def __init__(self, octree_subset):
self.octree_subset = octree_subset
# Cache some attributes
Expand Down
4 changes: 2 additions & 2 deletions yt/data_objects/particle_filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@
# One to one mapping
filter_registry = {}

class DummyFieldInfo(object):
class DummyFieldInfo:
particle_type = True
sampling_type = 'particle'

dfi = DummyFieldInfo()

class ParticleFilter(object):
class ParticleFilter:
def __init__(self, name, function, requires, filtered_type):
self.name = name
self.function = function
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/particle_trajectories.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import numpy as np
from yt.utilities.on_demand_imports import _h5py as h5py

class ParticleTrajectories(object):
class ParticleTrajectories:
r"""A collection of particle trajectories in time over a series of
datasets.
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def save_state(*args, **kwargs):
return tr
return save_state

class ProfileFieldAccumulator(object):
class ProfileFieldAccumulator:
def __init__(self, n_fields, size):
shape = size + (n_fields,)
self.values = np.zeros(shape, dtype="float64")
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/region_expression.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from yt.utilities.exceptions import YTDimensionalityError
from yt.visualization.line_plot import LineBuffer

class RegionExpression(object):
class RegionExpression:
_all_data = None
def __init__(self, ds):
self.ds = weakref.proxy(ds)
Expand Down
6 changes: 3 additions & 3 deletions yt/data_objects/static_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def __init__(cls, name, b, d):
output_type_registry[name] = cls
mylog.debug("Registering: %s as %s", name, cls)

class IndexProxy(object):
class IndexProxy:
# This is a simple proxy for Index objects. It enables backwards
# compatibility so that operations like .h.sphere, .h.print_stats and
# .h.grid_left_edge will correctly pass through to the various dataset or
Expand All @@ -108,7 +108,7 @@ def __getattr__(self, name):
return getattr(self.ds.index, name)
raise AttributeError

class MutableAttribute(object):
class MutableAttribute:
"""A descriptor for mutable data"""
def __init__(self, display_array = False):
self.data = weakref.WeakKeyDictionary()
Expand Down Expand Up @@ -1534,7 +1534,7 @@ def _reconstruct_ds(*args, **kwargs):
return ds

@functools.total_ordering
class ParticleFile(object):
class ParticleFile:
def __init__(self, ds, io, filename, file_id, range = None):
self.ds = ds
self.io = weakref.proxy(io)
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/tests/test_derived_quantities.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from __future__ import division


import numpy as np
import yt
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/tests/test_octree.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ def test_building_tree():
'''
ds = fake_sph_grid_ds()
octree = ds.octree(n_ref=1)
assert(type(octree) == YTOctree)
assert(type(octree) is YTOctree)
assert(octree[('index', 'x')].shape[0] == 456)

def test_saving_loading():
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/tests/test_particle_filter.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from __future__ import print_function


import numpy as np
import os
Expand Down
10 changes: 5 additions & 5 deletions yt/data_objects/time_series.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from yt.utilities.parameter_file_storage import \
simulation_time_series_registry

class AnalysisTaskProxy(object):
class AnalysisTaskProxy:
def __init__(self, time_series):
self.time_series = time_series

Expand Down Expand Up @@ -75,7 +75,7 @@ def get_filenames_from_glob_pattern(filenames):
"omega_matter", "omega_lambda", "omega_radiation",
"hubble_constant")

class TimeSeriesParametersContainer(object):
class TimeSeriesParametersContainer:
def __init__(self, data_object):
self.data_object = data_object

Expand All @@ -84,7 +84,7 @@ def __getattr__(self, attr):
return self.data_object.eval(get_ds_prop(attr)())
raise AttributeError(attr)

class DatasetSeries(object):
class DatasetSeries:
r"""The DatasetSeries object is a container of multiple datasets,
allowing easy iteration and computation on them.
Expand Down Expand Up @@ -449,7 +449,7 @@ def particle_trajectories(self, indices, fields=None, suppress_logging=False, pt
return ParticleTrajectories(self, indices, fields=fields, suppress_logging=suppress_logging,
ptype=ptype)

class TimeSeriesQuantitiesContainer(object):
class TimeSeriesQuantitiesContainer:
def __init__(self, data_object, quantities):
self.data_object = data_object
self.quantities = quantities
Expand All @@ -465,7 +465,7 @@ def run_quantity(*args, **kwargs):
return run_quantity
return run_quantity_wrapper(q, key)

class DatasetSeriesObject(object):
class DatasetSeriesObject:
def __init__(self, time_series, data_object_name, *args, **kwargs):
self.time_series = weakref.proxy(time_series)
self.data_object_name = data_object_name
Expand Down
2 changes: 1 addition & 1 deletion yt/data_objects/unions.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from yt.funcs import ensure_list

class Union(object):
class Union:
_union_type = ""
def __init__(self, name, sub_types):
self.name = name
Expand Down
2 changes: 1 addition & 1 deletion yt/exthook.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import os


class ExtensionImporter(object):
class ExtensionImporter:
"""This importer redirects imports from this submodule to other locations.
This makes it possible to transition from the old flaskext.name to the
newer flask_name without people having a hard time.
Expand Down
4 changes: 2 additions & 2 deletions yt/fields/derived_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def _TranslationFunc(field, data):
def NullFunc(field, data):
raise YTFieldNotFound(field.name)

class DerivedField(object):
class DerivedField:
"""
This is the base class used to describe a cell-by-cell derived field.
Expand Down Expand Up @@ -412,7 +412,7 @@ def get_latex_display_name(self):
return label


class FieldValidator(object):
class FieldValidator:
pass

class ValidateParameter(FieldValidator):
Expand Down
2 changes: 1 addition & 1 deletion yt/fields/domain_context.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
domain_context_registry = {}

class DomainContext(object):
class DomainContext:
class __metaclass__(type):
def __init__(cls, name, b, d):
type.__init__(cls, name, b, d)
Expand Down
4 changes: 2 additions & 2 deletions yt/fields/field_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ class fake_dataset(defaultdict):
ds.periodicity = (True, True, True)
self.ds = ds

class fake_index(object):
class fake_io(object):
class fake_index:
class fake_io:
def _read_data_set(io_self, data, field):
return self._read_data(field)
_read_exception = RuntimeError
Expand Down
2 changes: 1 addition & 1 deletion yt/fields/field_info_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,7 @@ def check_derived_fields(self, fields_to_check = None):
except Exception as e:
if field in self._show_field_errors:
raise
if type(e) != YTFieldNotFound:
if not isinstance(e, YTFieldNotFound):
# if we're doing field tests, raise an error
# see yt.fields.tests.test_fields
if hasattr(self.ds, '_field_test_dataset'):
Expand Down
28 changes: 14 additions & 14 deletions yt/fields/tests/test_fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def _strip_ftype(field):
return field[1]


class TestFieldAccess(object):
class TestFieldAccess:
description = None

def __init__(self, field_name, ds, nprocs):
Expand Down Expand Up @@ -405,19 +405,19 @@ def test_ion_field_labels():
ds = fake_random_ds(16, fields=fields, units=units)

# by default labels should use roman numerals
default_labels = {"O_p1_number_density":u"$\\rm{O\ II\ Number\ Density}$",
"O2_p1_number_density":u"$\\rm{O_{2}\ II\ Number\ Density}$",
"CO2_p1_number_density":u"$\\rm{CO_{2}\ II\ Number\ Density}$",
"Co_p1_number_density":u"$\\rm{Co\ II\ Number\ Density}$",
"O2_p2_number_density":u"$\\rm{O_{2}\ III\ Number\ Density}$",
"H2O_p1_number_density":u"$\\rm{H_{2}O\ II\ Number\ Density}$"}

pm_labels = {"O_p1_number_density":u"$\\rm{{O}^{+}\ Number\ Density}$",
"O2_p1_number_density":u"$\\rm{{O_{2}}^{+}\ Number\ Density}$",
"CO2_p1_number_density":u"$\\rm{{CO_{2}}^{+}\ Number\ Density}$",
"Co_p1_number_density":u"$\\rm{{Co}^{+}\ Number\ Density}$",
"O2_p2_number_density":u"$\\rm{{O_{2}}^{++}\ Number\ Density}$",
"H2O_p1_number_density":u"$\\rm{{H_{2}O}^{+}\ Number\ Density}$"}
default_labels = {"O_p1_number_density":"$\\rm{O\ II\ Number\ Density}$",
"O2_p1_number_density":"$\\rm{O_{2}\ II\ Number\ Density}$",
"CO2_p1_number_density":"$\\rm{CO_{2}\ II\ Number\ Density}$",
"Co_p1_number_density":"$\\rm{Co\ II\ Number\ Density}$",
"O2_p2_number_density":"$\\rm{O_{2}\ III\ Number\ Density}$",
"H2O_p1_number_density":"$\\rm{H_{2}O\ II\ Number\ Density}$"}

pm_labels = {"O_p1_number_density":"$\\rm{{O}^{+}\ Number\ Density}$",
"O2_p1_number_density":"$\\rm{{O_{2}}^{+}\ Number\ Density}$",
"CO2_p1_number_density":"$\\rm{{CO_{2}}^{+}\ Number\ Density}$",
"Co_p1_number_density":"$\\rm{{Co}^{+}\ Number\ Density}$",
"O2_p2_number_density":"$\\rm{{O_{2}}^{++}\ Number\ Density}$",
"H2O_p1_number_density":"$\\rm{{H_{2}O}^{+}\ Number\ Density}$"}

fobj = ds.fields.stream

Expand Down
2 changes: 1 addition & 1 deletion yt/fields/xray_emission_fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def __str__(self):
return self.msg


class XrayEmissivityIntegrator(object):
class XrayEmissivityIntegrator:
r"""Class for making X-ray emissivity fields. Uses hdf5 data tables
generated from Cloudy and AtomDB/APEC.
Expand Down
2 changes: 1 addition & 1 deletion yt/frontends/art/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -730,7 +730,7 @@ def fill(self, content, ftfields, selector):
source)
return tr

class ARTDomainFile(object):
class ARTDomainFile:
"""
Read in the AMR, left/right edges, fill out the octhandler
"""
Expand Down
4 changes: 2 additions & 2 deletions yt/frontends/art/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,7 +316,7 @@ def _read_art_level_info(f, level_oct_offsets, level, coarse_grid=128,
fl = np.ones((nLevel, 6), dtype='int64')
iocts = np.zeros(nLevel+1, dtype='int64')
idxa, idxb = 0, 0
chunk = long(1e6) # this is ~111MB for 15 dimensional 64 bit arrays
chunk = int(1e6) # this is ~111MB for 15 dimensional 64 bit arrays
left = nLevel
while left > 0:
this_chunk = min(chunk, left)
Expand Down Expand Up @@ -480,7 +480,7 @@ def _read_child_mask_level(f, level_child_offsets, level, nLevel, nhydro_vars):
ioctch = np.zeros(nLevel, dtype='uint8')
idc = np.zeros(nLevel, dtype='int32')

chunk = long(1e6)
chunk = int(1e6)
left = nLevel
width = nhydro_vars+6
a, b = 0, 0
Expand Down
2 changes: 1 addition & 1 deletion yt/frontends/artio/definitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
art_to_yt = dict(zip(yt_to_art.values(), yt_to_art.keys()))


class ARTIOconstants():
class ARTIOconstants:
def __init__(self):
self.yr = 365.25*86400
self.Myr = 1.0e6*self.yr
Expand Down
6 changes: 3 additions & 3 deletions yt/frontends/boxlib/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def _fill_child_mask(self, child, mask, tofill, dlevel=1):
startIndex[2]:endIndex[2]] = tofill


class BoxLibParticleHeader(object):
class BoxLibParticleHeader:

def __init__(self, ds, directory_name, is_checkpoint,
extra_field_names=None):
Expand Down Expand Up @@ -220,7 +220,7 @@ def _generate_particle_fields(self, extra_field_names):
for t in self.known_real_fields])


class AMReXParticleHeader(object):
class AMReXParticleHeader:

def __init__(self, ds, directory_name, is_checkpoint,
extra_field_names=None):
Expand Down Expand Up @@ -1405,7 +1405,7 @@ def _read_header(raw_file, field):
return nghost, all_boxes, all_file_names, all_offsets


class WarpXHeader(object):
class WarpXHeader:
def __init__(self, header_fn):
self.data = {}
with open(header_fn, "r") as f:
Expand Down
2 changes: 1 addition & 1 deletion yt/frontends/chombo/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def box_size(corners):
self._offsets = {}
num_comp = self._handle.attrs['num_components']
level = 0
while 1:
while True:
lname = 'level_%i' % level
if lname not in self._handle: break
boxes = self._handle['level_0']['boxes'][()]
Expand Down
Loading

0 comments on commit 6befef2

Please sign in to comment.