diff --git a/yt/frontends/boxlib/data_structures.py b/yt/frontends/boxlib/data_structures.py index 977aaf4fabd..2158ea801b3 100644 --- a/yt/frontends/boxlib/data_structures.py +++ b/yt/frontends/boxlib/data_structures.py @@ -21,6 +21,7 @@ NyxFieldInfo, WarpXFieldInfo, ) +from .misc import BoxlibReadParticleFileMixin # This is what we use to find scientific notation that might include d's # instead of e's. @@ -921,7 +922,7 @@ def relative_refinement(self, l0, l1): return self.refine_by ** (l1 - l0 + offset) -class OrionHierarchy(BoxlibHierarchy): +class OrionHierarchy(BoxlibHierarchy, BoxlibReadParticleFileMixin): def __init__(self, ds, dataset_type="orion_native"): BoxlibHierarchy.__init__(self, ds, dataset_type) self._read_particles() @@ -962,50 +963,6 @@ def _read_particles(self): if self.particle_filename is not None: self._read_particle_file(self.particle_filename) - def _read_particle_file(self, fn): - """actually reads the orion particle data file itself.""" - if not os.path.exists(fn): - return - with open(fn) as f: - lines = f.readlines() - self.num_stars = int(lines[0].strip()[0]) - for num, line in enumerate(lines[1:]): - particle_position_x = float(line.split(" ")[1]) - particle_position_y = float(line.split(" ")[2]) - particle_position_z = float(line.split(" ")[3]) - coord = [particle_position_x, particle_position_y, particle_position_z] - # for each particle, determine which grids contain it - # copied from object_finding_mixin.py - mask = np.ones(self.num_grids) - for i in range(len(coord)): - np.choose( - np.greater(self.grid_left_edge.d[:, i], coord[i]), - (mask, 0), - mask, - ) - np.choose( - np.greater(self.grid_right_edge.d[:, i], coord[i]), - (0, mask), - mask, - ) - ind = np.where(mask == 1) - selected_grids = self.grids[ind] - # in orion, particles always live on the finest level. 
- # so, we want to assign the particle to the finest of - # the grids we just found - if len(selected_grids) != 0: - grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1] - ind = np.where(self.grids == grid)[0][0] - self.grid_particle_count[ind] += 1 - self.grids[ind].NumberOfParticles += 1 - - # store the position in the particle file for fast access. - try: - self.grids[ind]._particle_line_numbers.append(num + 1) - except AttributeError: - self.grids[ind]._particle_line_numbers = [num + 1] - return True - class OrionDataset(BoxlibDataset): diff --git a/yt/frontends/boxlib/fields.py b/yt/frontends/boxlib/fields.py index cf2ecd758cb..84d41c73356 100644 --- a/yt/frontends/boxlib/fields.py +++ b/yt/frontends/boxlib/fields.py @@ -4,6 +4,7 @@ import numpy as np from yt.fields.field_info_container import FieldInfoContainer +from yt.frontends.boxlib.misc import BoxlibSetupParticleFieldsMixin from yt.units import YTQuantity from yt.utilities.physical_constants import amu_cgs, boltzmann_constant_cgs, c @@ -186,7 +187,7 @@ class NyxFieldInfo(FieldInfoContainer): ) -class BoxlibFieldInfo(FieldInfoContainer): +class BoxlibFieldInfo(FieldInfoContainer, BoxlibSetupParticleFieldsMixin): known_other_fields = ( ("density", (rho_units, ["density"], None)), ("eden", (eden_units, ["total_energy_density"], None)), @@ -225,26 +226,6 @@ class BoxlibFieldInfo(FieldInfoContainer): # "luminosity", ) - def setup_particle_fields(self, ptype): - def _get_vel(axis): - def velocity(field, data): - return ( - data[(ptype, f"particle_momentum_{axis}")] - / data[(ptype, "particle_mass")] - ) - - return velocity - - for ax in "xyz": - self.add_field( - (ptype, f"particle_velocity_{ax}"), - sampling_type="particle", - function=_get_vel(ax), - units="code_length/code_time", - ) - - super().setup_particle_fields(ptype) - def setup_fluid_fields(self): unit_system = self.ds.unit_system # Now, let's figure out what fields are included. 
diff --git a/yt/frontends/boxlib/io.py b/yt/frontends/boxlib/io.py index 707102da736..e455bf5050d 100644 --- a/yt/frontends/boxlib/io.py +++ b/yt/frontends/boxlib/io.py @@ -2,12 +2,13 @@ from collections import defaultdict import numpy as np +from .misc import BoxlibParticleSelectionMixin -from yt.frontends.chombo.io import parse_orion_sinks from yt.funcs import mylog -from yt.geometry.selection_routines import GridSelector from yt.utilities.io_handler import BaseIOHandler +from .misc import IOHandlerParticlesBoxlibMixin def _remove_raw(all_fields, raw_fields): centered_fields = set(all_fields) @@ -16,7 +17,7 @@ def _remove_raw(all_fields, raw_fields): return list(centered_fields) -class IOHandlerBoxlib(BaseIOHandler): +class IOHandlerBoxlib(BaseIOHandler, BoxlibParticleSelectionMixin): _dataset_type = "boxlib_native" @@ -188,80 +189,12 @@ def _read_particle_fields(self, chunks, ptf, selector): yield (ptype, field), data[mask].flatten() -class IOHandlerOrion(IOHandlerBoxlib): +class IOHandlerOrion(IOHandlerBoxlib, IOHandlerParticlesBoxlibMixin): _dataset_type = "orion_native" - _particle_filename = None - @property def particle_filename(self): fn = self.ds.output_dir + "/StarParticles" if not os.path.exists(fn): fn = self.ds.output_dir + "/SinkParticles" - self._particle_filename = fn - return self._particle_filename - - _particle_field_index = None - - @property - def particle_field_index(self): - - index = parse_orion_sinks(self.particle_filename) - - self._particle_field_index = index - return self._particle_field_index - - def _read_particle_selection(self, chunks, selector, fields): - rv = {} - chunks = list(chunks) - - if isinstance(selector, GridSelector): - - if not (len(chunks) == len(chunks[0].objs) == 1): - raise RuntimeError - - grid = chunks[0].objs[0] - - for ftype, fname in fields: - rv[ftype, fname] = self._read_particles(grid, fname) - - return rv - - rv = {f: np.array([]) for f in fields} - for chunk in chunks: - for grid in chunk.objs: - for 
ftype, fname in fields: - data = self._read_particles(grid, fname) - rv[ftype, fname] = np.concatenate((data, rv[ftype, fname])) - return rv - - def _read_particles(self, grid, field): - """ - parses the Orion Star Particle text files - - """ - - particles = [] - - if grid.NumberOfParticles == 0: - return np.array(particles) - - def read(line, field): - entry = line.strip().split(" ")[self.particle_field_index[field]] - return float(entry) - - try: - lines = self._cached_lines - for num in grid._particle_line_numbers: - line = lines[num] - particles.append(read(line, field)) - return np.array(particles) - except AttributeError: - fn = self.particle_filename - with open(fn) as f: - lines = f.readlines() - self._cached_lines = lines - for num in grid._particle_line_numbers: - line = lines[num] - particles.append(read(line, field)) - return np.array(particles) + return fn diff --git a/yt/frontends/boxlib/misc.py b/yt/frontends/boxlib/misc.py index e69de29bb2d..88298e46ee7 100644 --- a/yt/frontends/boxlib/misc.py +++ b/yt/frontends/boxlib/misc.py @@ -0,0 +1,145 @@ +import os + +import numpy as np + +from yt.frontends.chombo.io import parse_orion_sinks +from yt.geometry.selection_routines import GridSelector + + +class BoxlibParticleSelectionMixin: + def _read_particle_selection(self, chunks, selector, fields): + rv = {} + chunks = list(chunks) + + if isinstance(selector, GridSelector): + + if not (len(chunks) == len(chunks[0].objs) == 1): + raise RuntimeError + + grid = chunks[0].objs[0] + + for ftype, fname in fields: + rv[ftype, fname] = self._read_particles(grid, fname) + + return rv + + rv = {f: np.array([]) for f in fields} + for chunk in chunks: + for grid in chunk.objs: + for ftype, fname in fields: + data = self._read_particles(grid, fname) + rv[ftype, fname] = np.concatenate((data, rv[ftype, fname])) + return rv + + +class BoxlibReadParticleFileMixin: + def _read_particle_file(self, fn): + """actually reads the orion particle data file itself.""" + if not 
os.path.exists(fn): + return + with open(fn) as f: + lines = f.readlines() + self.num_stars = int(lines[0].strip().split(" ")[0]) + for num, line in enumerate(lines[1:]): + particle_position_x = float(line.split(" ")[1]) + particle_position_y = float(line.split(" ")[2]) + particle_position_z = float(line.split(" ")[3]) + coord = [particle_position_x, particle_position_y, particle_position_z] + # for each particle, determine which grids contain it + # copied from object_finding_mixin.py + mask = np.ones(self.num_grids) + for i in range(len(coord)): + np.choose( + np.greater(self.grid_left_edge.d[:, i], coord[i]), + (mask, 0), + mask, + ) + np.choose( + np.greater(self.grid_right_edge.d[:, i], coord[i]), + (0, mask), + mask, + ) + ind = np.where(mask == 1) + selected_grids = self.grids[ind] + # in orion, particles always live on the finest level. + # so, we want to assign the particle to the finest of + # the grids we just found + if len(selected_grids) != 0: + grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1] + ind = np.where(self.grids == grid)[0][0] + self.grid_particle_count[ind] += 1 + self.grids[ind].NumberOfParticles += 1 + + # store the position in the particle file for fast access. 
+ try: + self.grids[ind]._particle_line_numbers.append(num + 1) + except AttributeError: + self.grids[ind]._particle_line_numbers = [num + 1] + return True + + +class BoxlibSetupParticleFieldsMixin: + def setup_particle_fields(self, ptype): + def _get_vel(axis): + def velocity(field, data): + return ( + data[(ptype, f"particle_momentum_{axis}")] + / data[(ptype, "particle_mass")] + ) + + return velocity + + for ax in "xyz": + self.add_field( + (ptype, f"particle_velocity_{ax}"), + sampling_type="particle", + function=_get_vel(ax), + units="code_length/code_time", + ) + + super().setup_particle_fields(ptype) + + +class IOHandlerParticlesBoxlibMixin: + def _read_particles(self, grid, field): + """ + parses the Orion Star Particle text files + + """ + + particles = [] + + if grid.NumberOfParticles == 0: + return np.array(particles) + + def read(line, field): + entry = line.strip().split(" ")[self.particle_field_index[field]] + return float(entry) + + try: + lines = self._cached_lines + for num in grid._particle_line_numbers: + line = lines[num] + particles.append(read(line, field)) + return np.array(particles) + except AttributeError: + fn = self.particle_filename + with open(fn) as f: + lines = f.readlines() + self._cached_lines = lines + for num in grid._particle_line_numbers: + line = lines[num] + particles.append(read(line, field)) + return np.array(particles) + + _particle_field_index = None + + @property + def particle_field_index(self): + if self._particle_field_index: + return self._particle_field_index + + index = parse_orion_sinks(self.particle_filename) + + self._particle_field_index = index + return self._particle_field_index diff --git a/yt/frontends/chombo/data_structures.py b/yt/frontends/chombo/data_structures.py index 59c1a23b506..a0c941a8ab6 100644 --- a/yt/frontends/chombo/data_structures.py +++ b/yt/frontends/chombo/data_structures.py @@ -6,6 +6,7 @@ from yt.data_objects.index_subobjects.grid_patch import AMRGridPatch from 
yt.data_objects.static_output import Dataset +from yt.frontends.boxlib.misc import BoxlibReadParticleFileMixin from yt.funcs import mylog, setdefaultattr from yt.geometry.grid_geometry_handler import GridIndex from yt.utilities.file_handler import HDF5FileHandler, warn_h5py @@ -575,7 +576,7 @@ def _is_valid(cls, filename, *args, **kwargs): return False -class Orion2Hierarchy(ChomboHierarchy): +class Orion2Hierarchy(ChomboHierarchy, BoxlibReadParticleFileMixin): def __init__(self, ds, dataset_type="orion_chombo_native"): ChomboHierarchy.__init__(self, ds, dataset_type) @@ -596,46 +597,7 @@ def _detect_output_fields(self): self.field_list.extend(pfield_list) def _read_particles(self): - if not os.path.exists(self.particle_filename): - return - with open(self.particle_filename) as f: - lines = f.readlines() - self.num_stars = int(lines[0].strip().split(" ")[0]) - for num, line in enumerate(lines[1:]): - particle_position_x = float(line.split(" ")[1]) - particle_position_y = float(line.split(" ")[2]) - particle_position_z = float(line.split(" ")[3]) - coord = [particle_position_x, particle_position_y, particle_position_z] - # for each particle, determine which grids contain it - # copied from object_finding_mixin.py - mask = np.ones(self.num_grids) - for i in range(len(coord)): - np.choose( - np.greater(self.grid_left_edge.d[:, i], coord[i]), - (mask, 0), - mask, - ) - np.choose( - np.greater(self.grid_right_edge.d[:, i], coord[i]), - (0, mask), - mask, - ) - ind = np.where(mask == 1) - selected_grids = self.grids[ind] - # in orion, particles always live on the finest level. - # so, we want to assign the particle to the finest of - # the grids we just found - if len(selected_grids) != 0: - grid = sorted(selected_grids, key=lambda grid: grid.Level)[-1] - ind = np.where(self.grids == grid)[0][0] - self.grid_particle_count[ind] += 1 - self.grids[ind].NumberOfParticles += 1 - - # store the position in the *.sink file for fast access. 
- try: - self.grids[ind]._particle_line_numbers.append(num + 1) - except AttributeError: - self.grids[ind]._particle_line_numbers = [num + 1] + return self._read_particle_file(self.particle_filename) class Orion2Dataset(ChomboDataset): diff --git a/yt/frontends/chombo/fields.py b/yt/frontends/chombo/fields.py index 0e27f525301..89938e32546 100644 --- a/yt/frontends/chombo/fields.py +++ b/yt/frontends/chombo/fields.py @@ -6,6 +6,7 @@ particle_vector_functions, standard_particle_fields, ) +from yt.frontends.boxlib.misc import BoxlibSetupParticleFieldsMixin from yt.units.unit_object import Unit from yt.utilities.exceptions import YTFieldNotFound @@ -25,7 +26,7 @@ class ChomboFieldInfo(FieldInfoContainer): # Orion 2 Fields # We duplicate everything here from Boxlib, because we want to be able to # subclass it and that can be somewhat tricky. -class Orion2FieldInfo(ChomboFieldInfo): +class Orion2FieldInfo(ChomboFieldInfo, BoxlibSetupParticleFieldsMixin): known_other_fields = ( ("density", (rho_units, ["density"], None)), ("energy-density", (eden_units, ["total_energy_density"], None)), @@ -64,26 +65,6 @@ class Orion2FieldInfo(ChomboFieldInfo): ("particle_id", ("", ["particle_index"], None)), ) - def setup_particle_fields(self, ptype): - def _get_vel(axis): - def velocity(field, data): - return ( - data[(ptype, f"particle_momentum_{axis}")] - / data[(ptype, "particle_mass")] - ) - - return velocity - - for ax in "xyz": - self.add_field( - (ptype, f"particle_velocity_{ax}"), - sampling_type="particle", - function=_get_vel(ax), - units="code_length/code_time", - ) - - super().setup_particle_fields(ptype) - def setup_fluid_fields(self): from yt.fields.magnetic_field import setup_magnetic_field_aliases diff --git a/yt/frontends/chombo/io.py b/yt/frontends/chombo/io.py index db67fdbe9e0..a32e7db5fef 100644 --- a/yt/frontends/chombo/io.py +++ b/yt/frontends/chombo/io.py @@ -2,12 +2,16 @@ import numpy as np +from yt.frontends.boxlib.misc import ( + BoxlibParticleSelectionMixin, 
+ IOHandlerParticlesBoxlibMixin, +) from yt.geometry.selection_routines import GridSelector from yt.utilities.io_handler import BaseIOHandler from yt.utilities.logger import ytLogger as mylog -class IOHandlerChomboHDF5(BaseIOHandler): +class IOHandlerChomboHDF5(BaseIOHandler, BoxlibParticleSelectionMixin): _dataset_type = "chombo_hdf5" _offset_string = "data:offsets=0" _data_string = "data:datatype=0" @@ -141,30 +145,6 @@ def _read_fluid_selection(self, chunks, selector, fields, size): ind += nd return rv - def _read_particle_selection(self, chunks, selector, fields): - rv = {} - chunks = list(chunks) - - if isinstance(selector, GridSelector): - - if not (len(chunks) == len(chunks[0].objs) == 1): - raise RuntimeError - - grid = chunks[0].objs[0] - - for ftype, fname in fields: - rv[ftype, fname] = self._read_particles(grid, fname) - - return rv - - rv = {f: np.array([]) for f in fields} - for chunk in chunks: - for grid in chunk.objs: - for ftype, fname in fields: - data = self._read_particles(grid, fname) - rv[ftype, fname] = np.concatenate((data, rv[ftype, fname])) - return rv - def _read_particles(self, grid, name): field_index = self.particle_field_index[name] @@ -259,48 +239,11 @@ def parse_orion_sinks(fn): return index -class IOHandlerOrion2HDF5(IOHandlerChomboHDF5): +class IOHandlerOrion2HDF5(IOHandlerChomboHDF5, IOHandlerParticlesBoxlibMixin): _dataset_type = "orion_chombo_native" - _particle_field_index = None + _particle_filename = None @property - def particle_field_index(self): - - fn = self.ds.fullplotdir[:-4] + "sink" - - index = parse_orion_sinks(fn) - - self._particle_field_index = index - return self._particle_field_index - - def _read_particles(self, grid, field): - """ - parses the Orion Star Particle text files - - """ - - particles = [] - - if grid.NumberOfParticles == 0: - return np.array(particles) - - def read(line, field): - entry = line.strip().split(" ")[self.particle_field_index[field]] - return float(entry) - - try: - lines = 
self._cached_lines - for num in grid._particle_line_numbers: - line = lines[num] - particles.append(read(line, field)) - return np.array(particles) - except AttributeError: - fn = grid.ds.fullplotdir[:-4] + "sink" - with open(fn) as f: - lines = f.readlines() - self._cached_lines = lines - for num in grid._particle_line_numbers: - line = lines[num] - particles.append(read(line, field)) - return np.array(particles) + def particle_filename(self): + return self.ds.fullplotdir[:-4] + "sink"