Skip to content

Commit

Permalink
Merge pull request #4143 from mtryan83/updating_firefly
Browse files Browse the repository at this point in the history
  • Loading branch information
neutrinoceros authored Oct 3, 2022
2 parents 0bdce1c + 83259e3 commit f7eb388
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,19 @@ particle visualization platform that allows you to filter, colormap, and fly
through their data. The Python frontend allows users to both load in their
own datasets and customize every aspect of the user interface.
yt offers the ability
to export your data to Firefly's JSON format through the
to export your data to Firefly's ffly or JSON format through the
:meth:`~yt.data_objects.data_containers.YTDataContainer.create_firefly_object`
method.

You can adjust the interface settings, particle colors, decimation factors, and
other `Firefly settings <https://ageller.github.io/Firefly/docs/build/html/index.html>`_
through the returned ``Firefly.reader`` object. Once the
settings are tuned to your liking, calling the ``reader.dumpToJSON()`` method will
produce the final JSON files. Note that ``reader.clean_JSONdir`` defaults to true
settings are tuned to your liking, calling the ``reader.writeToDisk()`` method will
produce the final ffly files. Note that ``reader.clean_datadir`` defaults to true
when using
:meth:`~yt.data_objects.data_containers.YTDataContainer.create_firefly_object`
so if you would like to manage multiple datasets make sure to pass different
``JSONdir`` keyword arguments.
``datadir`` keyword arguments.

.. image:: _images/firefly_example.png
:width: 85%
Expand All @@ -45,12 +45,11 @@ Here is an example of how to use yt to export data to Firefly using some
)
## adjust some of the options
reader.options["color"]["io"] = [1, 1, 0, 1] ## set default color
reader.settings["color"]["io"] = [1, 1, 0, 1] ## set default color
reader.particleGroups[0].decimation_factor = 100 ## increase the decimation factor
## dump files to
## ~/IsoGalaxyRamses/Dataio_0.json
## ~/IsoGalaxyRamses/Dataio_1.json
## ~/IsoGalaxyRamses/Dataio_ ... .json
## ~/IsoGalaxyRamses/Dataio000.ffly
## ~/IsoGalaxyRamses/filenames.json
## ~/IsoGalaxyRamses/DataSettings.json
reader.dumpToJSON()
reader.writeToDisk()
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ full =
arm-pyart!=1.12.5,>=1.11.4
astropy>=4.0.1,<6.0.0
f90nml>=1.1.2
firefly-vis>=2.0.4,<3.0.0
firefly>=3.2.0,<4.0.0
glueviz>=0.13.3
h5py>=3.1.0,<4.0.0
ipython>=2.0.0
Expand Down
54 changes: 34 additions & 20 deletions yt/data_objects/data_containers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

import numpy as np

from yt._maintenance.deprecation import issue_deprecation_warning
from yt.config import ytcfg
from yt.data_objects.field_data import YTFieldData
from yt.data_objects.profiles import create_profile
Expand Down Expand Up @@ -703,13 +704,15 @@ def to_glue(self, fields, label="yt", data_collection=None):

def create_firefly_object(
self,
JSONdir,
datadir=None,
fields_to_include=None,
fields_units=None,
default_decimation_factor=100,
velocity_units="km/s",
coordinate_units="kpc",
show_unused_fields=0,
*,
JSONdir=None,
**kwargs,
):
r"""This function links a region of data stored in a yt dataset
Expand All @@ -719,9 +722,9 @@ def create_firefly_object(
Parameters
----------
JSONdir : string
datadir : string
Path to where any `.json` files should be saved. If a relative
path will assume relative to `${HOME}`
path will assume relative to `${HOME}`. A value of `None` will default to `${HOME}/Data`.
fields_to_include : array_like of strings
A list of fields that you want to include in your
Expand All @@ -736,7 +739,7 @@ def create_firefly_object(
not overtax a system. This is adjustable on a per particle group
basis by changing the returned reader's
`reader.particleGroup[i].decimation_factor` before calling
`reader.dumpToJSON()`.
`reader.writeToDisk()`.
velocity_units : string
The units that the velocity should be converted to in order to
Expand All @@ -750,6 +753,9 @@ def create_firefly_object(
A flag to optionally print the fields that are available in the
dataset but were not explicitly requested to be tracked.
Any additional keyword arguments are passed to
firefly.data_reader.Reader.__init__
Returns
-------
reader : Firefly.data_reader.Reader object
Expand All @@ -775,9 +781,9 @@ def create_firefly_object(
... fields_units=["dimensionless", "dimensionless"],
... )
>>> reader.options["color"]["io"] = [1, 1, 0, 1]
>>> reader.settings["color"]["io"] = [1, 1, 0, 1]
>>> reader.particleGroups[0].decimation_factor = 100
>>> reader.dumpToJSON()
>>> reader.writeToDisk()
"""

## handle default arguments
Expand All @@ -793,9 +799,17 @@ def create_firefly_object(
## for safety, in case someone passes a float just cast it
default_decimation_factor = int(default_decimation_factor)

if JSONdir is not None:
issue_deprecation_warning(
"The 'JSONdir' keyword argument is a deprecated alias for 'datadir'."
"Please use 'datadir' directly.",
since="4.1",
)
datadir = JSONdir

## initialize a firefly reader instance
reader = firefly.data_reader.Reader(
JSONdir=JSONdir, clean_JSONdir=True, **kwargs
datadir=datadir, clean_datadir=True, **kwargs
)

## create a ParticleGroup object that contains *every* field
Expand All @@ -816,11 +830,8 @@ def create_firefly_object(
"detected (but did not request) %s %s", ptype, field
)

## you must have velocities (and they must be named "Velocities")
tracked_arrays = [
self[ptype, "relative_particle_velocity"].in_units(velocity_units)
]
tracked_names = ["Velocities"]
field_arrays = []
field_names = []

## explicitly go after the fields we want
for field, units in zip(fields_to_include, fields_units):
Expand All @@ -843,25 +854,28 @@ def create_firefly_object(
this_field_array = np.log10(this_field_array)

## add this array to the tracked arrays
tracked_arrays += [this_field_array]
tracked_names = np.append(tracked_names, [field], axis=0)
field_arrays += [this_field_array]
field_names = np.append(field_names, [field], axis=0)

## flag whether we want to filter and/or color by these fields
## we'll assume yes for both cases, this can be changed after
## the reader object is returned to the user.
tracked_filter_flags = np.ones(len(tracked_names))
tracked_colormap_flags = np.ones(len(tracked_names))
field_filter_flags = np.ones(len(field_names))
field_colormap_flags = np.ones(len(field_names))

## create a firefly ParticleGroup for this particle type
pg = firefly.data_reader.ParticleGroup(
UIname=ptype,
coordinates=self[ptype, "relative_particle_position"].in_units(
coordinate_units
),
tracked_arrays=tracked_arrays,
tracked_names=tracked_names,
tracked_filter_flags=tracked_filter_flags,
tracked_colormap_flags=tracked_colormap_flags,
velocities=self[ptype, "relative_particle_velocity"].in_units(
velocity_units
),
field_arrays=field_arrays,
field_names=field_names,
field_filter_flags=field_filter_flags,
field_colormap_flags=field_colormap_flags,
decimation_factor=default_decimation_factor,
)

Expand Down
8 changes: 4 additions & 4 deletions yt/utilities/on_demand_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -452,21 +452,21 @@ def concat(self):
_pandas = pandas_imports()


class Firefly_imports(OnDemand):
class firefly_imports(OnDemand):
@safe_import
def data_reader(self):
import Firefly.data_reader as data_reader
import firefly.data_reader as data_reader

return data_reader

@safe_import
def server(self):
import Firefly.server as server
import firefly.server as server

return server


_firefly = Firefly_imports()
_firefly = firefly_imports()


# Note: ratarmount may fail with an OSError on import if libfuse is missing
Expand Down

0 comments on commit f7eb388

Please sign in to comment.