import numpy as np

from yt.data_objects.field_data import YTFieldData
from yt.fields.derived_field import DerivedField
from yt.frontends.ytdata.utilities import save_as_dataset
from yt.funcs import get_output_filename, is_sequence, iter_fields, mylog
from yt.units.unit_object import Unit
from yt.units.yt_array import YTQuantity, array_like_field
from yt.utilities.exceptions import (
    YTIllDefinedBounds,
    YTIllDefinedProfile,
    YTProfileDataShape,
)
from yt.utilities.lib.misc_utilities import (
    new_bin_profile1d,
    new_bin_profile2d,
    new_bin_profile3d,
)
from yt.utilities.lib.particle_mesh_operations import CICDeposit_2, NGPDeposit_2
from yt.utilities.parallel_tools.parallel_analysis_interface import (
    ParallelAnalysisInterface,
    parallel_objects,
)


def _sanitize_min_max_units(amin, amax, finfo, registry):
    # returns a copy of amin and amax, converted to finfo's output units
    umin = getattr(amin, "units", None)
    umax = getattr(amax, "units", None)
    if umin is None:
        umin = Unit(finfo.output_units, registry=registry)
        rmin = YTQuantity(amin, umin)
    else:
        rmin = amin.in_units(finfo.output_units)
    if umax is None:
        umax = Unit(finfo.output_units, registry=registry)
        rmax = YTQuantity(amax, umax)
    else:
        rmax = amax.in_units(finfo.output_units)
    return rmin, rmax


def preserve_source_parameters(func):
    def save_state(*args, **kwargs):
        # Temporarily replace the 'field_parameters' for a
        # grid with the 'field_parameters' for the data source
        prof = args[0]
        source = args[1]
        if hasattr(source, "field_parameters"):
            old_params = source.field_parameters
            source.field_parameters = prof._data_source.field_parameters
            tr = func(*args, **kwargs)
            source.field_parameters = old_params
        else:
            tr = func(*args, **kwargs)
        return tr

    return save_state


class ProfileFieldAccumulator:
    def __init__(self, n_fields, size):
        shape = size + (n_fields,)
        self.values = np.zeros(shape, dtype="float64")
        self.mvalues = np.zeros(shape, dtype="float64")
        self.qvalues = np.zeros(shape, dtype="float64")
        self.used = np.zeros(size, dtype="bool")
        self.weight_values = np.zeros(size, dtype="float64")


class ProfileND(ParallelAnalysisInterface):
    """The profile object class"""

    def __init__(self, data_source, weight_field=None):
        self.data_source = data_source
        self.ds = data_source.ds
        self.field_map = {}
        self.field_info = {}
        self.field_data = YTFieldData()
        if weight_field is not None:
            self.standard_deviation = YTFieldData()
            weight_field = self.data_source._determine_fields(weight_field)[0]
        else:
            self.standard_deviation = None
        self.weight_field = weight_field
        self.field_units = {}
        ParallelAnalysisInterface.__init__(self, comm=data_source.comm)

    def add_fields(self, fields):
        """Add fields to profile

        Parameters
        ----------
        fields : list of field names
            A list of fields to create profile histograms for

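        Examples
        --------
        A minimal sketch, assuming ``prof`` is an existing profile object:

        >>> prof.add_fields([("gas", "temperature"), ("gas", "velocity_x")])
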
        """
        fields = self.data_source._determine_fields(fields)
        for f in fields:
            self.field_info[f] = self.data_source.ds.field_info[f]
        temp_storage = ProfileFieldAccumulator(len(fields), self.size)
        citer = self.data_source.chunks([], "io")
        for chunk in parallel_objects(citer):
            self._bin_chunk(chunk, fields, temp_storage)
        self._finalize_storage(fields, temp_storage)

    def set_field_unit(self, field, new_unit):
        """Sets a new unit for the requested field

        Parameters
        ----------
        field : string or field tuple
           The name of the field that is to be changed.

        new_unit : string or Unit object
           The name of the new unit.
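
        Examples
        --------
        A minimal sketch, assuming ``prof`` is an existing profile object
        that contains a ``("gas", "mass")`` field:

        >>> prof.set_field_unit(("gas", "mass"), "Msun")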
        """
        if field in self.field_units:
            self.field_units[field] = Unit(new_unit, registry=self.ds.unit_registry)
        else:
            fd = self.field_map[field]
            if fd in self.field_units:
                self.field_units[fd] = Unit(new_unit, registry=self.ds.unit_registry)
            else:
                raise KeyError(f"{field} not in profile!")

    def _finalize_storage(self, fields, temp_storage):
        # We use our main comm here
        # This also will fill _field_data

        for i, _field in enumerate(fields):
            # q values are returned as q * weight but we want just q
            temp_storage.qvalues[..., i][
                temp_storage.used
            ] /= temp_storage.weight_values[temp_storage.used]

        # get the profile data from all procs
        all_store = {self.comm.rank: temp_storage}
        all_store = self.comm.par_combine_object(all_store, "join", datatype="dict")

        all_val = np.zeros_like(temp_storage.values)
        all_mean = np.zeros_like(temp_storage.mvalues)
        all_std = np.zeros_like(temp_storage.qvalues)
        all_weight = np.zeros_like(temp_storage.weight_values)
        all_used = np.zeros_like(temp_storage.used, dtype="bool")

        # Combine the weighted mean and standard deviation from each processor.
        # For two samples with total weight, mean, and standard deviation
        # given by w, m, and s, their combined mean and variance are:
        # m12 = (m1 * w1 + m2 * w2) / (w1 + w2)
        # s12**2 = (w1 * (s1**2 + (m1 - m12)**2) +
        #           w2 * (s2**2 + (m2 - m12)**2)) / (w1 + w2)
        # Here, the mvalues are m and the qvalues are s**2.
        for p in sorted(all_store.keys()):
            all_used += all_store[p].used
            old_mean = all_mean.copy()
            old_weight = all_weight.copy()
            all_weight[all_store[p].used] += all_store[p].weight_values[
                all_store[p].used
            ]
            for i, _field in enumerate(fields):
                all_val[..., i][all_store[p].used] += all_store[p].values[..., i][
                    all_store[p].used
                ]

                all_mean[..., i][all_store[p].used] = (
                    all_mean[..., i] * old_weight
                    + all_store[p].mvalues[..., i] * all_store[p].weight_values
                )[all_store[p].used] / all_weight[all_store[p].used]

                all_std[..., i][all_store[p].used] = (
                    old_weight
                    * (all_std[..., i] + (old_mean[..., i] - all_mean[..., i]) ** 2)
                    + all_store[p].weight_values
                    * (
                        all_store[p].qvalues[..., i]
                        + (all_store[p].mvalues[..., i] - all_mean[..., i]) ** 2
                    )
                )[all_store[p].used] / all_weight[all_store[p].used]

        all_std = np.sqrt(all_std)
        del all_store
        self.used = all_used
        blank = ~all_used

        self.weight = all_weight
        self.weight[blank] = 0.0

        for i, field in enumerate(fields):
            if self.weight_field is None:
                self.field_data[field] = array_like_field(
                    self.data_source, all_val[..., i], field
                )
            else:
                self.field_data[field] = array_like_field(
                    self.data_source, all_mean[..., i], field
                )
                self.standard_deviation[field] = array_like_field(
                    self.data_source, all_std[..., i], field
                )
                self.standard_deviation[field][blank] = 0.0
                self.weight = array_like_field(
                    self.data_source, self.weight, self.weight_field
                )
            self.field_data[field][blank] = 0.0
            self.field_units[field] = self.field_data[field].units
            if isinstance(field, tuple):
                self.field_map[field[1]] = field
            else:
                self.field_map[field] = field

    def _bin_chunk(self, chunk, fields, storage):
        raise NotImplementedError

    def _filter(self, bin_fields):
        # the filter initially includes everything; it is then narrowed to the
        # min/max bounds of each bin field
        pfilter = np.ones(bin_fields[0].shape, dtype="bool")
        for (mi, ma), data in zip(self.bounds, bin_fields):
            pfilter &= data > mi
            pfilter &= data < ma
        return pfilter, [data[pfilter] for data in bin_fields]

    def _get_data(self, chunk, fields):
        # We are using chunks now, which will manage the field parameters and
        # the like.
        bin_fields = [chunk[bf] for bf in self.bin_fields]
        for i in range(1, len(bin_fields)):
            if bin_fields[0].shape != bin_fields[i].shape:
                raise YTProfileDataShape(
                    self.bin_fields[0],
                    bin_fields[0].shape,
                    self.bin_fields[i],
                    bin_fields[i].shape,
                )
        # We want to make sure that our fields are within the bounds of the
        # binning
        pfilter, bin_fields = self._filter(bin_fields)
        if not np.any(pfilter):
            return None
        arr = np.zeros((bin_fields[0].size, len(fields)), dtype="float64")
        for i, field in enumerate(fields):
            if pfilter.shape != chunk[field].shape:
                raise YTProfileDataShape(
                    self.bin_fields[0], bin_fields[0].shape, field, chunk[field].shape
                )
            units = chunk.ds.field_info[field].output_units
            arr[:, i] = chunk[field][pfilter].in_units(units)
        if self.weight_field is not None:
            if pfilter.shape != chunk[self.weight_field].shape:
                raise YTProfileDataShape(
                    self.bin_fields[0],
                    bin_fields[0].shape,
                    self.weight_field,
                    chunk[self.weight_field].shape,
                )
            units = chunk.ds.field_info[self.weight_field].output_units
            weight_data = chunk[self.weight_field].in_units(units)
        else:
            weight_data = np.ones(pfilter.shape, dtype="float64")
        weight_data = weight_data[pfilter]
        # return the field values, weights, and filtered bin fields for binning
        return arr, weight_data, bin_fields

    def __getitem__(self, field):
        if field in self.field_data:
            fname = field
        else:
            # deal with string vs tuple field names and attempt to guess which field
            # we are supposed to be talking about
            fname = self.field_map.get(field, None)
            if isinstance(field, tuple):
                fname = self.field_map.get(field[1], None)
                if fname != field:
                    raise KeyError(
                        "Asked for field '{}' but only have data for "
                        "fields '{}'".format(field, list(self.field_data.keys()))
                    )
            elif isinstance(field, DerivedField):
                fname = self.field_map.get(field.name[1], None)
            if fname is None:
                raise KeyError(field)
        if getattr(self, "fractional", False):
            return self.field_data[fname]
        else:
            return self.field_data[fname].in_units(self.field_units[fname])

    def items(self):
        return [(k, self[k]) for k in self.field_data.keys()]

    def keys(self):
        return self.field_data.keys()

    def __iter__(self):
        return iter(sorted(self.items()))

    def _get_bins(self, mi, ma, n, take_log):
        if take_log:
            ret = np.logspace(np.log10(mi), np.log10(ma), n + 1)
            # at this point ret[0] and ret[-1] are not exactly equal to
            # mi and ma due to round-off error. Let's force them to be
            # mi and ma exactly to avoid incorrectly discarding cells near
            # the edges. See Issue #1300.
            ret[0], ret[-1] = mi, ma
            return ret
        else:
            return np.linspace(mi, ma, n + 1)

    def save_as_dataset(self, filename=None):
        r"""Export a profile to a reloadable yt dataset.

        This function will take a profile and output a dataset
        containing all relevant fields.  The resulting dataset
        can be reloaded as a yt dataset.

        Parameters
        ----------
        filename : str, optional
            The name of the file to be written.  If None, the name
            will be a combination of the original dataset plus
            the type of object, e.g., Profile1D.

        Returns
        -------
        filename : str
            The name of the file that has been created.

        Examples
        --------

        >>> import yt
        >>> ds = yt.load("enzo_tiny_cosmology/DD0046/DD0046")
        >>> ad = ds.all_data()
        >>> profile = yt.create_profile(
        ...     ad,
        ...     [("gas", "density"), ("gas", "temperature")],
        ...     ("gas", "mass"),
        ...     weight_field=None,
        ...     n_bins=(128, 128),
        ... )
        >>> fn = profile.save_as_dataset()
        >>> prof_ds = yt.load(fn)
        >>> print(prof_ds.data[("gas", "mass")].shape)
        (128, 128)
        >>> print(prof_ds.data[("index", "x")].shape)  # x bins as 1D array
        (128,)
        >>> print(prof_ds.data[("gas", "density")].shape)  # x bins as 2D array
        (128, 128)
        >>> p = yt.PhasePlot(
        ...     prof_ds.data,
        ...     ("gas", "density"),
        ...     ("gas", "temperature"),
        ...     ("gas", "mass"),
        ...     weight_field=None,
        ... )
        >>> p.save()

        """

        keyword = f"{str(self.ds)}_{self.__class__.__name__}"
        filename = get_output_filename(filename, keyword, ".h5")

        args = ("field", "log")
        extra_attrs = {
            "data_type": "yt_profile",
            "profile_dimensions": self.size,
            "weight_field": self.weight_field,
            "fractional": self.fractional,
            "accumulation": self.accumulation,
        }
        data = {}
        data.update(self.field_data)
        data["weight"] = self.weight
        data["used"] = self.used.astype("float64")
        std = "standard_deviation"
        if self.weight_field is not None:
            std_data = getattr(self, std)
            data.update({(std, field[1]): std_data[field] for field in self.field_data})

        dimensionality = 0
        bin_data = []
        for ax in "xyz":
            if hasattr(self, ax):
                dimensionality += 1
                data[ax] = getattr(self, ax)
                bin_data.append(data[ax])
                bin_field_name = f"{ax}_bins"
                data[bin_field_name] = getattr(self, bin_field_name)
                extra_attrs[f"{ax}_range"] = self.ds.arr(
                    [data[bin_field_name][0], data[bin_field_name][-1]]
                )
                for arg in args:
                    key = f"{ax}_{arg}"
                    extra_attrs[key] = getattr(self, key)

        bin_fields = np.meshgrid(*bin_data)
        for i, ax in enumerate("xyz"[:dimensionality]):
            data[getattr(self, f"{ax}_field")] = bin_fields[i]

        extra_attrs["dimensionality"] = dimensionality
        ftypes = {field: "data" for field in data if field[0] != std}
        if self.weight_field is not None:
            ftypes.update({(std, field[1]): std for field in self.field_data})
        save_as_dataset(
            self.ds, filename, data, field_types=ftypes, extra_attrs=extra_attrs
        )

        return filename


class ProfileNDFromDataset(ProfileND):
    """
    An ND profile object loaded from a ytdata dataset.
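
    Examples
    --------
    A minimal sketch; the file name below stands in for whatever
    :meth:`ProfileND.save_as_dataset` wrote out:

    >>> import yt
    >>> from yt.data_objects.profiles import Profile2DFromDataset
    >>> prof_ds = yt.load("DD0046_Profile2D.h5")
    >>> prof = Profile2DFromDataset(prof_ds)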
    """

    def __init__(self, ds):
        ProfileND.__init__(self, ds.data, ds.parameters.get("weight_field", None))
        self.fractional = ds.parameters.get("fractional", False)
        self.accumulation = ds.parameters.get("accumulation", False)
        exclude_fields = ["used", "weight"]
        for ax in "xyz"[: ds.dimensionality]:
            setattr(self, ax, ds.data[("data", ax)])
            ax_bins = f"{ax}_bins"
            ax_field = f"{ax}_field"
            ax_log = f"{ax}_log"
            setattr(self, ax_bins, ds.data[("data", ax_bins)])
            field_name = tuple(ds.parameters.get(ax_field, (None, None)))
            setattr(self, ax_field, field_name)
            self.field_info[field_name] = ds.field_info[field_name]
            setattr(self, ax_log, ds.parameters.get(ax_log, False))
            exclude_fields.extend([ax, ax_bins, field_name[1]])
        self.weight = ds.data[("data", "weight")]
        self.used = ds.data[("data", "used")].d.astype(bool)
        profile_fields = [
            f
            for f in ds.field_list
            if f[1] not in exclude_fields and f[0] != "standard_deviation"
        ]
        for field in profile_fields:
            self.field_map[field[1]] = field
            self.field_data[field] = ds.data[field]
            self.field_info[field] = ds.field_info[field]
            self.field_units[field] = ds.data[field].units
            if ("standard_deviation", field[1]) in ds.field_list:
                self.standard_deviation[field] = ds.data["standard_deviation", field[1]]


class Profile1D(ProfileND):
    """An object that represents a 1D profile.

    Parameters
    ----------

    data_source : YTSelectionContainer object
        The data object to be profiled
    x_field : string field name
        The field to profile as a function of
    x_n : integer
        The number of bins along the x direction.
    x_min : float
        The minimum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_max : float
        The maximum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_log : boolean
        Controls whether or not the bins for the x field are evenly
        spaced in linear (False) or log (True) space.
    weight_field : string field name
        The field to weight the profiled fields by.
    override_bins_x : array
        Array to set as xbins and ignore other parameters if set

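    Examples
    --------
    A minimal usage sketch; the dataset name is only illustrative, and
    :func:`~yt.data_objects.profiles.create_profile` is usually the more
    convenient way to build a profile:

    >>> import yt
    >>> from yt.data_objects.profiles import Profile1D
    >>> ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    >>> ad = ds.all_data()
    >>> d_min, d_max = ad.quantities.extrema(("gas", "density"))
    >>> prof = Profile1D(
    ...     ad, ("gas", "density"), 64, d_min, d_max, True,
    ...     weight_field=("gas", "mass"),
    ... )
    >>> prof.add_fields([("gas", "temperature")])
    >>> print(prof[("gas", "temperature")])
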
    """

    def __init__(
        self,
        data_source,
        x_field,
        x_n,
        x_min,
        x_max,
        x_log,
        weight_field=None,
        override_bins_x=None,
    ):
        super().__init__(data_source, weight_field)
        self.x_field = data_source._determine_fields(x_field)[0]
        self.field_info[self.x_field] = self.data_source.ds.field_info[self.x_field]
        self.x_log = x_log
        x_min, x_max = _sanitize_min_max_units(
            x_min, x_max, self.field_info[self.x_field], self.ds.unit_registry
        )
        self.x_bins = array_like_field(
            data_source, self._get_bins(x_min, x_max, x_n, x_log), self.x_field
        )

        if override_bins_x is not None:
            self.x_bins = array_like_field(data_source, override_bins_x, self.x_field)

        self.size = (self.x_bins.size - 1,)
        self.bin_fields = (self.x_field,)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])

    def _bin_chunk(self, chunk, fields, storage):
        rv = self._get_data(chunk, fields)
        if rv is None:
            return
        fdata, wdata, (bf_x,) = rv
        bf_x.convert_to_units(self.field_info[self.x_field].output_units)
        bin_ind = np.digitize(bf_x, self.x_bins) - 1
        new_bin_profile1d(
            bin_ind,
            wdata,
            fdata,
            storage.weight_values,
            storage.values,
            storage.mvalues,
            storage.qvalues,
            storage.used,
        )

        # We've binned it!

    def set_x_unit(self, new_unit):
        """Sets a new unit for the x field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.x_bins.convert_to_units(new_unit)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])

    @property
    def bounds(self):
        return ((self.x_bins[0], self.x_bins[-1]),)

    def plot(self):
        r"""
        This returns a :class:`~yt.visualization.profile_plotter.ProfilePlot`
        with the fields that have been added to this object.
        """
        from yt.visualization.profile_plotter import ProfilePlot

        return ProfilePlot.from_profiles(self)

    def _export_prep(self, fields, only_used):
        if only_used:
            idxs = self.used
        else:
            idxs = slice(None, None, None)
        if not only_used and not np.all(self.used):
            masked = True
        else:
            masked = False
        if fields is None:
            fields = self.field_data.keys()
        else:
            fields = self.data_source._determine_fields(fields)
        return idxs, masked, fields

    def to_dataframe(self, fields=None, only_used=False):
        r"""Export a profile object to a pandas DataFrame.

        This function will take a profile object and, optionally, a list of
        fields, and construct a pandas DataFrame object from them.  If pandas
        is not importable, this will raise ImportError.

        Parameters
        ----------
        fields : list of strings or tuple field names, default None
            If this is supplied, it is the list of fields to be exported into
            the DataFrame. If not supplied, whatever fields exist in the
            profile, along with the bin field, will be exported.
        only_used : boolean, default False
            If True, only the bins which have data will be exported. If False,
            all of the bins will be exported, but the elements for those bins
            in the data arrays will be filled with NaNs.

        Returns
        -------
        df : :class:`~pandas.DataFrame`
            The data contained in the profile.

        Examples
        --------
        >>> sp = ds.sphere("c", (0.1, "unitary"))
        >>> p = sp.profile(
        ...     ("index", "radius"), [("gas", "density"), ("gas", "temperature")]
        ... )
        >>> df1 = p.to_dataframe()
        >>> df2 = p.to_dataframe(fields=("gas", "density"), only_used=True)
        """
        from yt.utilities.on_demand_imports import _pandas as pd

        idxs, masked, fields = self._export_prep(fields, only_used)
        pdata = {self.x_field[-1]: self.x[idxs]}
        for field in fields:
            pdata[field[-1]] = self[field][idxs]
        df = pd.DataFrame(pdata)
        if masked:
            mask = np.zeros(df.shape, dtype="bool")
            mask[~self.used, 1:] = True
            df.mask(mask, inplace=True)
        return df

    def to_astropy_table(self, fields=None, only_used=False):
        """
        Export the profile data to a :class:`~astropy.table.table.QTable`,
        which is a unit-aware Table object. The QTable can then
        be exported to an ASCII file, FITS file, etc.

        See the AstroPy Table docs for more details:
        http://docs.astropy.org/en/stable/table/

        Parameters
        ----------
        fields : list of strings or tuple field names, default None
            If this is supplied, it is the list of fields to be exported into
            the QTable. If not supplied, whatever fields exist in the
            profile, along with the bin field, will be exported.
        only_used : boolean, optional
            If True, only the bins which are used are copied
            to the QTable as rows. If False, all bins are
            copied, but the bins which are not used are masked.
            Default: False

        Returns
        -------
        qt : :class:`~astropy.table.QTable`
            The data contained in the profile.

        Examples
        --------
        >>> sp = ds.sphere("c", (0.1, "unitary"))
        >>> p = sp.profile(
        ...     ("index", "radius"), [("gas", "density"), ("gas", "temperature")]
        ... )
        >>> qt1 = p.to_astropy_table()
        >>> qt2 = p.to_astropy_table(fields=("gas", "density"), only_used=True)
        """
        from astropy.table import QTable

        idxs, masked, fields = self._export_prep(fields, only_used)
        qt = QTable(masked=masked)
        qt[self.x_field[-1]] = self.x[idxs].to_astropy()
        if masked:
            # mask the bins that were not used
            qt[self.x_field[-1]].mask = ~self.used
        for field in fields:
            qt[field[-1]] = self[field][idxs].to_astropy()
            if masked:
                qt[field[-1]].mask = ~self.used
        return qt


class Profile1DFromDataset(ProfileNDFromDataset, Profile1D):
    """
    A 1D profile object loaded from a ytdata dataset.
    """

    def __init__(self, ds):
        ProfileNDFromDataset.__init__(self, ds)


class Profile2D(ProfileND):
    """An object that represents a 2D profile.

    Parameters
    ----------

    data_source : YTSelectionContainer object
        The data object to be profiled
    x_field : string field name
        The field to profile as a function of along the x axis.
    x_n : integer
        The number of bins along the x direction.
    x_min : float
        The minimum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_max : float
        The maximum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_log : boolean
        Controls whether or not the bins for the x field are evenly
        spaced in linear (False) or log (True) space.
    y_field : string field name
        The field to profile as a function of along the y axis
    y_n : integer
        The number of bins along the y direction.
    y_min : float
        The minimum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    y_max : float
        The maximum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    y_log : boolean
        Controls whether or not the bins for the y field are evenly
        spaced in linear (False) or log (True) space.
    weight_field : string field name
        The field to weight the profiled fields by.
    override_bins_x : array
        Array to set as xbins and ignore other parameters if set
    override_bins_y : array
        Array to set as ybins and ignore other parameters if set

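    Examples
    --------
    A minimal usage sketch; the dataset name is only illustrative, and a 2D
    profile is typically built through
    :func:`~yt.data_objects.profiles.create_profile`, which dispatches to
    this class:

    >>> import yt
    >>> ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    >>> ad = ds.all_data()
    >>> prof = yt.create_profile(
    ...     ad,
    ...     [("gas", "density"), ("gas", "temperature")],
    ...     [("gas", "mass")],
    ...     weight_field=None,
    ... )
    >>> print(prof[("gas", "mass")].shape)
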
    """

    def __init__(
        self,
        data_source,
        x_field,
        x_n,
        x_min,
        x_max,
        x_log,
        y_field,
        y_n,
        y_min,
        y_max,
        y_log,
        weight_field=None,
        override_bins_x=None,
        override_bins_y=None,
    ):
        super().__init__(data_source, weight_field)
        # X
        self.x_field = data_source._determine_fields(x_field)[0]
        self.x_log = x_log
        self.field_info[self.x_field] = self.data_source.ds.field_info[self.x_field]
        x_min, x_max = _sanitize_min_max_units(
            x_min, x_max, self.field_info[self.x_field], self.ds.unit_registry
        )
        self.x_bins = array_like_field(
            data_source, self._get_bins(x_min, x_max, x_n, x_log), self.x_field
        )
        if override_bins_x is not None:
            self.x_bins = array_like_field(data_source, override_bins_x, self.x_field)

        # Y
        self.y_field = data_source._determine_fields(y_field)[0]
        self.y_log = y_log
        self.field_info[self.y_field] = self.data_source.ds.field_info[self.y_field]
        y_min, y_max = _sanitize_min_max_units(
            y_min, y_max, self.field_info[self.y_field], self.ds.unit_registry
        )
        self.y_bins = array_like_field(
            data_source, self._get_bins(y_min, y_max, y_n, y_log), self.y_field
        )
        if override_bins_y is not None:
            self.y_bins = array_like_field(data_source, override_bins_y, self.y_field)

        self.size = (self.x_bins.size - 1, self.y_bins.size - 1)

        self.bin_fields = (self.x_field, self.y_field)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])
        self.y = 0.5 * (self.y_bins[1:] + self.y_bins[:-1])

    def _bin_chunk(self, chunk, fields, storage):
        rv = self._get_data(chunk, fields)
        if rv is None:
            return
        fdata, wdata, (bf_x, bf_y) = rv
        bf_x.convert_to_units(self.field_info[self.x_field].output_units)
        bin_ind_x = np.digitize(bf_x, self.x_bins) - 1
        bf_y.convert_to_units(self.field_info[self.y_field].output_units)
        bin_ind_y = np.digitize(bf_y, self.y_bins) - 1
        new_bin_profile2d(
            bin_ind_x,
            bin_ind_y,
            wdata,
            fdata,
            storage.weight_values,
            storage.values,
            storage.mvalues,
            storage.qvalues,
            storage.used,
        )
        # We've binned it!

    def set_x_unit(self, new_unit):
        """Sets a new unit for the x field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.x_bins.convert_to_units(new_unit)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])

    def set_y_unit(self, new_unit):
        """Sets a new unit for the y field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.y_bins.convert_to_units(new_unit)
        self.y = 0.5 * (self.y_bins[1:] + self.y_bins[:-1])

    @property
    def bounds(self):
        return ((self.x_bins[0], self.x_bins[-1]), (self.y_bins[0], self.y_bins[-1]))

    def plot(self):
        r"""
        This returns a :class:`~yt.visualization.profile_plotter.PhasePlot` with
        the fields that have been added to this object.
        """
        from yt.visualization.profile_plotter import PhasePlot

        return PhasePlot.from_profile(self)


class Profile2DFromDataset(ProfileNDFromDataset, Profile2D):
    """
    A 2D profile object loaded from a ytdata dataset.
    """

    def __init__(self, ds):
        ProfileNDFromDataset.__init__(self, ds)


class ParticleProfile(Profile2D):
    """An object that represents a *deposited* 2D profile. This is like a
    Profile2D, except that it is intended for particle data. Instead of just
    binning the particles, the added fields will be deposited onto the mesh
    using either the nearest-grid-point or cloud-in-cell interpolation kernels.

    Parameters
    ----------

    data_source : YTSelectionContainer object
        The data object to be profiled
    x_field : string field name
        The field to profile as a function of along the x axis.
    x_n : integer
        The number of bins along the x direction.
    x_min : float
        The minimum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_max : float
        The maximum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    y_field : string field name
        The field to profile as a function of along the y axis
    y_n : integer
        The number of bins along the y direction.
    y_min : float
        The minimum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    y_max : float
        The maximum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    weight_field : string field name
        The field to use for weighting. Default is None.
    deposition : string, optional
        The interpolation kernel to be used for
        deposition. Valid choices:
        "ngp" : nearest grid point interpolation
        "cic" : cloud-in-cell interpolation

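    Examples
    --------
    A minimal usage sketch; the dataset name is only illustrative, and
    :func:`~yt.data_objects.profiles.create_profile` dispatches to this class
    when both bin fields and all profiled fields are particle fields:

    >>> import yt
    >>> ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    >>> ad = ds.all_data()
    >>> prof = yt.create_profile(
    ...     ad,
    ...     [("all", "particle_position_x"), ("all", "particle_position_y")],
    ...     [("all", "particle_mass")],
    ...     weight_field=None,
    ...     deposition="cic",
    ... )
    >>> print(prof[("all", "particle_mass")].shape)
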
    """

    accumulation = False
    fractional = False

    def __init__(
        self,
        data_source,
        x_field,
        x_n,
        x_min,
        x_max,
        x_log,
        y_field,
        y_n,
        y_min,
        y_max,
        y_log,
        weight_field=None,
        deposition="ngp",
    ):

        x_field = data_source._determine_fields(x_field)[0]
        y_field = data_source._determine_fields(y_field)[0]

        if deposition not in ["ngp", "cic"]:
            raise NotImplementedError(deposition)
        elif (x_log or y_log) and deposition != "ngp":
            mylog.warning(
                "cic deposition is only supported for linear axis "
                "scales, falling back to ngp deposition"
            )
            deposition = "ngp"

        self.deposition = deposition

        # log scaling and the weight field are passed straight through to
        # Profile2D; cic deposition with log-scaled axes was already
        # downgraded to ngp above.
        super().__init__(
            data_source,
            x_field,
            x_n,
            x_min,
            x_max,
            x_log,
            y_field,
            y_n,
            y_min,
            y_max,
            y_log,
            weight_field=weight_field,
        )

    # Either stick the particle field in the nearest bin,
    # or spread it out using the 2D CIC deposition function
    def _bin_chunk(self, chunk, fields, storage):
        rv = self._get_data(chunk, fields)
        if rv is None:
            return
        fdata, wdata, (bf_x, bf_y) = rv
        # make sure everything has the same units before deposition.
        # the units will be scaled to the correct values later.

        if self.deposition == "ngp":
            func = NGPDeposit_2
        elif self.deposition == "cic":
            func = CICDeposit_2

        for fi, _field in enumerate(fields):
            if self.weight_field is None:
                deposit_vals = fdata[:, fi]
            else:
                deposit_vals = wdata * fdata[:, fi]

            field_mask = np.zeros(self.size, dtype="uint8")

            func(
                bf_x,
                bf_y,
                deposit_vals,
                fdata[:, fi].size,
                storage.values[:, :, fi],
                field_mask,
                self.x_bins,
                self.y_bins,
            )

            locs = field_mask > 0
            storage.used[locs] = True

            if self.weight_field is not None:
                func(
                    bf_x,
                    bf_y,
                    wdata,
                    fdata[:, fi].size,
                    storage.weight_values,
                    field_mask,
                    self.x_bins,
                    self.y_bins,
                )
            else:
                storage.weight_values[locs] = 1.0
            storage.mvalues[locs, fi] = (
                storage.values[locs, fi] / storage.weight_values[locs]
            )
        # We've binned it!


class Profile3D(ProfileND):
    """An object that represents a 3D profile.

    Parameters
    ----------

    data_source : YTSelectionContainer object
        The data object to be profiled
    x_field : string field name
        The field to profile as a function of along the x axis.
    x_n : integer
        The number of bins along the x direction.
    x_min : float
        The minimum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_max : float
        The maximum value of the x profile field. If supplied without units,
        assumed to be in the output units for x_field.
    x_log : boolean
        Controls whether or not the bins for the x field are evenly
        spaced in linear (False) or log (True) space.
    y_field : string field name
        The field to profile as a function of along the y axis
    y_n : integer
        The number of bins along the y direction.
    y_min : float
        The minimum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    y_max : float
        The maximum value of the y profile field. If supplied without units,
        assumed to be in the output units for y_field.
    y_log : boolean
        Controls whether or not the bins for the y field are evenly
        spaced in linear (False) or log (True) space.
    z_field : string field name
        The field to profile as a function of along the z axis
    z_n : integer
        The number of bins along the z direction.
    z_min : float
        The minimum value of the z profile field. If supplied without units,
        assumed to be in the output units for z_field.
    z_max : float
        The maximum value of the z profile field. If supplied without units,
        assumed to be in the output units for z_field.
    z_log : boolean
        Controls whether or not the bins for the z field are evenly
        spaced in linear (False) or log (True) space.
    weight_field : string field name
        The field to weight the profiled fields by.
    override_bins_x : array
        Array to set as xbins and ignore other parameters if set
    override_bins_y : array
        Array to set as ybins and ignore other parameters if set
    override_bins_z : array
        Array to set as zbins and ignore other parameters if set

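    Examples
    --------
    A minimal usage sketch; the dataset name is only illustrative, and
    :func:`~yt.data_objects.profiles.create_profile` is usually the more
    convenient way to build a 3D profile:

    >>> import yt
    >>> ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")
    >>> ad = ds.all_data()
    >>> prof = yt.create_profile(
    ...     ad,
    ...     [
    ...         ("gas", "density"),
    ...         ("gas", "temperature"),
    ...         ("gas", "velocity_magnitude"),
    ...     ],
    ...     [("gas", "mass")],
    ...     weight_field=None,
    ... )
    >>> print(prof[("gas", "mass")].shape)
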
    """

    def __init__(
        self,
        data_source,
        x_field,
        x_n,
        x_min,
        x_max,
        x_log,
        y_field,
        y_n,
        y_min,
        y_max,
        y_log,
        z_field,
        z_n,
        z_min,
        z_max,
        z_log,
        weight_field=None,
        override_bins_x=None,
        override_bins_y=None,
        override_bins_z=None,
    ):
        super().__init__(data_source, weight_field)
        # X
        self.x_field = data_source._determine_fields(x_field)[0]
        self.x_log = x_log
        self.field_info[self.x_field] = self.data_source.ds.field_info[self.x_field]
        x_min, x_max = _sanitize_min_max_units(
            x_min, x_max, self.field_info[self.x_field], self.ds.unit_registry
        )
        self.x_bins = array_like_field(
            data_source, self._get_bins(x_min, x_max, x_n, x_log), self.x_field
        )
        if override_bins_x is not None:
            self.x_bins = array_like_field(data_source, override_bins_x, self.x_field)
        # Y
        self.y_field = data_source._determine_fields(y_field)[0]
        self.y_log = y_log
        self.field_info[self.y_field] = self.data_source.ds.field_info[self.y_field]
        y_min, y_max = _sanitize_min_max_units(
            y_min, y_max, self.field_info[self.y_field], self.ds.unit_registry
        )
        self.y_bins = array_like_field(
            data_source, self._get_bins(y_min, y_max, y_n, y_log), self.y_field
        )
        if override_bins_y is not None:
            self.y_bins = array_like_field(data_source, override_bins_y, self.y_field)
        # Z
        self.z_field = data_source._determine_fields(z_field)[0]
        self.z_log = z_log
        self.field_info[self.z_field] = self.data_source.ds.field_info[self.z_field]
        z_min, z_max = _sanitize_min_max_units(
            z_min, z_max, self.field_info[self.z_field], self.ds.unit_registry
        )
        self.z_bins = array_like_field(
            data_source, self._get_bins(z_min, z_max, z_n, z_log), self.z_field
        )
        if override_bins_z is not None:
            self.z_bins = array_like_field(data_source, override_bins_z, self.z_field)

        self.size = (self.x_bins.size - 1, self.y_bins.size - 1, self.z_bins.size - 1)

        self.bin_fields = (self.x_field, self.y_field, self.z_field)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])
        self.y = 0.5 * (self.y_bins[1:] + self.y_bins[:-1])
        self.z = 0.5 * (self.z_bins[1:] + self.z_bins[:-1])

    def _bin_chunk(self, chunk, fields, storage):
        rv = self._get_data(chunk, fields)
        if rv is None:
            return
        fdata, wdata, (bf_x, bf_y, bf_z) = rv
        bf_x.convert_to_units(self.field_info[self.x_field].output_units)
        bin_ind_x = np.digitize(bf_x, self.x_bins) - 1
        bf_y.convert_to_units(self.field_info[self.y_field].output_units)
        bin_ind_y = np.digitize(bf_y, self.y_bins) - 1
        bf_z.convert_to_units(self.field_info[self.z_field].output_units)
        bin_ind_z = np.digitize(bf_z, self.z_bins) - 1
        new_bin_profile3d(
            bin_ind_x,
            bin_ind_y,
            bin_ind_z,
            wdata,
            fdata,
            storage.weight_values,
            storage.values,
            storage.mvalues,
            storage.qvalues,
            storage.used,
        )
        # We've binned it!

    @property
    def bounds(self):
        return (
            (self.x_bins[0], self.x_bins[-1]),
            (self.y_bins[0], self.y_bins[-1]),
            (self.z_bins[0], self.z_bins[-1]),
        )

    def set_x_unit(self, new_unit):
        """Sets a new unit for the x field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.x_bins.convert_to_units(new_unit)
        self.x = 0.5 * (self.x_bins[1:] + self.x_bins[:-1])

    def set_y_unit(self, new_unit):
        """Sets a new unit for the y field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.y_bins.convert_to_units(new_unit)
        self.y = 0.5 * (self.y_bins[1:] + self.y_bins[:-1])

    def set_z_unit(self, new_unit):
        """Sets a new unit for the z field

        Parameters
        ----------
        new_unit : string or Unit object
            The name of the new unit.
        """
        self.z_bins.convert_to_units(new_unit)
        self.z = 0.5 * (self.z_bins[1:] + self.z_bins[:-1])


class Profile3DFromDataset(ProfileNDFromDataset, Profile3D):
    """
    A 3D profile object loaded from a ytdata dataset.
    """

    def __init__(self, ds):
        ProfileNDFromDataset.__init__(self, ds)


def sanitize_field_tuple_keys(input_dict, data_source):
    if input_dict is not None:
        dummy = {}
        for item in input_dict:
            dummy[data_source._determine_fields(item)[0]] = input_dict[item]
        return dummy
    else:
        return input_dict


def create_profile(
    data_source,
    bin_fields,
    fields,
    n_bins=64,
    extrema=None,
    logs=None,
    units=None,
    weight_field=("gas", "mass"),
    accumulation=False,
    fractional=False,
    deposition="ngp",
    override_bins=None,
):
    r"""
    Create a 1, 2, or 3D profile object.

    The dimensionality of the profile object is chosen by the number of
    fields given in the bin_fields argument.

    Parameters
    ----------
    data_source : YTSelectionContainer Object
        The data object to be profiled.
    bin_fields : list of strings
        List of the binning fields for profiling.
    fields : list of strings
        The fields to be profiled.
    n_bins : int or list of ints
        The number of bins in each dimension.  If None, 64 bins are used
        for each bin field.
        Default: 64.
    extrema : dict of min, max tuples
        Minimum and maximum values of the bin_fields for the profiles.
        The keys correspond to the field names. Defaults to the extrema
        of the bin_fields of the dataset. If a units dict is provided, extrema
        are understood to be in the units specified in the dictionary.
    logs : dict of boolean values
        Whether or not to log the bin_fields for the profiles.
        The keys correspond to the field names. Defaults to the take_log
        attribute of the field.
    units : dict of strings
        The units of the fields in the profiles, including the bin_fields.
    weight_field : str or tuple field identifier
        The weight field for computing weighted average for the profile
        values.  If None, the profile values are sums of the data in
        each bin. Defaults to ("gas", "mass").
    accumulation : bool or list of bools
        If True, the profile values for a bin n are the cumulative sum of
        all the values from bin 0 to n.  If a negative value (e.g. -1) is
        given, the sum is reversed so that the value for bin n is the
        cumulative sum from bin N (the last bin) to n.  If the profile is
        2D or 3D, a list of values can be given to control the summation
        in each dimension independently.
        Default: False.
    fractional : bool
        If True, the profile values are divided by the sum of all
        the profile data such that the profile represents a probability
        distribution function.
    deposition : string
        Controls the type of deposition used for ParticlePhasePlots.
        Valid choices are 'ngp' and 'cic'. Default is 'ngp'. This parameter is
        ignored if the input fields are not of particle type.
    override_bins : dict of bins to profile plot with
        If set, ignores n_bins and extrema settings and uses the
        supplied bins to profile the field. If a units dict is provided,
        bins are understood to be in the units specified in the dictionary.


    Examples
    --------

    Create a 1d profile.  Access bin field from profile.x and field
    data from profile[<field_name>].

    >>> ds = load("DD0046/DD0046")
    >>> ad = ds.all_data()
    >>> profile = create_profile(
    ...     ad, [("gas", "density")], [("gas", "temperature"), ("gas", "velocity_x")]
    ... )
    >>> print(profile.x)
    >>> print(profile["gas", "temperature"])
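
    Create a 2D profile (a phase distribution) of cell mass as a function of
    density and temperature; a sketch following the same pattern as above:

    >>> profile2d = create_profile(
    ...     ad,
    ...     [("gas", "density"), ("gas", "temperature")],
    ...     [("gas", "mass")],
    ...     weight_field=None,
    ... )
    >>> print(profile2d[("gas", "mass")].shape)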

    """
    bin_fields = data_source._determine_fields(bin_fields)
    fields = list(iter_fields(fields))
    is_pfield = [
        data_source.ds._get_field_info(f).sampling_type == "particle"
        for f in bin_fields + fields
    ]
    wf = None
    if weight_field is not None:
        wf = data_source.ds._get_field_info(weight_field)
        is_pfield.append(wf.sampling_type == "particle")
        wf = wf.name

    if len(bin_fields) > 1 and isinstance(accumulation, bool):
        accumulation = [accumulation for _ in range(len(bin_fields))]

    bin_fields = data_source._determine_fields(bin_fields)
    fields = data_source._determine_fields(fields)
    units = sanitize_field_tuple_keys(units, data_source)
    extrema = sanitize_field_tuple_keys(extrema, data_source)
    logs = sanitize_field_tuple_keys(logs, data_source)
    override_bins = sanitize_field_tuple_keys(override_bins, data_source)

    if any(is_pfield) and not all(is_pfield):
        if hasattr(data_source.ds, "_sph_ptypes"):
            is_local = [
                data_source.ds.field_info[f].sampling_type == "local"
                for f in bin_fields + fields
            ]
            is_local_or_pfield = [pf or lf for (pf, lf) in zip(is_pfield, is_local)]
            if not all(is_local_or_pfield):
                raise YTIllDefinedProfile(
                    bin_fields, data_source._determine_fields(fields), wf, is_pfield
                )
        else:
            raise YTIllDefinedProfile(
                bin_fields, data_source._determine_fields(fields), wf, is_pfield
            )
    if len(bin_fields) == 1:
        cls = Profile1D
    elif len(bin_fields) == 2 and all(is_pfield):
        if deposition == "cic":
            if logs is not None:
                if (bin_fields[0] in logs and logs[bin_fields[0]]) or (
                    bin_fields[1] in logs and logs[bin_fields[1]]
                ):
                    raise RuntimeError(
                        "CIC deposition is only implemented for linear-scaled axes"
                    )
            else:
                logs = {bin_fields[0]: False, bin_fields[1]: False}
            if any(accumulation) or fractional:
                raise RuntimeError(
                    "The accumulation and fractional keyword arguments must be "
                    "False for CIC deposition"
                )
        cls = ParticleProfile
    elif len(bin_fields) == 2:
        cls = Profile2D
    elif len(bin_fields) == 3:
        cls = Profile3D
    else:
        raise NotImplementedError
    if weight_field is not None and cls == ParticleProfile:
        (weight_field,) = data_source._determine_fields([weight_field])
        wf = data_source.ds._get_field_info(weight_field)
        if not wf.sampling_type == "particle":
            weight_field = None
    if not is_sequence(n_bins):
        n_bins = [n_bins] * len(bin_fields)
    if not is_sequence(accumulation):
        accumulation = [accumulation] * len(bin_fields)
    if logs is None:
        logs = {}
    logs_list = []
    for bin_field in bin_fields:
        if bin_field in logs:
            logs_list.append(logs[bin_field])
        else:
            logs_list.append(data_source.ds.field_info[bin_field].take_log)
    logs = logs_list
    if extrema is None:
        ex = [
            data_source.quantities["Extrema"](f, non_zero=l)
            for f, l in zip(bin_fields, logs)
        ]
        # pad extrema by epsilon so cells at bin edges are not excluded
        for i, (mi, ma) in enumerate(ex):
            mi = mi - np.spacing(mi)
            ma = ma + np.spacing(ma)
            ex[i][0], ex[i][1] = mi, ma
    else:
        ex = []
        for bin_field in bin_fields:
            bf_units = data_source.ds.field_info[bin_field].output_units
            try:
                field_ex = list(extrema[bin_field[-1]])
            except KeyError as e:
                try:
                    field_ex = list(extrema[bin_field])
                except KeyError:
                    raise RuntimeError(
                        "Could not find field {} or {} in extrema".format(
                            bin_field[-1], bin_field
                        )
                    ) from e

            if isinstance(field_ex[0], tuple):
                field_ex = [data_source.ds.quan(*f) for f in field_ex]
            if any([exi is None for exi in field_ex]):
                try:
                    ds_extrema = data_source.quantities.extrema(bin_field)
                except AttributeError:
                    # ytdata profile datasets don't have data_source.quantities
                    bf_vals = data_source[bin_field]
                    ds_extrema = data_source.ds.arr([bf_vals.min(), bf_vals.max()])
                for i, exi in enumerate(field_ex):
                    if exi is None:
                        field_ex[i] = ds_extrema[i]
                        # pad extrema by epsilon so cells at bin edges are
                        # not excluded
                        field_ex[i] -= (-1) ** i * np.spacing(field_ex[i])
            if units is not None and bin_field in units:
                for i, exi in enumerate(field_ex):
                    if hasattr(exi, "units"):
                        field_ex[i] = exi.to(units[bin_field])
                    else:
                        field_ex[i] = data_source.ds.quan(exi, units[bin_field])
                fe = data_source.ds.arr(field_ex)
            else:
                if hasattr(field_ex, "units"):
                    fe = field_ex.to(bf_units)
                else:
                    fe = data_source.ds.arr(field_ex, bf_units)
            fe.convert_to_units(bf_units)
            field_ex = [fe[0].v, fe[1].v]
            if is_sequence(field_ex[0]):
                field_ex[0] = data_source.ds.quan(field_ex[0][0], field_ex[0][1])
                field_ex[0] = field_ex[0].in_units(bf_units)
            if is_sequence(field_ex[1]):
                field_ex[1] = data_source.ds.quan(field_ex[1][0], field_ex[1][1])
                field_ex[1] = field_ex[1].in_units(bf_units)
            ex.append(field_ex)

    if override_bins is not None:
        o_bins = []
        for bin_field in bin_fields:
            bf_units = data_source.ds.field_info[bin_field].output_units
            try:
                field_obin = override_bins[bin_field[-1]]
            except KeyError:
                field_obin = override_bins[bin_field]

            if field_obin is None:
                o_bins.append(None)
                continue

            if isinstance(field_obin, tuple):
                field_obin = data_source.ds.arr(*field_obin)

            if units is not None and bin_field in units:
                fe = data_source.ds.arr(field_obin, units[bin_field])
            else:
                if hasattr(field_obin, "units"):
                    fe = field_obin.to(bf_units)
                else:
                    fe = data_source.ds.arr(field_obin, bf_units)
            fe.convert_to_units(bf_units)
            field_obin = fe.d
            o_bins.append(field_obin)

    args = [data_source]
    for f, n, (mi, ma), l in zip(bin_fields, n_bins, ex, logs):
        if mi <= 0 and l:
            raise YTIllDefinedBounds(mi, ma)
        args += [f, n, mi, ma, l]
    kwargs = dict(weight_field=weight_field)
    if cls is ParticleProfile:
        kwargs["deposition"] = deposition
    if override_bins is not None:
        for o_bin, ax in zip(o_bins, ["x", "y", "z"]):
            kwargs[f"override_bins_{ax}"] = o_bin
    obj = cls(*args, **kwargs)
    obj.accumulation = accumulation
    obj.fractional = fractional
    if fields is not None:
        obj.add_fields([field for field in fields])
    for field in fields:
        if fractional:
            obj.field_data[field] /= obj.field_data[field].sum()
        for axis, acc in enumerate(accumulation):
            if not acc:
                continue
            temp = obj.field_data[field]
            temp = np.rollaxis(temp, axis)
            if weight_field is not None:
                temp_weight = obj.weight
                temp_weight = np.rollaxis(temp_weight, axis)
            if acc < 0:
                temp = temp[::-1]
                if weight_field is not None:
                    temp_weight = temp_weight[::-1]
            if weight_field is None:
                temp = temp.cumsum(axis=0)
            else:
                temp = (temp * temp_weight).cumsum(axis=0) / temp_weight.cumsum(axis=0)
            if acc < 0:
                temp = temp[::-1]
                if weight_field is not None:
                    temp_weight = temp_weight[::-1]
            temp = np.rollaxis(temp, axis)
            obj.field_data[field] = temp
            if weight_field is not None:
                temp_weight = np.rollaxis(temp_weight, axis)
                obj.weight = temp_weight
    if units is not None:
        for field, unit in units.items():
            field = data_source._determine_fields(field)[0]
            if field == obj.x_field:
                obj.set_x_unit(unit)
            elif field == getattr(obj, "y_field", None):
                obj.set_y_unit(unit)
            elif field == getattr(obj, "z_field", None):
                obj.set_z_unit(unit)
            else:
                obj.set_field_unit(field, unit)
    return obj