import abc
import os

from yt.config import ytcfg
from yt.funcs import mylog
from yt.utilities.cython_fortran_utils import FortranFile

from .field_handlers import HandlerMixin
from .io import _read_part_file_descriptor

# Global registry of all known particle handler classes; populated
# automatically by ParticleFileHandler.__init_subclass__.
PARTICLE_HANDLERS = set()


def get_particle_handlers():
    """Return the set of all registered particle handler classes."""
    return PARTICLE_HANDLERS


def register_particle_handler(ph):
    """Add a particle handler class to the global registry."""
    PARTICLE_HANDLERS.add(ph)


class ParticleFileHandler(abc.ABC, HandlerMixin):
    """
    Abstract class to handle particles in RAMSES. Each instance
    represents a single file (one domain).

    To add support to a new particle file, inherit from this class and
    implement all functions containing a `NotImplementedError`.

    See `SinkParticleFileHandler` for an example implementation."""

    _file_type = "particle"

    # These properties are static properties
    ptype = None  # The name to give to the particle type
    fname = None  # The name of the file(s).
    file_descriptor = None  # The name of the file descriptor (if any)

    attrs = None  # The attributes of the header
    known_fields = None  # A list of tuple containing the field name and its type
    config_field = None  # Name of the config section (if any)

    # These properties are computed dynamically
    field_offsets = None  # Mapping from field to offset in file
    # Mapping from field to the type of the data (float, integer, ...)
    field_types = None
    local_particle_count = None  # The number of particles in the domain

    def __init_subclass__(cls, *args, **kwargs):
        """
        Registers subclasses at creation.
        """
        super().__init_subclass__(*args, **kwargs)

        # Only register concrete handlers (ones that define a particle type);
        # intermediate abstract subclasses are skipped.
        if cls.ptype is not None:
            register_particle_handler(cls)

        cls._unique_registry = {}
        return cls

    def __init__(self, domain):
        """Set up the handler for one domain (one file on disk)."""
        self.setup_handler(domain)

        # Attempt to read the list of fields from the config file
        if self.config_field and ytcfg.has_section(self.config_field):
            cfg = ytcfg.get(self.config_field, "fields")
            known_fields = []
            # Each non-blank config line has the form "field_name, type"
            for c in (_.strip() for _ in cfg.split("\n") if _.strip() != ""):
                field, field_type = (_.strip() for _ in c.split(","))
                known_fields.append((field, field_type))
            self.known_fields = known_fields

    @abc.abstractmethod
    def read_header(self):
        """
        This function is called once per file. It should:

        * read the header of the file and store any relevant information
        * detect the fields in the file
        * compute the offsets (location in the file) of each field

        It is in charge of setting `self.field_offsets` and `self.field_types`.

        * `field_offsets`: dictionary: tuple -> integer
           A dictionary that maps `(type, field_name)` to their
           location in the file (integer)
        * `field_types`: dictionary: tuple -> character
           A dictionary that maps `(type, field_name)` to their type
           (character), following Python's struct convention.
        """
        pass


class DefaultParticleFileHandler(ParticleFileHandler):
    """Handle the default RAMSES particle files (``part_XXXXX.outYYYYY``)."""

    ptype = "io"
    fname = "part_{iout:05d}.out{icpu:05d}"
    file_descriptor = "part_file_descriptor.txt"
    config_field = "ramses-particles"

    # Fortran header layout: (name, number of elements, struct type code).
    # -1 means "read whatever the record holds" (localseed length varies).
    attrs = (
        ("ncpu", 1, "i"),
        ("ndim", 1, "i"),
        ("npart", 1, "i"),
        ("localseed", -1, "i"),
        ("nstar_tot", 1, "i"),
        ("mstar_tot", 1, "d"),
        ("mstar_lost", 1, "d"),
        ("nsink", 1, "i"),
    )

    known_fields = [
        ("particle_position_x", "d"),
        ("particle_position_y", "d"),
        ("particle_position_z", "d"),
        ("particle_velocity_x", "d"),
        ("particle_velocity_y", "d"),
        ("particle_velocity_z", "d"),
        ("particle_mass", "d"),
        ("particle_identity", "i"),
        ("particle_refinement_level", "i"),
    ]

    def read_header(self):
        """Read the file header, then walk the records to compute the
        byte offset and type of each particle field."""
        if not self.exists:
            # Missing file: expose an empty (but valid) field layout.
            self.field_offsets = {}
            self.field_types = {}
            self.local_particle_count = 0
            return

        fd = FortranFile(self.fname)
        # Record the file length so we can stop scanning at EOF.
        fd.seek(0, os.SEEK_END)
        flen = fd.tell()
        fd.seek(0)
        hvals = {}
        attrs = self.attrs
        hvals.update(fd.read_attrs(attrs))
        self.header = hvals
        self.local_particle_count = hvals["npart"]
        extra_particle_fields = self.ds._extra_particle_fields

        if self.has_descriptor:
            # A file descriptor is authoritative about the field layout.
            particle_fields = _read_part_file_descriptor(self.file_descriptor)
        else:
            particle_fields = list(self.known_fields)

        if extra_particle_fields is not None:
            particle_fields += extra_particle_fields

        # When star formation is on (nstar_tot > 0), RAMSES writes the star
        # fields after the standard ones. Only append the defaults when the
        # user did *not* override the layout via `extra_particle_fields`;
        # otherwise the user-provided list is taken as-is.
        # (BUGFIX: the condition was inverted — `is not None` — which both
        # skipped the star fields in the default case and duplicated them
        # when extras were supplied.)
        if hvals["nstar_tot"] > 0 and extra_particle_fields is None:
            particle_fields += [
                ("particle_birth_time", "d"),
                ("particle_metallicity", "d"),
            ]

        field_offsets = {}
        _pfields = {}

        ptype = self.ptype

        # Read offsets: each field occupies exactly one Fortran record.
        for field, vtype in particle_fields:
            if fd.tell() >= flen:
                break
            field_offsets[ptype, field] = fd.tell()
            _pfields[ptype, field] = vtype
            fd.skip(1)

        # Any records left past the known fields are unidentified extras;
        # name them sequentially and assume kind double.
        iextra = 0
        while fd.tell() < flen:
            iextra += 1
            field, vtype = ("particle_extra_field_%i" % iextra, "d")
            particle_fields.append((field, vtype))

            field_offsets[ptype, field] = fd.tell()
            _pfields[ptype, field] = vtype
            fd.skip(1)

        fd.close()

        # Warn once per dataset about guessed extra fields.
        if iextra > 0 and not self.ds._warned_extra_fields["io"]:
            mylog.warning(
                "Detected %s extra particle fields assuming kind "
                "`double`. Consider using the `extra_particle_fields` "
                "keyword argument if you have unexpected behavior.",
                iextra,
            )
            self.ds._warned_extra_fields["io"] = True

        self.field_offsets = field_offsets
        self.field_types = _pfields


class SinkParticleFileHandler(ParticleFileHandler):
    """Handle sink files"""

    ptype = "sink"
    fname = "sink_{iout:05d}.out{icpu:05d}"
    file_descriptor = "sink_file_descriptor.txt"
    config_field = "ramses-sink-particles"

    attrs = (("nsink", 1, "i"), ("nindsink", 1, "i"))

    known_fields = [
        ("particle_identifier", "i"),
        ("particle_mass", "d"),
        ("particle_position_x", "d"),
        ("particle_position_y", "d"),
        ("particle_position_z", "d"),
        ("particle_velocity_x", "d"),
        ("particle_velocity_y", "d"),
        ("particle_velocity_z", "d"),
        ("particle_birth_time", "d"),
        ("BH_real_accretion", "d"),
        ("BH_bondi_accretion", "d"),
        ("BH_eddington_accretion", "d"),
        ("BH_esave", "d"),
        ("gas_spin_x", "d"),
        ("gas_spin_y", "d"),
        ("gas_spin_z", "d"),
        ("BH_spin_x", "d"),
        ("BH_spin_y", "d"),
        ("BH_spin_z", "d"),
        ("BH_spin", "d"),
        ("BH_efficiency", "d"),
    ]

    def read_header(self):
        """Read the sink file header and compute per-field offsets/types."""
        if not self.exists:
            self.field_offsets = {}
            self.field_types = {}
            self.local_particle_count = 0
            return
        fd = FortranFile(self.fname)
        fd.seek(0, os.SEEK_END)
        flen = fd.tell()
        fd.seek(0)
        hvals = {}
        # Read the header of the file
        attrs = self.attrs

        hvals.update(fd.read_attrs(attrs))
        self._header = hvals

        # This is somehow a trick here: we only want one domain to
        # be read, as ramses writes all the sinks in all the
        # domains. Here, we set the local_particle_count to 0 except
        # for the first domain to be read.
        if getattr(self.ds, "_sink_file_flag", False):
            self.local_particle_count = 0
        else:
            self.ds._sink_file_flag = True
            self.local_particle_count = hvals["nsink"]

        # Read the fields + add the sink properties
        if self.has_descriptor:
            fields = _read_part_file_descriptor(self.file_descriptor)
        else:
            fields = list(self.known_fields)

        # Note: this follows RAMSES convention.
        for i in range(self.ds.dimensionality * 2 + 1):
            for ilvl in range(self.ds.max_level + 1):
                fields.append((f"particle_prop_{ilvl}_{i}", "d"))

        field_offsets = {}
        _pfields = {}

        # Fill the fields, offsets and types
        self.fields = []
        for field, vtype in fields:
            self.fields.append(field)
            # Stop at EOF: files may contain fewer records than `fields`.
            if fd.tell() >= flen:
                break
            field_offsets[self.ptype, field] = fd.tell()
            _pfields[self.ptype, field] = vtype
            fd.skip(1)
        self.field_offsets = field_offsets
        self.field_types = _pfields
        fd.close()