// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//
// This is an implementation of a general purpose thunk pool manager. Each thunk consists of:
//      1- A thunk stub, typically consisting of a lea + jmp instructions (slightly different
//         on ARM, but semantically equivalent)
//      2- A thunk common stub: the implementation of the common stub depends on
//         the usage scenario of the thunk
//      3- Thunk data: each thunk has two pointer-sized data values that can be stored.
//         The first data value is called the thunk's 'context', and the second value is
//         the thunk's jump target typically.
//
// Without FEATURE_RX_THUNKS, thunks are allocated by mapping a thunks template into memory. The template
// consists of a number of pairs of sections called thunk blocks (typically 8 pairs per mapping). Each pair
// has 2 page-long sections (4096 bytes):
//      1- The first section has RX permissions, and contains the thunk stubs (lea's + jmp's),
//         and the thunk common stubs.
//      2- The second section has RW permissions and contains the thunks data (context + target).
//         The last pointer-sized block in this section is special: it stores the address of
//         the common stub that each thunk stub will jump to (the jump instruction in each thunk
//         jumps to the address stored in that block). Therefore, whenever a new thunks template
//         gets mapped into memory, the value of that last pointer cell in the data section is updated
//         to the common stub address passed in by the caller.
//
// With FEATURE_RX_THUNKS, thunks are created by allocating new virtual memory space, where the first half of
// that space is filled with thunk stubs, and gets RX permissions, and the second half is for the thunks data,
// and gets RW permissions. The thunk stubs and data blocks are not grouped in pairs like in ProjectN: all
// the thunk stub blocks are grouped at the beginning of the allocated virtual memory space, and all the
// thunk data blocks are grouped in the second half of the virtual space.
//
// Available thunks are tracked using a linked list. The first cell in the data block of each thunk is
// used as the nodes of the linked list. The cell will point to the data block of the next available thunk,
// if one is available, or point to null. When thunks are freed, they are added to the beginning of the list.
//

using System.Diagnostics;
using Internal.Runtime;

// Convenient typecasting for IntPtr to use with arithmetic operations
#if BIT64
using nint = System.Int64;
using nuint = System.UInt64;
#else
using nint = System.Int32;
using nuint = System.UInt32;
#endif


namespace System.Runtime
{
    internal static class Constants
    {
        public const uint PageSize = 0x1000;                    // 4k
        public const uint AllocationGranularity = 0x10000;      // 64k
        public const nuint PageSizeMask = 0xFFF;
        public const nuint AllocationGranularityMask = 0xFFFF;

        // Each thunk owns two pointer-sized data cells (context + target).
        public static readonly int ThunkDataSize = 2 * IntPtr.Size;
        public static readonly int ThunkCodeSize = InternalCalls.RhpGetThunkSize();
        public static readonly int NumThunksPerBlock = InternalCalls.RhpGetNumThunksPerBlock();
        public static readonly int NumThunkBlocksPerMapping = InternalCalls.RhpGetNumThunkBlocksPerMapping();
    }

    internal class ThunksHeap
    {
        // Node in the linked list of thunk stub blocks owned by this heap.
        // Used by IsThunkInHeap to validate that a thunk address belongs to this heap.
        private class AllocatedBlock
        {
            internal IntPtr _blockBaseAddress;
            internal AllocatedBlock _nextBlock;
        }

        private IntPtr _commonStubAddress;      // common stub that every thunk in this heap jumps through
        private IntPtr _nextAvailableThunkPtr;  // head of the free-list (address of a free thunk's first data cell)
        private IntPtr _lastThunkPtr;           // tail of the free-list (last thunk data cell of the newest block)

        private AllocatedBlock _allocatedBlocks;

        // Helper functions to set/clear the lowest bit for ARM instruction pointers
        // (the Thumb bit; no-ops on other architectures).
        private static IntPtr ClearThumbBit(IntPtr value)
        {
#if ARM
            Debug.Assert(((nint)value & 1) == 1);
            value = (IntPtr)((nint)value - 1);
#endif
            return value;
        }
        private static IntPtr SetThumbBit(IntPtr value)
        {
#if ARM
            Debug.Assert(((nint)value & 1) == 0);
            value = (IntPtr)((nint)value + 1);
#endif
            return value;
        }

        // Constructs a heap and maps its first thunks block. On failure,
        // _nextAvailableThunkPtr is left as IntPtr.Zero (checked by CreateThunksHeap).
        private unsafe ThunksHeap(IntPtr commonStubAddress)
        {
            _commonStubAddress = commonStubAddress;

            _allocatedBlocks = new AllocatedBlock();

            InternalCalls.RhpAcquireThunkPoolLock();

            IntPtr thunkStubsBlock = ThunkBlocks.GetNewThunksBlock();

            InternalCalls.RhpReleaseThunkPoolLock();

            if (thunkStubsBlock != IntPtr.Zero)
            {
                IntPtr thunkDataBlock = InternalCalls.RhpGetThunkDataBlockAddress(thunkStubsBlock);

                // Address of the first thunk data cell should be at the beginning of the thunks data block (page-aligned)
                Debug.Assert(((nuint)thunkDataBlock % Constants.PageSize) == 0);

                // Update the last pointer value in the thunks data section with the value of the common stub address
                *(IntPtr*)(thunkDataBlock + (int)(Constants.PageSize - IntPtr.Size)) = commonStubAddress;
                Debug.Assert(*(IntPtr*)(thunkDataBlock + (int)(Constants.PageSize - IntPtr.Size)) == commonStubAddress);

                // Set the head and end of the linked list
                _nextAvailableThunkPtr = thunkDataBlock;
                _lastThunkPtr = _nextAvailableThunkPtr + Constants.ThunkDataSize * (Constants.NumThunksPerBlock - 1);

                _allocatedBlocks._blockBaseAddress = thunkStubsBlock;
            }
        }

        /// <summary>
        /// Creates a new thunks heap whose thunks all jump through 'commonStubAddress'.
        /// </summary>
        /// <returns>The new heap, or null if creation failed (e.g. out of memory).</returns>
        public static unsafe ThunksHeap CreateThunksHeap(IntPtr commonStubAddress)
        {
            try
            {
                ThunksHeap newHeap = new ThunksHeap(commonStubAddress);

                if (newHeap._nextAvailableThunkPtr != IntPtr.Zero)
                    return newHeap;
            }
            catch { }   // best-effort: any failure (e.g. OOM during construction) yields a null heap

            return null;
        }

        // TODO: Feature
        // public static ThunksHeap DestroyThunksHeap(ThunksHeap heapToDestroy)
        // {
        // }

        //
        // Note: Expected to be called under lock
        //
        private unsafe bool ExpandHeap()
        {
            AllocatedBlock newBlockInfo;

            // Allocate the tracking node up-front so an OOM here cannot leave the
            // heap's block list in a partially-updated state.
            try
            {
                newBlockInfo = new AllocatedBlock();
            }
            catch
            {
                return false;
            }

            IntPtr thunkStubsBlock = ThunkBlocks.GetNewThunksBlock();

            if (thunkStubsBlock != IntPtr.Zero)
            {
                IntPtr thunkDataBlock = InternalCalls.RhpGetThunkDataBlockAddress(thunkStubsBlock);

                // Address of the first thunk data cell should be at the beginning of the thunks data block (page-aligned)
                Debug.Assert(((nuint)thunkDataBlock % Constants.PageSize) == 0);

                // Update the last pointer value in the thunks data section with the value of the common stub address
                *(IntPtr*)(thunkDataBlock + (int)(Constants.PageSize - IntPtr.Size)) = _commonStubAddress;
                Debug.Assert(*(IntPtr*)(thunkDataBlock + (int)(Constants.PageSize - IntPtr.Size)) == _commonStubAddress);

                // Link the last entry in the old list to the first entry in the new list
                *((IntPtr*)_lastThunkPtr) = thunkDataBlock;

                // Update the pointer to the last entry in the list
                _lastThunkPtr = *((IntPtr*)_lastThunkPtr) + Constants.ThunkDataSize * (Constants.NumThunksPerBlock - 1);

                newBlockInfo._blockBaseAddress = thunkStubsBlock;
                newBlockInfo._nextBlock = _allocatedBlocks;

                _allocatedBlocks = newBlockInfo;

                return true;
            }

            return false;
        }

        /// <summary>
        /// Pops a thunk off the free-list, expanding the heap if needed.
        /// </summary>
        /// <returns>The address of the thunk's stub (with the Thumb bit set on ARM),
        /// or IntPtr.Zero if no thunk could be allocated.</returns>
        public unsafe IntPtr AllocateThunk()
        {
            // TODO: optimize the implementation and make it lock-free
            // or at least change it to a per-heap lock instead of a global lock.

            Debug.Assert(_nextAvailableThunkPtr != IntPtr.Zero);

            InternalCalls.RhpAcquireThunkPoolLock();

            IntPtr nextAvailableThunkPtr = _nextAvailableThunkPtr;
            IntPtr nextNextAvailableThunkPtr = *((IntPtr*)(nextAvailableThunkPtr));

            // A null next-link means this is the last free thunk; keep one in reserve
            // so _lastThunkPtr always points at a valid list tail, and expand instead.
            if (nextNextAvailableThunkPtr == IntPtr.Zero)
            {
                if (!ExpandHeap())
                {
                    InternalCalls.RhpReleaseThunkPoolLock();
                    return IntPtr.Zero;
                }

                nextAvailableThunkPtr = _nextAvailableThunkPtr;
                nextNextAvailableThunkPtr = *((IntPtr*)(nextAvailableThunkPtr));
                Debug.Assert(nextNextAvailableThunkPtr != IntPtr.Zero);
            }

            _nextAvailableThunkPtr = nextNextAvailableThunkPtr;

            InternalCalls.RhpReleaseThunkPoolLock();

            Debug.Assert(nextAvailableThunkPtr != IntPtr.Zero);

#if DEBUG
            // Reset debug flag indicating the thunk is now in use
            *((IntPtr*)(nextAvailableThunkPtr + IntPtr.Size)) = IntPtr.Zero;
#endif

            // Convert the data-cell address into a thunk index within its page,
            // then into the corresponding stub address in the stubs block.
            int thunkIndex = (int)(((nuint)nextAvailableThunkPtr) - ((nuint)nextAvailableThunkPtr & ~Constants.PageSizeMask));
            Debug.Assert((thunkIndex % Constants.ThunkDataSize) == 0);
            thunkIndex = thunkIndex / Constants.ThunkDataSize;

            IntPtr thunkAddress = InternalCalls.RhpGetThunkStubsBlockAddress(nextAvailableThunkPtr) + thunkIndex * Constants.ThunkCodeSize;

            return SetThumbBit(thunkAddress);
        }

        /// <summary>
        /// Returns a previously allocated thunk to the free-list. Fail-fasts if the
        /// address is not a valid thunk stub address (or, in DEBUG, not in this heap).
        /// </summary>
        public unsafe void FreeThunk(IntPtr thunkAddress)
        {
            // TODO: optimize the implementation and make it lock-free
            // or at least change it to a per-heap lock instead of a global lock.

            IntPtr dataAddress = TryGetThunkDataAddress(thunkAddress);
            if (dataAddress == IntPtr.Zero)
                EH.FallbackFailFast(RhFailFastReason.InternalError, null);

#if DEBUG
            if (!IsThunkInHeap(thunkAddress))
                EH.FallbackFailFast(RhFailFastReason.InternalError, null);

            // Debug flag indicating the thunk is no longer used
            *((IntPtr*)(dataAddress + IntPtr.Size)) = new IntPtr(-1);
#endif

            InternalCalls.RhpAcquireThunkPoolLock();

            // Push the thunk onto the front of the free-list.
            *((IntPtr*)(dataAddress)) = _nextAvailableThunkPtr;
            _nextAvailableThunkPtr = dataAddress;

            InternalCalls.RhpReleaseThunkPoolLock();
        }

        // Returns true if 'thunkAddress' falls within the stub range of any block
        // allocated by this heap.
        private bool IsThunkInHeap(IntPtr thunkAddress)
        {
            nuint thunkAddressValue = (nuint)ClearThumbBit(thunkAddress);

            AllocatedBlock currentBlock = _allocatedBlocks;

            while (currentBlock != null)
            {
                if (thunkAddressValue >= (nuint)currentBlock._blockBaseAddress &&
                    thunkAddressValue < (nuint)currentBlock._blockBaseAddress + (nuint)(Constants.NumThunksPerBlock * Constants.ThunkCodeSize))
                {
                    return true;
                }

                currentBlock = currentBlock._nextBlock;
            }

            return false;
        }

        // Maps a thunk stub address to the address of its two-pointer data cell,
        // or IntPtr.Zero if the address is not aligned on a thunk stub boundary.
        private IntPtr TryGetThunkDataAddress(IntPtr thunkAddress)
        {
            nuint thunkAddressValue = (nuint)ClearThumbBit(thunkAddress);

            // Compute the base address of the thunk's mapping
            nuint currentThunksBlockAddress = thunkAddressValue & ~Constants.PageSizeMask;

            // Make sure the thunk address is valid by checking alignment
            if ((thunkAddressValue - currentThunksBlockAddress) % (nuint)Constants.ThunkCodeSize != 0)
                return IntPtr.Zero;

            // Compute the thunk's index
            int thunkIndex = (int)((thunkAddressValue - currentThunksBlockAddress) / (nuint)Constants.ThunkCodeSize);

            // Compute the address of the data block that corresponds to the current thunk
            IntPtr thunkDataBlockAddress = InternalCalls.RhpGetThunkDataBlockAddress((IntPtr)((nint)thunkAddressValue));

            return thunkDataBlockAddress + thunkIndex * Constants.ThunkDataSize;
        }

        /// <summary>
        /// This method retrieves the two data fields for a thunk.
        /// Caution: No checks are made to verify that the thunk address is that of a
        /// valid thunk in use. The caller of this API is responsible for providing a valid
        /// address of a thunk that was not previously freed.
        /// </summary>
        /// <returns>True if the thunk's data was successfully retrieved.</returns>
        public unsafe bool TryGetThunkData(IntPtr thunkAddress, out IntPtr context, out IntPtr target)
        {
            context = IntPtr.Zero;
            target = IntPtr.Zero;

            IntPtr dataAddress = TryGetThunkDataAddress(thunkAddress);
            if (dataAddress == IntPtr.Zero)
                return false;

            if (!IsThunkInHeap(thunkAddress))
                return false;

            // Read the data that is used by the thunk
            context = *((IntPtr*)(dataAddress));
            target = *((IntPtr*)(dataAddress + IntPtr.Size));

            return true;
        }

        /// <summary>
        /// This method sets the two data fields for a thunk.
        /// Caution: No checks are made to verify that the thunk address is that of a
        /// valid thunk in use. The caller of this API is responsible for providing a valid
        /// address of a thunk that was not previously freed. Fail-fasts on an invalid
        /// thunk address.
        /// </summary>
        public unsafe void SetThunkData(IntPtr thunkAddress, IntPtr context, IntPtr target)
        {
            IntPtr dataAddress = TryGetThunkDataAddress(thunkAddress);
            if (dataAddress == IntPtr.Zero)
                EH.FallbackFailFast(RhFailFastReason.InternalError, null);

#if DEBUG
            if (!IsThunkInHeap(thunkAddress))
                EH.FallbackFailFast(RhFailFastReason.InternalError, null);
#endif

            // Update the data that will be used by the thunk that was allocated
            *((IntPtr*)(dataAddress)) = context;
            *((IntPtr*)(dataAddress + IntPtr.Size)) = target;
        }
    }

    internal class ThunkBlocks
    {
        // Blocks from the most recent mapping that have not been handed out yet.
        private static IntPtr[] s_currentlyMappedThunkBlocks = new IntPtr[Constants.NumThunkBlocksPerMapping];
        private static int s_currentlyMappedThunkBlocksIndex = Constants.NumThunkBlocksPerMapping;

        // Returns the next available thunks block (mapping a fresh template if the
        // current mapping is exhausted), with its free-list of thunks initialized.
        // Returns IntPtr.Zero if no new mapping could be created.
        public static unsafe IntPtr GetNewThunksBlock()
        {
            IntPtr nextThunksBlock;

            // Check the most recently mapped thunks block. Each mapping consists of multiple
            // thunk stubs pages, and multiple thunk data pages (typically 8 pages of each in a single mapping)
            if (s_currentlyMappedThunkBlocksIndex < Constants.NumThunkBlocksPerMapping)
            {
                nextThunksBlock = s_currentlyMappedThunkBlocks[s_currentlyMappedThunkBlocksIndex++];
#if DEBUG
                s_currentlyMappedThunkBlocks[s_currentlyMappedThunkBlocksIndex - 1] = IntPtr.Zero;
                Debug.Assert(nextThunksBlock != IntPtr.Zero);
#endif
            }
            else
            {
                nextThunksBlock = InternalCalls.RhAllocateThunksMapping();

                if (nextThunksBlock == IntPtr.Zero)
                {
                    // We either ran out of memory and can't do any more mappings of the thunks templates sections,
                    // or we are using the managed runtime services fallback, which doesn't provide the
                    // file mapping feature (ex: older version of mrt100.dll, or no mrt100.dll at all).

                    // The only option is for the caller to attempt to recycle unused thunks to be able to
                    // find some free entries.

                    return IntPtr.Zero;
                }

                // Each mapping consists of multiple blocks of thunk stubs/data pairs. Keep track of those
                // so that we do not create a new mapping until all blocks in the sections we just mapped are consumed
                IntPtr currentThunksBlock = nextThunksBlock;
                int thunkBlockSize = InternalCalls.RhpGetThunkBlockSize();
                for (int i = 0; i < Constants.NumThunkBlocksPerMapping; i++)
                {
                    s_currentlyMappedThunkBlocks[i] = currentThunksBlock;
                    currentThunksBlock += thunkBlockSize;
                }
                // Block 0 is the one being returned now, so the next hand-out starts at index 1.
                s_currentlyMappedThunkBlocksIndex = 1;
            }

            Debug.Assert(nextThunksBlock != IntPtr.Zero);

            // Setup the thunks in the new block as a linked list of thunks.
            // Use the first data field of the thunk to build the linked list.
            IntPtr dataAddress = InternalCalls.RhpGetThunkDataBlockAddress(nextThunksBlock);

            for (int i = 0; i < Constants.NumThunksPerBlock; i++)
            {
                if (i == (Constants.NumThunksPerBlock - 1))
                    *((IntPtr*)(dataAddress)) = IntPtr.Zero;            // last thunk terminates the list
                else
                    *((IntPtr*)(dataAddress)) = dataAddress + Constants.ThunkDataSize;

#if DEBUG
                // Debug flag in the second data cell indicating the thunk is not used
                *((IntPtr*)(dataAddress + IntPtr.Size)) = new IntPtr(-1);
#endif

                dataAddress += Constants.ThunkDataSize;
            }

            return nextThunksBlock;
        }

        // TODO: [Feature] Keep track of mapped sections and free them if we need to.
        // public static unsafe void FreeThunksBlock()
        // {
        // }
    }
}