about summary refs log tree commit diff
path: root/src/zencore/memtrack/memorytrace.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/zencore/memtrack/memorytrace.cpp')
-rw-r--r-- src/zencore/memtrack/memorytrace.cpp 829
1 files changed, 829 insertions, 0 deletions
diff --git a/src/zencore/memtrack/memorytrace.cpp b/src/zencore/memtrack/memorytrace.cpp
new file mode 100644
index 000000000..b147aee91
--- /dev/null
+++ b/src/zencore/memtrack/memorytrace.cpp
@@ -0,0 +1,829 @@
+// Copyright Epic Games, Inc. All Rights Reserved.
+
+#include <zencore/memory/memorytrace.h>
+#include <zencore/memory/tagtrace.h>
+
+#include "callstacktrace.h"
+#include "tracemalloc.h"
+#include "vatrace.h"
+
#include <zencore/commandline.h>
#include <zencore/enumflags.h>
#include <zencore/guardvalue.h>
#include <zencore/intmath.h>
#include <zencore/string.h>
#include <zencore/trace.h>

#include <new>
#include <string.h>
#include <utility>
+
+#if ZEN_PLATFORM_WINDOWS
+# include <shellapi.h>
+#endif
+
+class FMalloc;
+
+#if UE_TRACE_ENABLED
+namespace zen {
+UE_TRACE_CHANNEL_DEFINE(MemAllocChannel, "Memory allocations", true)
+}
+#endif
+
+#if UE_MEMORY_TRACE_ENABLED
+
+////////////////////////////////////////////////////////////////////////////////
+
+namespace zen {
+
+void MemoryTrace_InitTags(FMalloc*);
+void MemoryTrace_EnableTracePump();
+
+} // namespace zen
+
+////////////////////////////////////////////////////////////////////////////////
namespace {
// Mask used to decide when a time marker is emitted: a Marker event is logged
// whenever the event counter & mask == 0, i.e. once every
// MarkerSamplePeriod + 1 (= 4096) memory events.
constexpr uint32_t MarkerSamplePeriod = (4 << 10) - 1;

// Number of low bits of an allocation size that are packed into the
// AlignmentPow2_SizeLower byte; the Size event field carries size >> SizeShift.
constexpr uint32_t SizeShift = 3;

// Counter to track when time marker is emitted
std::atomic<uint32_t> GMarkerCounter(0);

// If enabled also pumps the Trace system itself. Used on process shutdown
// when worker thread has been killed, but memory events still occurs.
bool GDoPumpTrace;

// Temporarily disables any internal operation that causes allocations. Used to
// avoid recursive behaviour when memory tracing needs to allocate memory through
// TraceMalloc.
thread_local bool GDoNotAllocateInTrace;

// Set on initialization; on some platforms we hook allocator functions very early
// before Trace has the ability to allocate memory.
bool GTraceAllowed;
} // namespace
+
+////////////////////////////////////////////////////////////////////////////////
+namespace UE { namespace Trace {
+ TRACELOG_API void Update();
+}} // namespace UE::Trace
+
+namespace zen {
+
+////////////////////////////////////////////////////////////////////////////////
// Trace event definitions for the memory channel. NOTE(review): the field
// order and types here define the wire format consumed by the trace analyzer;
// if any layout below changes, bump MemoryTraceVersion accordingly.
UE_TRACE_EVENT_BEGIN(Memory, Init, NoSync | Important)
	UE_TRACE_EVENT_FIELD(uint64_t, PageSize) // new in UE 5.5
	UE_TRACE_EVENT_FIELD(uint32_t, MarkerPeriod)
	UE_TRACE_EVENT_FIELD(uint8, Version)
	UE_TRACE_EVENT_FIELD(uint8, MinAlignment)
	UE_TRACE_EVENT_FIELD(uint8, SizeShift)
UE_TRACE_EVENT_END()

// Periodic timestamp used to correlate allocation events with time.
UE_TRACE_EVENT_BEGIN(Memory, Marker)
	UE_TRACE_EVENT_FIELD(uint64_t, Cycle)
UE_TRACE_EVENT_END()

// Generic allocation event carrying an explicit root-heap id; the System and
// Video variants below omit RootHeap to save a byte on the two common heaps.
UE_TRACE_EVENT_BEGIN(Memory, Alloc)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, Size)
	UE_TRACE_EVENT_FIELD(uint8, AlignmentPow2_SizeLower)
	UE_TRACE_EVENT_FIELD(uint8, RootHeap)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, AllocSystem)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, Size)
	UE_TRACE_EVENT_FIELD(uint8, AlignmentPow2_SizeLower)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, AllocVideo)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, Size)
	UE_TRACE_EVENT_FIELD(uint8, AlignmentPow2_SizeLower)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, Free)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint8, RootHeap)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, FreeSystem)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, FreeVideo)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
UE_TRACE_EVENT_END()

// Realloc events are emitted as a free/alloc pair so allocations can be
// tracked across address changes.
UE_TRACE_EVENT_BEGIN(Memory, ReallocAlloc)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, Size)
	UE_TRACE_EVENT_FIELD(uint8, AlignmentPow2_SizeLower)
	UE_TRACE_EVENT_FIELD(uint8, RootHeap)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, ReallocAllocSystem)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, Size)
	UE_TRACE_EVENT_FIELD(uint8, AlignmentPow2_SizeLower)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, ReallocFree)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint8, RootHeap)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, ReallocFreeSystem)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, MemorySwapOp)
	UE_TRACE_EVENT_FIELD(uint64_t, Address) // page fault real address
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint32_t, CompressedSize)
	UE_TRACE_EVENT_FIELD(uint8, SwapOp)
UE_TRACE_EVENT_END()

// Describes a (root or child) heap; emitted once per heap registration.
UE_TRACE_EVENT_BEGIN(Memory, HeapSpec, NoSync | Important)
	UE_TRACE_EVENT_FIELD(HeapId, Id)
	UE_TRACE_EVENT_FIELD(HeapId, ParentId)
	UE_TRACE_EVENT_FIELD(uint16, Flags)
	UE_TRACE_EVENT_FIELD(UE::Trace::WideString, Name)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, HeapMarkAlloc)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(uint16, Flags)
	UE_TRACE_EVENT_FIELD(HeapId, Heap)
UE_TRACE_EVENT_END()

UE_TRACE_EVENT_BEGIN(Memory, HeapUnmarkAlloc)
	UE_TRACE_EVENT_FIELD(uint64_t, Address)
	UE_TRACE_EVENT_FIELD(uint32_t, CallstackId)
	UE_TRACE_EVENT_FIELD(HeapId, Heap)
UE_TRACE_EVENT_END()

// If the layout of the above events is changed, bump this version number.
// version 1: Initial version (UE 5.0, UE 5.1)
// version 2: Added CallstackId for Free events and also for HeapMarkAlloc, HeapUnmarkAlloc events (UE 5.2).
constexpr uint8 MemoryTraceVersion = 2;
+
+////////////////////////////////////////////////////////////////////////////////
// Allocator decorator that forwards every call to an inner FMalloc while
// reporting alloc/realloc/free events to the memory trace channel.
class FMallocWrapper : public FMalloc
{
public:
	FMallocWrapper(FMalloc* InMalloc);

private:
	// Packed per-allocation metadata fitting into 64 bits.
	// NOTE(review): not referenced anywhere in this file -- presumably used
	// by related code or kept for documentation; confirm before removing.
	struct FCookie
	{
		uint64_t Tag : 16;
		uint64_t Bias : 8;
		uint64_t Size : 40;
	};

	// Returns the alignment the allocator effectively honours for a request:
	// 8 for sizes < 16, else 16, unless a larger alignment was requested.
	static uint32_t GetActualAlignment(SIZE_T Size, uint32_t Alignment);

	virtual void* Malloc(SIZE_T Size, uint32_t Alignment) override;
	virtual void* Realloc(void* PrevAddress, SIZE_T NewSize, uint32_t Alignment) override;
	virtual void Free(void* Address) override;
	// Size queries and init notifications pass straight through untraced.
	virtual bool GetAllocationSize(void* Address, SIZE_T& SizeOut) override { return InnerMalloc->GetAllocationSize(Address, SizeOut); }
	virtual void OnMallocInitialized() override { InnerMalloc->OnMallocInitialized(); }

	FMalloc* InnerMalloc; // wrapped allocator; not owned
};
+
+////////////////////////////////////////////////////////////////////////////////
+FMallocWrapper::FMallocWrapper(FMalloc* InMalloc) : InnerMalloc(InMalloc)
+{
+}
+
+////////////////////////////////////////////////////////////////////////////////
+uint32_t
+FMallocWrapper::GetActualAlignment(SIZE_T Size, uint32_t Alignment)
+{
+ // Defaults; if size is < 16 then alignment is 8 else 16.
+ uint32_t DefaultAlignment = 8 << uint32_t(Size >= 16);
+ return (Alignment < DefaultAlignment) ? DefaultAlignment : Alignment;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+void*
+FMallocWrapper::Malloc(SIZE_T Size, uint32_t Alignment)
+{
+ uint32_t ActualAlignment = GetActualAlignment(Size, Alignment);
+ void* Address = InnerMalloc->Malloc(Size, Alignment);
+
+ MemoryTrace_Alloc((uint64_t)Address, Size, ActualAlignment);
+
+ return Address;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+void*
+FMallocWrapper::Realloc(void* PrevAddress, SIZE_T NewSize, uint32_t Alignment)
+{
+ // This simplifies things and means reallocs trace events are true reallocs
+ if (PrevAddress == nullptr)
+ {
+ return Malloc(NewSize, Alignment);
+ }
+
+ MemoryTrace_ReallocFree((uint64_t)PrevAddress);
+
+ void* RetAddress = InnerMalloc->Realloc(PrevAddress, NewSize, Alignment);
+
+ Alignment = GetActualAlignment(NewSize, Alignment);
+ MemoryTrace_ReallocAlloc((uint64_t)RetAddress, NewSize, Alignment);
+
+ return RetAddress;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+void
+FMallocWrapper::Free(void* Address)
+{
+ if (Address == nullptr)
+ {
+ return;
+ }
+
+ MemoryTrace_Free((uint64_t)Address);
+
+ void* InnerAddress = Address;
+
+ return InnerMalloc->Free(InnerAddress);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+template<class T>
+class alignas(alignof(T)) FUndestructed
+{
+public:
+ template<typename... ArgTypes>
+ void Construct(ArgTypes... Args)
+ {
+ ::new (Buffer) T(Args...);
+ bIsConstructed = true;
+ }
+
+ bool IsConstructed() const { return bIsConstructed; }
+
+ T* operator&() { return (T*)Buffer; }
+ T* operator->() { return (T*)Buffer; }
+
+protected:
+ uint8 Buffer[sizeof(T)];
+ bool bIsConstructed;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+static FUndestructed<FTraceMalloc> GTraceMalloc;
+
+////////////////////////////////////////////////////////////////////////////////
+static EMemoryTraceInit
+MemoryTrace_ShouldEnable()
+{
+ EMemoryTraceInit Mode = EMemoryTraceInit::Disabled;
+
+ // Process any command line trace options
+ //
+ // Note that calls can come into this function before we enter the regular main function
+ // and we can therefore not rely on the regular command line parsing for the application
+
+ using namespace std::literals;
+
+ auto ProcessTraceArg = [&](const std::string_view& Arg) {
+ if (Arg == "memalloc"sv)
+ {
+ Mode |= EMemoryTraceInit::AllocEvents;
+ }
+ else if (Arg == "callstack"sv)
+ {
+ Mode |= EMemoryTraceInit::Callstacks;
+ }
+ else if (Arg == "memtag"sv)
+ {
+ Mode |= EMemoryTraceInit::Tags;
+ }
+ else if (Arg == "memory"sv)
+ {
+ Mode |= EMemoryTraceInit::Full;
+ }
+ else if (Arg == "memory_light"sv)
+ {
+ Mode |= EMemoryTraceInit::Light;
+ }
+ };
+
+ constexpr std::string_view TraceOption = "--trace="sv;
+
+ std::function<void(const std::string_view&)> ProcessArg = [&](const std::string_view& Arg) {
+ if (Arg.starts_with(TraceOption))
+ {
+ const std::string_view OptionArgs = Arg.substr(TraceOption.size());
+
+ IterateCommaSeparatedValue(OptionArgs, ProcessTraceArg);
+ }
+ };
+
+ IterateCommandlineArgs(ProcessArg);
+
+ return Mode;
+}
+
+////////////////////////////////////////////////////////////////////////////////
// Installs the tracing allocator wrapper around InMalloc according to Mode.
// Returns the wrapper (or InMalloc unchanged when alloc events are disabled).
FMalloc*
MemoryTrace_CreateInternal(FMalloc* InMalloc, EMemoryTraceInit Mode)
{
	using namespace zen;

	// If allocation events are not desired we don't need to do anything, even
	// if user has enabled only callstacks it will be enabled later.
	if (!EnumHasAnyFlags(Mode, EMemoryTraceInit::AllocEvents))
	{
		return InMalloc;
	}

	// Some OSes (i.e. Windows) will terminate all threads except the main
	// one as part of static deinit. However we may receive more memory
	// trace events that would get lost as Trace's worker thread has been
	// terminated. To flush the last remaining memory events, Trace needs
	// to be updated, which we do in response to memory events. We use an
	// atexit callback to know when Trace is probably no longer getting
	// ticked.
	atexit([]() { MemoryTrace_EnableTracePump(); });

	// Allocator used for tracing's own allocations (never destructed).
	GTraceMalloc.Construct(InMalloc);

	// Both tag and callstack tracing need to use the wrapped trace malloc
	// so we can break out tracing memory overhead (and not cause recursive behaviour).
	if (EnumHasAnyFlags(Mode, EMemoryTraceInit::Tags))
	{
		MemoryTrace_InitTags(&GTraceMalloc);
	}

	if (EnumHasAnyFlags(Mode, EMemoryTraceInit::Callstacks))
	{
		CallstackTrace_Create(&GTraceMalloc);
	}

	// The wrapper that traces the application's own allocations; never
	// destructed since events may still be traced during static deinit.
	static FUndestructed<FMallocWrapper> SMallocWrapper;
	SMallocWrapper.Construct(InMalloc);

	return &SMallocWrapper;
}
+
+////////////////////////////////////////////////////////////////////////////////
+FMalloc*
+MemoryTrace_CreateInternal(FMalloc* InMalloc)
+{
+ const EMemoryTraceInit Mode = MemoryTrace_ShouldEnable();
+ return MemoryTrace_CreateInternal(InMalloc, Mode);
+}
+
+////////////////////////////////////////////////////////////////////////////////
// Public entry point: installs memory tracing around InMalloc and, where
// supported, hooks the OS virtual-memory APIs as well.
FMalloc*
MemoryTrace_Create(FMalloc* InMalloc)
{
	FMalloc* OutMalloc = MemoryTrace_CreateInternal(InMalloc);

	// A different pointer back means the tracing wrapper was installed.
	if (OutMalloc != InMalloc)
	{
#	 if PLATFORM_SUPPORTS_TRACE_WIN32_VIRTUAL_MEMORY_HOOKS
		FVirtualWinApiHooks::Initialize(false);
#	 endif
	}

	return OutMalloc;
}
+
+////////////////////////////////////////////////////////////////////////////////
// Enables event emission and announces the trace format (packing parameters,
// version, marker cadence) plus the two reserved root heaps.
void
MemoryTrace_Initialize()
{
	// At this point we initialized the system to allow tracing.
	GTraceAllowed = true;

	const int MIN_ALIGNMENT = 8;

	// NOTE(review): PageSize is hard-coded to 4096 -- confirm against
	// platforms with larger pages.
	UE_TRACE_LOG(Memory, Init, MemAllocChannel)
		<< Init.PageSize(4096) << Init.MarkerPeriod(MarkerSamplePeriod + 1) << Init.Version(MemoryTraceVersion)
		<< Init.MinAlignment(uint8(MIN_ALIGNMENT)) << Init.SizeShift(uint8(SizeShift));

	// Root heaps must be registered in this order so the sequential ids they
	// receive match the reserved EMemoryTraceRootHeap values.
	const HeapId SystemRootHeap = MemoryTrace_RootHeapSpec(u"System memory");
	ZEN_ASSERT(SystemRootHeap == EMemoryTraceRootHeap::SystemMemory);
	const HeapId VideoRootHeap = MemoryTrace_RootHeapSpec(u"Video memory");
	ZEN_ASSERT(VideoRootHeap == EMemoryTraceRootHeap::VideoMemory);

	static_assert((1 << SizeShift) - 1 <= MIN_ALIGNMENT, "Not enough bits to pack size fields");

#	 if !UE_MEMORY_TRACE_LATE_INIT
	// On some platforms callstack initialization cannot happen this early in the process. It is initialized
	// in other locations when UE_MEMORY_TRACE_LATE_INIT is defined. Until that point allocations cannot have
	// callstacks.
	CallstackTrace_Initialize();
#	 endif
}
+
+void
+MemoryTrace_Shutdown()
+{
+ // Disable any further activity
+ GTraceAllowed = false;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+bool
+MemoryTrace_IsActive()
+{
+ return GTraceAllowed;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+void
+MemoryTrace_EnableTracePump()
+{
+ GDoPumpTrace = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
// Housekeeping run after every traced memory event: emits a periodic time
// marker and, during shutdown, pumps Trace in place of the dead worker thread.
void
MemoryTrace_UpdateInternal()
{
	// Emit a Marker once every MarkerSamplePeriod + 1 events so the analyzer
	// can correlate allocations with time.
	const uint32_t TheCount = GMarkerCounter.fetch_add(1, std::memory_order_relaxed);
	if ((TheCount & MarkerSamplePeriod) == 0)
	{
		UE_TRACE_LOG(Memory, Marker, MemAllocChannel) << Marker.Cycle(UE::Trace::Private::TimeGetTimestamp());
	}

	if (GDoPumpTrace)
	{
		UE::Trace::Update();
	}
}
+
+////////////////////////////////////////////////////////////////////////////////
// Traces a completed allocation on the given root heap.
// Size/alignment are packed: log2(Alignment) and the low SizeShift bits of
// Size go into one byte, the Size field carries Size >> SizeShift.
void
MemoryTrace_Alloc(uint64_t Address, uint64_t Size, uint32_t Alignment, HeapId RootHeap, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Root heap ids must fit below 16.
	ZEN_ASSERT_SLOW(RootHeap < 16);

	const uint32_t AlignmentPow2 = uint32_t(zen::CountTrailingZeros64(Alignment));
	const uint32_t Alignment_SizeLower = (AlignmentPow2 << SizeShift) | uint32_t(Size & ((1 << SizeShift) - 1));
	// A caller-provided callstack id wins; otherwise capture the current one,
	// except inside trace-internal code where callstack capture is suppressed.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	// The two predefined root heaps use dedicated events that omit RootHeap.
	switch (RootHeap)
	{
		case EMemoryTraceRootHeap::SystemMemory:
		{
			UE_TRACE_LOG(Memory, AllocSystem, MemAllocChannel)
				<< AllocSystem.Address(uint64_t(Address)) << AllocSystem.CallstackId(CallstackId)
				<< AllocSystem.Size(uint32_t(Size >> SizeShift)) << AllocSystem.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower));
			break;
		}

		case EMemoryTraceRootHeap::VideoMemory:
		{
			UE_TRACE_LOG(Memory, AllocVideo, MemAllocChannel)
				<< AllocVideo.Address(uint64_t(Address)) << AllocVideo.CallstackId(CallstackId)
				<< AllocVideo.Size(uint32_t(Size >> SizeShift)) << AllocVideo.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower));
			break;
		}

		default:
		{
			UE_TRACE_LOG(Memory, Alloc, MemAllocChannel)
				<< Alloc.Address(uint64_t(Address)) << Alloc.CallstackId(CallstackId) << Alloc.Size(uint32_t(Size >> SizeShift))
				<< Alloc.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower)) << Alloc.RootHeap(uint8(RootHeap));
			break;
		}
	}

	MemoryTrace_UpdateInternal();
}
+
+////////////////////////////////////////////////////////////////////////////////
// Traces a free on the given root heap; the two predefined root heaps use
// dedicated events that omit the RootHeap byte.
void
MemoryTrace_Free(uint64_t Address, HeapId RootHeap, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Root heap ids must fit below 16.
	ZEN_ASSERT_SLOW(RootHeap < 16);

	// Caller-provided callstack id wins; 0 while inside trace-internal code.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	switch (RootHeap)
	{
		case EMemoryTraceRootHeap::SystemMemory:
		{
			UE_TRACE_LOG(Memory, FreeSystem, MemAllocChannel)
				<< FreeSystem.Address(uint64_t(Address)) << FreeSystem.CallstackId(CallstackId);
			break;
		}
		case EMemoryTraceRootHeap::VideoMemory:
		{
			UE_TRACE_LOG(Memory, FreeVideo, MemAllocChannel)
				<< FreeVideo.Address(uint64_t(Address)) << FreeVideo.CallstackId(CallstackId);
			break;
		}
		default:
		{
			UE_TRACE_LOG(Memory, Free, MemAllocChannel)
				<< Free.Address(uint64_t(Address)) << Free.CallstackId(CallstackId) << Free.RootHeap(uint8(RootHeap));
			break;
		}
	}

	MemoryTrace_UpdateInternal();
}
+
+////////////////////////////////////////////////////////////////////////////////
// Traces the allocation half of a realloc (paired with MemoryTrace_ReallocFree).
// Size/alignment packing is identical to MemoryTrace_Alloc.
void
MemoryTrace_ReallocAlloc(uint64_t Address, uint64_t Size, uint32_t Alignment, HeapId RootHeap, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Root heap ids must fit below 16.
	ZEN_ASSERT_SLOW(RootHeap < 16);

	const uint32_t AlignmentPow2 = uint32_t(zen::CountTrailingZeros64(Alignment));
	const uint32_t Alignment_SizeLower = (AlignmentPow2 << SizeShift) | uint32_t(Size & ((1 << SizeShift) - 1));
	// Caller-provided callstack id wins; 0 while inside trace-internal code.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	// Only system memory has a dedicated event here (no video variant).
	switch (RootHeap)
	{
		case EMemoryTraceRootHeap::SystemMemory:
		{
			UE_TRACE_LOG(Memory, ReallocAllocSystem, MemAllocChannel)
				<< ReallocAllocSystem.Address(uint64_t(Address)) << ReallocAllocSystem.CallstackId(CallstackId)
				<< ReallocAllocSystem.Size(uint32_t(Size >> SizeShift))
				<< ReallocAllocSystem.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower));
			break;
		}

		default:
		{
			UE_TRACE_LOG(Memory, ReallocAlloc, MemAllocChannel)
				<< ReallocAlloc.Address(uint64_t(Address)) << ReallocAlloc.CallstackId(CallstackId)
				<< ReallocAlloc.Size(uint32_t(Size >> SizeShift)) << ReallocAlloc.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower))
				<< ReallocAlloc.RootHeap(uint8(RootHeap));
			break;
		}
	}

	MemoryTrace_UpdateInternal();
}
+
+////////////////////////////////////////////////////////////////////////////////
// Traces the free half of a realloc (paired with MemoryTrace_ReallocAlloc).
void
MemoryTrace_ReallocFree(uint64_t Address, HeapId RootHeap, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Root heap ids must fit below 16.
	ZEN_ASSERT_SLOW(RootHeap < 16);

	// Caller-provided callstack id wins; 0 while inside trace-internal code.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	switch (RootHeap)
	{
		case EMemoryTraceRootHeap::SystemMemory:
		{
			UE_TRACE_LOG(Memory, ReallocFreeSystem, MemAllocChannel)
				<< ReallocFreeSystem.Address(uint64_t(Address)) << ReallocFreeSystem.CallstackId(CallstackId);
			break;
		}

		default:
		{
			UE_TRACE_LOG(Memory, ReallocFree, MemAllocChannel)
				<< ReallocFree.Address(uint64_t(Address)) << ReallocFree.CallstackId(CallstackId)
				<< ReallocFree.RootHeap(uint8(RootHeap));
			break;
		}
	}

	MemoryTrace_UpdateInternal();
}
+
+////////////////////////////////////////////////////////////////////////////////
// Traces a swap operation (e.g. page in/out) on the given page address.
// Unlike the alloc/free paths, the callstack id is taken verbatim from the
// caller; no capture is attempted here.
void
MemoryTrace_SwapOp(uint64_t PageAddress, EMemoryTraceSwapOperation SwapOperation, uint32_t CompressedSize, uint32_t CallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	UE_TRACE_LOG(Memory, MemorySwapOp, MemAllocChannel)
		<< MemorySwapOp.Address(PageAddress) << MemorySwapOp.CallstackId(CallstackId) << MemorySwapOp.CompressedSize(CompressedSize)
		<< MemorySwapOp.SwapOp((uint8)SwapOperation);

	MemoryTrace_UpdateInternal();
}
+
+////////////////////////////////////////////////////////////////////////////////
// Registers a child heap under ParentId and returns its newly assigned id
// (0 when tracing is inactive).
HeapId
MemoryTrace_HeapSpec(HeapId ParentId, const char16_t* Name, EMemoryTraceHeapFlags Flags)
{
	if (!GTraceAllowed)
	{
		return 0;
	}

	// Ids above the reserved root-heap range are handed out sequentially.
	static std::atomic<HeapId> HeapIdCount(EMemoryTraceRootHeap::EndReserved + 1); // Reserve indexes for root heaps
	const HeapId Id = HeapIdCount.fetch_add(1);
	const uint32_t NameLen = uint32_t(zen::StringLength(Name));
	// Extra payload bytes for the UTF-16 name attached to the event.
	const uint32_t DataSize = NameLen * sizeof(char16_t);
	// A child heap must be registered after its parent.
	ZEN_ASSERT(ParentId < Id);

	UE_TRACE_LOG(Memory, HeapSpec, MemAllocChannel, DataSize)
		<< HeapSpec.Id(Id) << HeapSpec.ParentId(ParentId) << HeapSpec.Name(Name, NameLen) << HeapSpec.Flags(uint16(Flags));

	return Id;
}
+
+////////////////////////////////////////////////////////////////////////////////
// Registers a root heap. Ids are assigned sequentially from 0, so callers
// must register root heaps in the order of the reserved EMemoryTraceRootHeap
// values (see MemoryTrace_Initialize). Returns 0 when tracing is inactive.
HeapId
MemoryTrace_RootHeapSpec(const char16_t* Name, EMemoryTraceHeapFlags Flags)
{
	if (!GTraceAllowed)
	{
		return 0;
	}

	static std::atomic<HeapId> RootHeapCount(0);
	const HeapId Id = RootHeapCount.fetch_add(1);
	// Root heap ids must stay inside the reserved range.
	ZEN_ASSERT(Id <= EMemoryTraceRootHeap::EndReserved);

	const uint32_t NameLen = uint32_t(zen::StringLength(Name));
	// Extra payload bytes for the UTF-16 name attached to the event.
	const uint32_t DataSize = NameLen * sizeof(char16_t);

	// ParentId ~0 marks "no parent" for root heaps.
	UE_TRACE_LOG(Memory, HeapSpec, MemAllocChannel, DataSize)
		<< HeapSpec.Id(Id) << HeapSpec.ParentId(HeapId(~0)) << HeapSpec.Name(Name, NameLen)
		<< HeapSpec.Flags(uint16(EMemoryTraceHeapFlags::Root | Flags));

	return Id;
}
+
+////////////////////////////////////////////////////////////////////////////////
// Marks an existing allocation as the backing memory for the given heap.
void
MemoryTrace_MarkAllocAsHeap(uint64_t Address, HeapId Heap, EMemoryTraceHeapAllocationFlags Flags, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Caller-provided callstack id wins; 0 while inside trace-internal code.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	// The Heap flag is always set in addition to any caller-supplied flags.
	UE_TRACE_LOG(Memory, HeapMarkAlloc, MemAllocChannel)
		<< HeapMarkAlloc.Address(uint64_t(Address)) << HeapMarkAlloc.CallstackId(CallstackId)
		<< HeapMarkAlloc.Flags(uint16(EMemoryTraceHeapAllocationFlags::Heap | Flags)) << HeapMarkAlloc.Heap(Heap);
}
+
+////////////////////////////////////////////////////////////////////////////////
// Reverses MemoryTrace_MarkAllocAsHeap for the given allocation.
void
MemoryTrace_UnmarkAllocAsHeap(uint64_t Address, HeapId Heap, uint32_t ExternalCallstackId)
{
	if (!GTraceAllowed)
	{
		return;
	}

	// Caller-provided callstack id wins; 0 while inside trace-internal code.
	const uint32_t CallstackId = ExternalCallstackId ? ExternalCallstackId : GDoNotAllocateInTrace ? 0 : CallstackTrace_GetCurrentId();

	// Sets all flags to zero
	UE_TRACE_LOG(Memory, HeapUnmarkAlloc, MemAllocChannel)
		<< HeapUnmarkAlloc.Address(uint64_t(Address)) << HeapUnmarkAlloc.CallstackId(CallstackId) << HeapUnmarkAlloc.Heap(Heap);
}
+
+} // namespace zen
+
+#else // UE_MEMORY_TRACE_ENABLED
+
+/////////////////////////////////////////////////////////////////////////////
// Stub used when memory tracing is compiled out: tracing is never active.
bool
MemoryTrace_IsActive()
{
	return false;
}
+
+#endif // UE_MEMORY_TRACE_ENABLED
+
+namespace zen {
+
+/////////////////////////////////////////////////////////////////////////////
+FTraceMalloc::FTraceMalloc(FMalloc* InMalloc)
+{
+ WrappedMalloc = InMalloc;
+}
+
+/////////////////////////////////////////////////////////////////////////////
// Nothing to release; WrappedMalloc is not owned.
FTraceMalloc::~FTraceMalloc()
{
}
+
+/////////////////////////////////////////////////////////////////////////////
// Allocates through the wrapped allocator and traces the block as a
// system-memory allocation tagged as trace overhead.
void*
FTraceMalloc::Malloc(SIZE_T Count, uint32_t Alignment)
{
#if UE_MEMORY_TRACE_ENABLED
	// UE_TRACE_METADATA_CLEAR_SCOPE();
	// Attribute this allocation to the trace system's own memory tag.
	UE_MEMSCOPE(TRACE_TAG);

	void* NewPtr;
	{
		// Suppress trace-internal work (callstack capture etc.) while inside
		// the wrapped allocator to avoid recursion.
		zen::TGuardValue<bool> _(GDoNotAllocateInTrace, true);
		NewPtr = WrappedMalloc->Malloc(Count, Alignment);
	}

	// Same size/alignment packing as MemoryTrace_Alloc; CallstackId 0 marks
	// trace-internal allocations.
	const uint64_t Size = Count;
	const uint32_t AlignmentPow2 = uint32_t(zen::CountTrailingZeros64(Alignment));
	const uint32_t Alignment_SizeLower = (AlignmentPow2 << SizeShift) | uint32_t(Size & ((1 << SizeShift) - 1));

	UE_TRACE_LOG(Memory, Alloc, MemAllocChannel)
		<< Alloc.Address(uint64_t(NewPtr)) << Alloc.CallstackId(0) << Alloc.Size(uint32_t(Size >> SizeShift))
		<< Alloc.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower)) << Alloc.RootHeap(uint8(EMemoryTraceRootHeap::SystemMemory));

	return NewPtr;
#else
	return WrappedMalloc->Malloc(Count, Alignment);
#endif // UE_MEMORY_TRACE_ENABLED
}
+
+/////////////////////////////////////////////////////////////////////////////
// Reallocates through the wrapped allocator, tracing the free/alloc pair as
// trace-overhead system memory.
// NOTE(review): unlike FMallocWrapper::Realloc, a nullptr Original is not
// special-cased, and ReallocFree omits CallstackId -- presumably relying on
// the field's default; confirm intended.
void*
FTraceMalloc::Realloc(void* Original, SIZE_T Count, uint32_t Alignment)
{
#if UE_MEMORY_TRACE_ENABLED
	// UE_TRACE_METADATA_CLEAR_SCOPE();
	// Attribute this allocation to the trace system's own memory tag.
	UE_MEMSCOPE(TRACE_TAG);

	// Trace the free half before the address is invalidated.
	UE_TRACE_LOG(Memory, ReallocFree, MemAllocChannel)
		<< ReallocFree.Address(uint64_t(Original)) << ReallocFree.RootHeap(uint8(EMemoryTraceRootHeap::SystemMemory));

	void* NewPtr;
	{
		// Suppress trace-internal work while inside the wrapped allocator.
		zen::TGuardValue<bool> _(GDoNotAllocateInTrace, true);
		NewPtr = WrappedMalloc->Realloc(Original, Count, Alignment);
	}

	// Same size/alignment packing as MemoryTrace_Alloc; CallstackId 0 marks
	// trace-internal allocations.
	const uint64_t Size = Count;
	const uint32_t AlignmentPow2 = uint32_t(zen::CountTrailingZeros64(Alignment));
	const uint32_t Alignment_SizeLower = (AlignmentPow2 << SizeShift) | uint32_t(Size & ((1 << SizeShift) - 1));

	UE_TRACE_LOG(Memory, ReallocAlloc, MemAllocChannel)
		<< ReallocAlloc.Address(uint64_t(NewPtr)) << ReallocAlloc.CallstackId(0) << ReallocAlloc.Size(uint32_t(Size >> SizeShift))
		<< ReallocAlloc.AlignmentPow2_SizeLower(uint8(Alignment_SizeLower))
		<< ReallocAlloc.RootHeap(uint8(EMemoryTraceRootHeap::SystemMemory));

	return NewPtr;
#else
	return WrappedMalloc->Realloc(Original, Count, Alignment);
#endif // UE_MEMORY_TRACE_ENABLED
}
+
+/////////////////////////////////////////////////////////////////////////////
// Frees through the wrapped allocator, tracing the free as system memory.
// NOTE(review): CallstackId is omitted here -- presumably relying on the
// field's default; confirm intended.
void
FTraceMalloc::Free(void* Original)
{
#if UE_MEMORY_TRACE_ENABLED
	// Trace before the inner free so the address is still valid.
	UE_TRACE_LOG(Memory, Free, MemAllocChannel)
		<< Free.Address(uint64_t(Original)) << Free.RootHeap(uint8(EMemoryTraceRootHeap::SystemMemory));

	{
		// Suppress trace-internal work while inside the wrapped allocator.
		zen::TGuardValue<bool> _(GDoNotAllocateInTrace, true);
		WrappedMalloc->Free(Original);
	}
#else
	WrappedMalloc->Free(Original);
#endif // UE_MEMORY_TRACE_ENABLED
}
+
+} // namespace zen