mirror of https://github.com/wolfpld/tracy

Calculate callstack sample data on trace load.

Bartosz Taudul 2020-02-27 01:22:36 +01:00
parent 8288f7c6b7
commit 852e37c8dd
3 changed files with 85 additions and 1 deletion
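
In outline, the new background pass turns the recorded callstack samples into per-symbol statistics: the symbol behind the innermost frame of each sample is charged one exclusive hit, and every symbol further up the stack one inclusive hit. A standalone sketch of that idea, using deliberately simplified stand-in types rather than Tracy's own:

#include <cstdint>
#include <unordered_map>
#include <vector>

// Simplified stand-ins; the real statistics are likewise keyed by symbol address.
struct SymbolStats { uint32_t incl = 0, excl = 0; };
using Callstack = std::vector<uint64_t>;   // symbol addresses, index 0 = innermost frame

void AccumulateSamples( const std::vector<Callstack>& samples,
                        std::unordered_map<uint64_t, SymbolStats>& stats )
{
    for( const auto& cs : samples )
    {
        if( cs.empty() ) continue;
        stats[cs[0]].excl++;                        // innermost frame: exclusive count
        for( size_t i = 1; i < cs.size(); i++ )
        {
            stats[cs[i]].incl++;                    // every caller: inclusive count
        }
    }
}

The actual pass added below additionally deduplicates identical callstacks before expanding them and accounts for inlined frames.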

View File

@@ -660,6 +660,14 @@ struct Parameter
int32_t val;
};
struct SymbolStats
{
uint32_t incl, excl;
};
enum { SymbolStatsSize = sizeof( SymbolStats ) };
}
#endif

View File

@@ -244,6 +244,7 @@ Worker::Worker( const char* addr, int port )
#ifndef TRACY_NO_STATISTICS
m_data.sourceLocationZonesReady = true;
m_data.callstackSamplesReady = false; // FIXME implement live data update
m_data.ctxUsageReady = true;
#endif
@@ -1660,7 +1661,7 @@ Worker::Worker( FileRead& f, EventType::Type eventMask, bool bgTasks )
{
m_backgroundDone.store( false, std::memory_order_relaxed );
#ifndef TRACY_NO_STATISTICS
m_threadBackground = std::thread( [this, reconstructMemAllocPlot] {
m_threadBackground = std::thread( [this, reconstructMemAllocPlot, eventMask] {
if( !m_data.ctxSwitch.empty() )
{
ReconstructContextSwitchUsage();
@@ -1710,6 +1711,75 @@ Worker::Worker( FileRead& f, EventType::Type eventMask, bool bgTasks )
m_data.sourceLocationZonesReady = true;
}
if( eventMask & EventType::Samples )
{
    unordered_flat_map<uint32_t, uint32_t> counts;
    uint32_t total = 0;
    for( auto& t : m_data.threads ) total += t->samples.size();
    if( total != 0 )
    {
        counts.reserve( total );
        for( auto& t : m_data.threads )
        {
            if( m_shutdown.load( std::memory_order_relaxed ) ) return;
            for( auto& sd : t->samples )
            {
                const auto cs = sd.callstack.Val();
                auto it = counts.find( cs );
                if( it == counts.end() )
                {
                    counts.emplace( cs, 1 );
                }
                else
                {
                    it->second++;
                }
            }
        }
        for( auto& v : counts )
        {
            const auto count = v.second;
            const auto& cs = GetCallstack( v.first );
            const auto cssz = cs.size();
            const auto fexcl = GetCallstackFrame( cs[0] );
            if( fexcl )
            {
                const auto fsz = fexcl->size;
                const auto& frame0 = fexcl->data[0];
                auto sym = m_data.symbolStats.find( frame0.symAddr );
                if( sym == m_data.symbolStats.end() ) sym = m_data.symbolStats.emplace( frame0.symAddr, SymbolStats {} ).first;
                sym->second.excl += count;
                for( uint8_t f=1; f<fsz; f++ )
                {
                    const auto& frame = fexcl->data[f];
                    sym = m_data.symbolStats.find( frame.symAddr );
                    if( sym == m_data.symbolStats.end() ) sym = m_data.symbolStats.emplace( frame.symAddr, SymbolStats {} ).first;
                    sym->second.incl += count;
                }
            }
            for( uint8_t c=1; c<cssz; c++ )
            {
                const auto fincl = GetCallstackFrame( cs[c] );
                if( fincl )
                {
                    const auto fsz = fincl->size;
                    for( uint8_t f=0; f<fsz; f++ )
                    {
                        const auto& frame = fincl->data[f];
                        auto sym = m_data.symbolStats.find( frame.symAddr );
                        if( sym == m_data.symbolStats.end() ) sym = m_data.symbolStats.emplace( frame.symAddr, SymbolStats {} ).first;
                        sym->second.incl += count;
                    }
                }
            }
        }
    }
    std::lock_guard<std::shared_mutex> lock( m_data.lock );
    m_data.callstackSamplesReady = true;
}
m_backgroundDone.store( true, std::memory_order_relaxed );
} );
#else
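
Two details of the pass above are easy to miss: identical callstacks are first tallied into the counts map, presumably so that GetCallstackFrame() resolution and symbol lookups run once per unique callstack rather than once per sample, and only frame 0 of the innermost callstack entry is charged as exclusive, while its remaining (inlined) frames and all outer entries are charged as inclusive.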

View File

@@ -215,6 +215,11 @@ private:
unordered_flat_map<CallstackFrameId, CallstackFrameData*, CallstackFrameIdHash, CallstackFrameIdCompare> callstackFrameMap;
unordered_flat_map<CallstackFrameData*, CallstackFrameId, RevFrameHash, RevFrameComp> revFrameMap;
unordered_flat_map<uint64_t, SymbolData> symbolMap;
unordered_flat_map<uint64_t, SymbolStats> symbolStats;
#ifndef TRACY_NO_STATISTICS
bool callstackSamplesReady = false;
#endif
unordered_flat_map<uint32_t, LockMap*> lockMap;
@@ -420,6 +425,7 @@ public:
const unordered_flat_map<int16_t, SourceLocationZones>& GetSourceLocationZones() const { return m_data.sourceLocationZones; }
bool AreSourceLocationZonesReady() const { return m_data.sourceLocationZonesReady; }
bool IsCpuUsageReady() const { return m_data.ctxUsageReady; }
bool AreCallstackSamplesReady() const { return m_data.callstackSamplesReady; }
#endif
tracy_force_inline uint16_t CompressThread( uint64_t thread ) { return m_data.localThreadCompress.CompressThread( thread ); }
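
On the header side the commit only adds the storage (symbolStats), the readiness flag, and its accessor; the flag is flipped under the exclusive m_data.lock once the background pass finishes. A minimal, self-contained model of one way such a hand-off can work, with stand-in names rather than Tracy's own (the reader here checks the flag under a shared lock, which is an assumption, not something this diff shows):

#include <cstdint>
#include <mutex>
#include <shared_mutex>
#include <unordered_map>

// Stand-in for the worker's shared state; names are illustrative only.
struct SampleData
{
    std::shared_mutex lock;
    std::unordered_map<uint64_t, uint32_t> symbolStats;     // symbol address -> sample count
    bool callstackSamplesReady = false;
};

// Background thread: fill the map without the lock, then publish the result
// by flipping the flag under the exclusive lock (the pattern used in the diff above).
void BackgroundAccumulate( SampleData& data )
{
    data.symbolStats[0x401000] = 42;                        // illustrative content only
    std::lock_guard<std::shared_mutex> guard( data.lock );
    data.callstackSamplesReady = true;
}

// Reader (e.g. a UI view): only touch the stats once the flag is set.
bool TryReadSample( SampleData& data, uint64_t symAddr, uint32_t& out )
{
    std::shared_lock<std::shared_mutex> guard( data.lock );
    if( !data.callstackSamplesReady ) return false;
    auto it = data.symbolStats.find( symAddr );
    if( it == data.symbolStats.end() ) return false;
    out = it->second;
    return true;
}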