diff --git a/profiler/src/profiler/TracySourceContents.cpp b/profiler/src/profiler/TracySourceContents.cpp index 9abd7fb4..2256403a 100644 --- a/profiler/src/profiler/TracySourceContents.cpp +++ b/profiler/src/profiler/TracySourceContents.cpp @@ -60,30 +60,45 @@ void SourceContents::Parse( const char* fileName, const Worker& worker, const Vi } } - if( m_file ) + if( m_file ) Tokenize( m_data, sz ); + } +} + +void SourceContents::Parse( const char* source ) +{ + if( source == m_data ) return; + + const size_t len = strlen( source ); + + m_file = nullptr; + m_fileStringIdx = 0; + m_data = source; + m_dataBuf = nullptr; + m_dataSize = len; + Tokenize( source, len ); +} + +void SourceContents::Tokenize( const char* txt, size_t sz ) +{ + Tokenizer tokenizer; + for(;;) + { + auto end = txt; + while( *end != '\n' && *end != '\r' && end - m_data < sz ) end++; + m_lines.emplace_back( Tokenizer::Line { txt, end, tokenizer.Tokenize( txt, end ) } ); + if( end - m_data == sz ) break; + if( *end == '\n' ) { - Tokenizer tokenizer; - auto txt = m_data; - for(;;) - { - auto end = txt; - while( *end != '\n' && *end != '\r' && end - m_data < sz ) end++; - m_lines.emplace_back( Tokenizer::Line { txt, end, tokenizer.Tokenize( txt, end ) } ); - if( end - m_data == sz ) break; - if( *end == '\n' ) - { - end++; - if( end - m_data < sz && *end == '\r' ) end++; - } - else if( *end == '\r' ) - { - end++; - if( end - m_data < sz && *end == '\n' ) end++; - } - if( end - m_data == sz ) break; - txt = end; - } + end++; + if( end - m_data < sz && *end == '\r' ) end++; } + else if( *end == '\r' ) + { + end++; + if( end - m_data < sz && *end == '\n' ) end++; + } + if( end - m_data == sz ) break; + txt = end; } } diff --git a/profiler/src/profiler/TracySourceContents.hpp b/profiler/src/profiler/TracySourceContents.hpp index 4e0d8333..f22f2869 100644 --- a/profiler/src/profiler/TracySourceContents.hpp +++ b/profiler/src/profiler/TracySourceContents.hpp @@ -20,6 +20,7 @@ public: ~SourceContents(); 
void Parse( const char* fileName, const Worker& worker, const View& view ); + void Parse( const char* source ); const std::vector<Tokenizer::Line>& get() const { return m_lines; } bool empty() const { return m_lines.empty(); } @@ -31,6 +32,8 @@ public: size_t data_size() const { return m_dataSize; } private: + void Tokenize( const char* txt, size_t sz ); + const char* m_file; uint32_t m_fileStringIdx;