diff --git a/include/hepnos/Prefetcher.hpp b/include/hepnos/Prefetcher.hpp
index 33fc8b19569012817f2952d1cb536e3378b6ef41..4cedd79def5d229e29391cb2a72ee21e1f53b999 100644
--- a/include/hepnos/Prefetcher.hpp
+++ b/include/hepnos/Prefetcher.hpp
@@ -9,6 +9,7 @@
 #include
 #include
 #include
+#include <hepnos/Statistics.hpp>
 
 namespace hepnos {
 
@@ -21,6 +22,13 @@ class Run;
 class SubRun;
 class Event;
 
+struct PrefetcherStatistics {
+    Statistics batch_sizes;
+    Statistics product_sizes;
+    size_t product_cache_hit = 0;
+    size_t product_cache_miss = 0;
+};
+
 /**
  * @brief The Prefetcher object will actively try to prefetch
  * items from the underlying DataStore when using iterators.
@@ -134,6 +142,20 @@ class Prefetcher {
         fetchProductImpl(label + "#" + demangle(), fetch);
     }
 
+    /**
+     * @brief Activate statistics collection.
+     *
+     * @param activate Whether to activate statistics.
+     */
+    void activateStatistics(bool activate=true);
+
+    /**
+     * @brief Collects the usage statistics.
+     *
+     * @param stats PrefetcherStatistics object to fill.
+     */
+    void collectStatistics(PrefetcherStatistics& stats) const;
+
     private:
 
     std::shared_ptr<PrefetcherImpl> m_impl;
diff --git a/src/Prefetcher.cpp b/src/Prefetcher.cpp
index 286ee45689581385454ce1e8d0f2b3bebc0fa02a..2c8317a2d0e294cff2dc8395f1b6078f0abde691 100644
--- a/src/Prefetcher.cpp
+++ b/src/Prefetcher.cpp
@@ -47,4 +47,17 @@ void Prefetcher::fetchProductImpl(const std::string& label, bool fetch=true) const {
     }
 }
 
+void Prefetcher::activateStatistics(bool activate) {
+    if(activate) {
+        if(m_impl->m_stats) return;
+        m_impl->m_stats = std::make_unique<PrefetcherStatistics>();
+    } else {
+        m_impl->m_stats.reset();
+    }
+}
+
+void Prefetcher::collectStatistics(PrefetcherStatistics& stats) const {
+    m_impl->collectStatistics(stats);
+}
+
 }
diff --git a/src/PrefetcherImpl.hpp b/src/PrefetcherImpl.hpp
index ba050b2e8b9b9dd531f01080432a629635c3be6a..e0ba0f5a59f5d86640b183178b2cbe5033d409a2 100644
--- a/src/PrefetcherImpl.hpp
+++ b/src/PrefetcherImpl.hpp
@@ -20,6 +20,9 @@ class PrefetcherImpl {
         }
     };
 
+    mutable std::unique_ptr<PrefetcherStatistics> m_stats;
+    mutable tl::mutex m_stats_mtx;
+
     std::shared_ptr<DataStoreImpl> m_datastore;
     unsigned int m_cache_size = 16;
     unsigned int m_batch_size = 1;
@@ -33,6 +36,16 @@ class PrefetcherImpl {
 
     virtual ~PrefetcherImpl() = default;
 
+    void update_batch_statistics(size_t batch_size) const {
+        if(!m_stats) return;
+        m_stats->batch_sizes.updateWith(batch_size);
+    }
+
+    void update_product_statistics(size_t psize) const {
+        if(!m_stats) return;
+        m_stats->product_sizes.updateWith(psize);
+    }
+
     virtual void fetchRequestedProducts(const std::shared_ptr<ItemImpl>& itemImpl) const = 0;
 
     virtual void prefetchFrom(const ItemType& item_type,
@@ -55,6 +68,12 @@ class PrefetcherImpl {
     virtual bool loadRawProduct(const ItemDescriptor& id,
                                 const std::string& productName,
                                 char* value, size_t* vsize) const = 0;
+
+    void collectStatistics(PrefetcherStatistics& stats) const {
+        std::unique_lock<tl::mutex> lock(m_stats_mtx);
+        if(m_stats)
+            stats = *m_stats;
+    }
 };
 
 }
diff --git a/src/SyncPrefetcherImpl.hpp b/src/SyncPrefetcherImpl.hpp
index 611eb9f0dde091ff56c2c1de38777c4adb017d8b..83d94748db679a119718239f8fce4c32c2f27f4a 100644
--- a/src/SyncPrefetcherImpl.hpp
+++ b/src/SyncPrefetcherImpl.hpp
@@ -27,6 +27,7 @@ class SyncPrefetcherImpl : public PrefetcherImpl {
             std::string data;
             bool ok = m_datastore->loadRawProduct(product_id, data);
             if(ok) {
+                update_product_statistics(data.size());
                 m_product_cache[product_id.m_key] = std::move(data);
             }
         }
@@ -41,8 +42,10 @@ class SyncPrefetcherImpl : public PrefetcherImpl {
         while(m_item_cache.size() != m_cache_size) {
             std::vector<std::shared_ptr<ItemImpl>> items;
             size_t s = m_datastore->nextItems(item_type, prefix_type, last, items, m_batch_size, target);
-            if(s != 0)
+            if(s != 0) {
+                update_batch_statistics(s);
                 last = items[items.size()-1];
+            }
             for(auto& item : items) {
                 fetchRequestedProducts(item);
                 m_item_cache.insert(std::move(item));
@@ -85,8 +88,10 @@ class SyncPrefetcherImpl : public PrefetcherImpl {
         auto product_id = DataStoreImpl::buildProductID(id, productName);
         auto it = m_product_cache.find(product_id.m_key);
         if(it == m_product_cache.end()) {
+            if(m_stats) m_stats->product_cache_miss += 1;
             return m_datastore->loadRawProduct(product_id, data);
         } else {
+            if(m_stats) m_stats->product_cache_hit += 1;
             data = std::move(it->second);
             m_product_cache.erase(it);
             return true;
@@ -99,9 +104,11 @@ class SyncPrefetcherImpl : public PrefetcherImpl {
         auto product_id = DataStoreImpl::buildProductID(id, productName);
         auto it = m_product_cache.find(product_id.m_key);
         if(it == m_product_cache.end()) {
+            if(m_stats) m_stats->product_cache_miss += 1;
             return m_datastore->loadRawProduct(id, productName, value, vsize);
         } else {
             *vsize = it->second.size();
+            if(m_stats) m_stats->product_cache_hit += 1;
             std::memcpy(value, it->second.data(), *vsize);
             return true;
         }
diff --git a/src/WriteBatch.cpp b/src/WriteBatch.cpp
index bbd9025742ad4bd608ab15e3978ef451bcc29410..dfda501c9b053acebd6b7732659fdf055d2a82fb 100644
--- a/src/WriteBatch.cpp
+++ b/src/WriteBatch.cpp
@@ -22,7 +22,12 @@ void WriteBatch::flush() {
 }
 
 void WriteBatch::activateStatistics(bool activate) {
-    m_impl->m_stats_enabled = activate;
+    if(activate) {
+        if(m_impl->m_stats) return;
+        m_impl->m_stats = std::make_unique<WriteBatchStatistics>();
+    } else {
+        m_impl->m_stats.reset();
+    }
 }
 
 void WriteBatch::collectStatistics(WriteBatchStatistics& stats) const {
diff --git a/src/WriteBatchImpl.hpp b/src/WriteBatchImpl.hpp
index 354a91035de09487bc95ee89d616825ad484bc1c..8ef1fdb5684c9a0a1423c1e0d3e5cb6dc90ada1d 100644
--- a/src/WriteBatchImpl.hpp
+++ b/src/WriteBatchImpl.hpp
@@ -31,8 +31,7 @@ class WriteBatchImpl {
     typedef std::unordered_map entries_type;
 
-    WriteBatchStatistics m_stats;
-    bool m_stats_enabled;
+    std::unique_ptr<WriteBatchStatistics> m_stats;
     mutable tl::mutex m_stats_mtx;
 
     std::shared_ptr<DataStoreImpl> m_datastore;
@@ -45,15 +44,15 @@ class WriteBatchImpl {
     bool m_async_thread_should_stop = false;
 
     void update_keyval_statistics(size_t ksize, size_t vsize) {
-        if(!m_stats_enabled) return;
-        m_stats.key_sizes.updateWith(ksize);
+        if(!m_stats) return;
+        m_stats->key_sizes.updateWith(ksize);
         if(vsize)
-            m_stats.value_sizes.updateWith(vsize);
+            m_stats->value_sizes.updateWith(vsize);
     }
 
     void update_operation_statistics(size_t batch_size) {
-        if(!m_stats_enabled) return;
-        m_stats.batch_sizes.updateWith(batch_size);
+        if(!m_stats) return;
+        m_stats->batch_sizes.updateWith(batch_size);
     }
 
     static void writer_thread(WriteBatchImpl& wb,
@@ -229,7 +228,8 @@ class WriteBatchImpl {
 
     void collectStatistics(WriteBatchStatistics& stats) const {
         std::unique_lock<tl::mutex> lock(m_stats_mtx);
-        stats = m_stats;
+        if(m_stats)
+            stats = *m_stats;
     }
 };
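Note on usage (not part of the diff): the Prefetcher now follows the same pattern as WriteBatch, where "statistics enabled" simply means the `m_stats` unique_ptr is non-null, and deactivating discards the collected counters. Below is a minimal client-side sketch of how the new API might be exercised; only `activateStatistics()`, `collectStatistics()`, and the two cache counters come from this change, while the way the `Prefetcher` instance is obtained and the helper names are illustrative assumptions.

```cpp
#include <iostream>
#include <hepnos/Prefetcher.hpp>   // header modified in this diff

// Hypothetical helper: print the cache counters added by this change.
// Only activateStatistics(), collectStatistics(), product_cache_hit and
// product_cache_miss are taken from the diff; everything else is a sketch.
void report_prefetch_stats(const hepnos::Prefetcher& prefetcher) {
    hepnos::PrefetcherStatistics stats;
    prefetcher.collectStatistics(stats);
    std::cout << "product cache hits:   " << stats.product_cache_hit  << "\n"
              << "product cache misses: " << stats.product_cache_miss << std::endl;
}

// Assumes an already-constructed Prefetcher (construction is outside this diff).
void run_with_stats(hepnos::Prefetcher& prefetcher) {
    prefetcher.activateStatistics();       // lazily allocates PrefetcherStatistics
    // ... iterate over runs/subruns/events through the prefetcher here ...
    report_prefetch_stats(prefetcher);
    prefetcher.activateStatistics(false);  // resets and discards the statistics
}
```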