30 #ifndef _GLIBCXX_PROFILE_PROFILER_MAP_TO_UNORDERED_MAP_H 31 #define _GLIBCXX_PROFILE_PROFILER_MAP_TO_UNORDERED_MAP_H 1 40 __log2(std::size_t __size)
42 for (
int __bit_count =
sizeof(std::size_t) - 1; __bit_count >= 0;
44 if ((2 << __bit_count) & __size)
50 __map_insert_cost(std::size_t __size)
51 {
return (_GLIBCXX_PROFILE_DATA(__map_insert_cost_factor).__value
52 * static_cast<float>(__log2(__size))); }
55 __map_erase_cost(std::size_t __size)
56 {
return (_GLIBCXX_PROFILE_DATA(__map_erase_cost_factor).__value
57 * static_cast<float>(__log2(__size))); }
60 __map_find_cost(std::size_t __size)
61 {
return (_GLIBCXX_PROFILE_DATA(__map_find_cost_factor).__value
62 * static_cast<float>(__log2(__size))); }
72 _M_iterate(0), _M_umap_cost(0.0), _M_map_cost(0.0)
78 __object_info_base::__merge(__o);
79 _M_insert += __o._M_insert;
80 _M_erase += __o._M_erase;
81 _M_find += __o._M_find;
82 _M_iterate += __o._M_iterate;
83 _M_umap_cost += __o._M_umap_cost;
84 _M_map_cost += __o._M_map_cost;
88 __write(FILE* __f)
const 90 std::fprintf(__f,
"%Zu %Zu %Zu %Zu %.0f %.0f\n",
91 _M_insert, _M_erase, _M_find, _M_iterate, _M_map_cost,
97 {
return _M_map_cost - _M_umap_cost; }
101 {
return "prefer an unordered container"; }
104 __record_insert(std::size_t __size, std::size_t __count)
109 _M_map_cost += __count * __map_insert_cost(__size);
112 * _GLIBCXX_PROFILE_DATA(__umap_insert_cost_factor).__value);
117 __record_erase(std::size_t __size, std::size_t __count)
122 _M_map_cost += __count * __map_erase_cost(__size);
125 * _GLIBCXX_PROFILE_DATA(__umap_erase_cost_factor).__value);
130 __record_find(std::size_t __size)
133 _M_map_cost += __map_find_cost(__size);
134 _M_umap_cost += _GLIBCXX_PROFILE_DATA(__umap_find_cost_factor).__value;
138 __record_iterate(
int __count)
139 { __gnu_cxx::__atomic_add(&_M_iterate, __count); }
142 __set_iterate_costs()
146 * _GLIBCXX_PROFILE_DATA(__umap_iterate_cost_factor).__value);
149 * _GLIBCXX_PROFILE_DATA(__map_iterate_cost_factor).__value);
153 std::size_t _M_insert;
154 std::size_t _M_erase;
156 mutable _Atomic_word _M_iterate;
174 :
public __trace_base<__map2umap_info, __map2umap_stack_info>
179 { __id =
"ordered-to-unordered"; }
185 __obj_info->__set_iterate_costs();
186 __retire_object(__obj_info);
191 __trace_map_to_unordered_map_init()
195 __trace_map_to_unordered_map_free()
196 {
delete _GLIBCXX_PROFILE_DATA(_S_map2umap); }
199 __trace_map_to_unordered_map_report(FILE* __f,
200 __warning_vector_t& __warnings)
201 { __trace_report(_GLIBCXX_PROFILE_DATA(_S_map2umap), __f, __warnings); }
203 inline __map2umap_info*
204 __trace_map_to_unordered_map_construct()
209 if (!__reentrance_guard::__get_in())
212 __reentrance_guard __get_out;
213 return _GLIBCXX_PROFILE_DATA(_S_map2umap)->__add_object(__get_stack());
217 __trace_map_to_unordered_map_insert(__map2umap_info* __info,
218 std::size_t __size, std::size_t __count)
223 __info->__record_insert(__size, __count);
227 __trace_map_to_unordered_map_erase(__map2umap_info* __info,
228 std::size_t __size, std::size_t __count)
233 __info->__record_erase(__size, __count);
237 __trace_map_to_unordered_map_find(__map2umap_info* __info,
243 __info->__record_find(__size);
247 __trace_map_to_unordered_map_iterate(__map2umap_info* __info,
254 __info->__record_iterate(1);
258 __trace_map_to_unordered_map_invalidate(__map2umap_info* __info)
263 __info->__set_invalid();
267 __trace_map_to_unordered_map_destruct(__map2umap_info* __info)
272 _GLIBCXX_PROFILE_DATA(_S_map2umap)->__destruct(__info);
// Documentation fragments recovered from the original doxygen comments
// (scattered to the end of the file by extraction); retained verbatim:
//  - Base class for all trace producers.
//  - A map-to-unordered_map instrumentation line in the stack table.
//  - GNU profile code for public use.
//  - Data structures to represent a single profiling event.
//  - A map-to-unordered_map instrumentation line in the object table.
//  - Map-to-unordered_map instrumentation producer.
//  - Interface of the profiling runtime library.
//  - bool __profcxx_init()
//  - This function must be called by each instrumentation point.
//  - Data structures to represent profiling traces.
//  - Base class for a line in the object table.