1// Copyright 2009-2010 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
#include "src/profiler/heap-profiler.h"

#include <memory>
#include <utility>

#include "src/api/api-inl.h"
#include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
#include "src/profiler/sampling-heap-profiler.h"
15
16namespace v8 {
17namespace internal {
18
// Creates a profiler bound to `heap`: a fresh object-id map and string
// storage are allocated, and both move tracking and snapshotting start out
// disabled until explicitly requested.
HeapProfiler::HeapProfiler(Heap* heap)
    : ids_(new HeapObjectsMap(heap)),
      names_(new StringsStorage()),
      is_tracking_object_moves_(false),
      is_taking_snapshot_(false) {}
24
25HeapProfiler::~HeapProfiler() = default;
26
// Drops every retained heap snapshot, then releases the interned-name
// storage if no other client (sampling profiler, allocation tracker, or an
// in-progress snapshot) still needs it.
void HeapProfiler::DeleteAllSnapshots() {
  snapshots_.clear();
  MaybeClearStringsStorage();
}
31
32void HeapProfiler::MaybeClearStringsStorage() {
33  if (snapshots_.empty() && !sampling_heap_profiler_ && !allocation_tracker_ &&
34      !is_taking_snapshot_) {
35    names_.reset(new StringsStorage());
36  }
37}
38
39void HeapProfiler::RemoveSnapshot(HeapSnapshot* snapshot) {
40  snapshots_.erase(
41      std::find_if(snapshots_.begin(), snapshots_.end(),
42                   [&](const std::unique_ptr<HeapSnapshot>& entry) {
43                     return entry.get() == snapshot;
44                   }));
45}
46
47void HeapProfiler::AddBuildEmbedderGraphCallback(
48    v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
49  build_embedder_graph_callbacks_.push_back({callback, data});
50}
51
52void HeapProfiler::RemoveBuildEmbedderGraphCallback(
53    v8::HeapProfiler::BuildEmbedderGraphCallback callback, void* data) {
54  auto it = std::find(build_embedder_graph_callbacks_.begin(),
55                      build_embedder_graph_callbacks_.end(),
56                      std::make_pair(callback, data));
57  if (it != build_embedder_graph_callbacks_.end())
58    build_embedder_graph_callbacks_.erase(it);
59}
60
61void HeapProfiler::BuildEmbedderGraph(Isolate* isolate,
62                                      v8::EmbedderGraph* graph) {
63  for (const auto& cb : build_embedder_graph_callbacks_) {
64    cb.first(reinterpret_cast<v8::Isolate*>(isolate), graph, cb.second);
65  }
66}
67
68void HeapProfiler::SetGetDetachednessCallback(
69    v8::HeapProfiler::GetDetachednessCallback callback, void* data) {
70  get_detachedness_callback_ = {callback, data};
71}
72
// Asks the embedder-installed callback how "detached" the given wrapper
// object is. Callers must have checked HasGetDetachednessCallback() first
// (enforced by the DCHECK).
v8::EmbedderGraph::Node::Detachedness HeapProfiler::GetDetachedness(
    const v8::Local<v8::Value> v8_value, uint16_t class_id) {
  DCHECK(HasGetDetachednessCallback());
  return get_detachedness_callback_.first(
      reinterpret_cast<v8::Isolate*>(heap()->isolate()), v8_value, class_id,
      get_detachedness_callback_.second);
}
80
81HeapSnapshot* HeapProfiler::TakeSnapshot(
82    v8::ActivityControl* control,
83    v8::HeapProfiler::ObjectNameResolver* resolver,
84    bool treat_global_objects_as_roots, bool capture_numeric_value) {
85  is_taking_snapshot_ = true;
86  HeapSnapshot* result = new HeapSnapshot(this, treat_global_objects_as_roots,
87                                          capture_numeric_value);
88  {
89    HeapSnapshotGenerator generator(result, control, resolver, heap());
90    if (!generator.GenerateSnapshot()) {
91      delete result;
92      result = nullptr;
93    } else {
94      snapshots_.emplace_back(result);
95    }
96  }
97  ids_->RemoveDeadEntries();
98  is_tracking_object_moves_ = true;
99  is_taking_snapshot_ = false;
100
101  heap()->isolate()->debug()->feature_tracker()->Track(
102      DebugFeatureTracker::kHeapSnapshot);
103
104  return result;
105}
106
107bool HeapProfiler::StartSamplingHeapProfiler(
108    uint64_t sample_interval, int stack_depth,
109    v8::HeapProfiler::SamplingFlags flags) {
110  if (sampling_heap_profiler_.get()) {
111    return false;
112  }
113  sampling_heap_profiler_.reset(new SamplingHeapProfiler(
114      heap(), names_.get(), sample_interval, stack_depth, flags));
115  return true;
116}
117
118
// Stops and destroys the sampling profiler (a no-op if none is running),
// then releases the interned names if nothing else still needs them.
void HeapProfiler::StopSamplingHeapProfiler() {
  sampling_heap_profiler_.reset();
  MaybeClearStringsStorage();
}
123
124
125v8::AllocationProfile* HeapProfiler::GetAllocationProfile() {
126  if (sampling_heap_profiler_.get()) {
127    return sampling_heap_profiler_->GetAllocationProfile();
128  } else {
129    return nullptr;
130  }
131}
132
133
// Begins tracking heap object identities across GCs, and optionally
// allocation-by-allocation tracking as well. Must not be called while an
// allocation tracker already exists (DCHECK).
void HeapProfiler::StartHeapObjectsTracking(bool track_allocations) {
  // Refresh the id map first so tracking starts from a current view.
  ids_->UpdateHeapObjectsMap();
  is_tracking_object_moves_ = true;
  DCHECK(!allocation_tracker_);
  if (track_allocations) {
    allocation_tracker_.reset(new AllocationTracker(ids_.get(), names_.get()));
    // Register with the heap so AllocationEvent/MoveEvent callbacks fire.
    heap()->AddHeapObjectAllocationTracker(this);
    heap()->isolate()->debug()->feature_tracker()->Track(
        DebugFeatureTracker::kAllocationTracking);
  }
}
145
// Forwards to the id map: writes heap-object statistics to `stream` and, if
// `timestamp_us` is non-null, reports the sample timestamp through it.
SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream,
                                                    int64_t* timestamp_us) {
  return ids_->PushHeapObjectsStats(stream, timestamp_us);
}
150
// Stops heap-object tracking. If allocation tracking was on, the tracker is
// destroyed first, then the string storage is released if unused, and
// finally this profiler is unregistered from the heap's callback list.
void HeapProfiler::StopHeapObjectsTracking() {
  ids_->StopHeapObjectsTracking();
  if (allocation_tracker_) {
    allocation_tracker_.reset();
    MaybeClearStringsStorage();
    heap()->RemoveHeapObjectAllocationTracker(this);
  }
}
159
160int HeapProfiler::GetSnapshotsCount() const {
161  return static_cast<int>(snapshots_.size());
162}
163
164bool HeapProfiler::IsTakingSnapshot() const { return is_taking_snapshot_; }
165
// Returns the snapshot at `index`; `at()` range-checks, so an out-of-range
// index throws rather than reading out of bounds.
HeapSnapshot* HeapProfiler::GetSnapshot(int index) {
  return snapshots_.at(index).get();
}
169
170SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
171  if (!obj->IsHeapObject())
172    return v8::HeapProfiler::kUnknownObjectId;
173  return ids_->FindEntry(HeapObject::cast(*obj).address());
174}
175
176SnapshotObjectId HeapProfiler::GetSnapshotObjectId(NativeObject obj) {
177  // Try to find id of regular native node first.
178  SnapshotObjectId id = ids_->FindEntry(reinterpret_cast<Address>(obj));
179  // In case no id has been found, check whether there exists an entry where the
180  // native objects has been merged into a V8 entry.
181  if (id == v8::HeapProfiler::kUnknownObjectId) {
182    id = ids_->FindMergedNativeEntry(obj);
183  }
184  return id;
185}
186
// GC callback: an object moved from `from` to `to`. Updates the id map and,
// for objects the map did not already know about, forwards the move to the
// allocation tracker's address-to-trace table. Serialized by
// profiler_mutex_ since GC may report moves concurrently with lookups.
void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) {
  base::MutexGuard guard(&profiler_mutex_);
  bool known_object = ids_->MoveObject(from, to, size);
  if (!known_object && allocation_tracker_) {
    allocation_tracker_->address_to_trace()->MoveObject(from, to, size);
  }
}
194
// Heap callback: a new object of `size` bytes was allocated at `addr`.
// Forwarded to the allocation tracker when one is active; GC is disallowed
// for the duration so `addr` stays valid.
void HeapProfiler::AllocationEvent(Address addr, int size) {
  DisallowGarbageCollection no_gc;
  if (allocation_tracker_) {
    allocation_tracker_->AllocationEvent(addr, size);
  }
}
201
202
// Heap callback: the object at `addr` changed size (e.g. was right-trimmed);
// keep the id map's size bookkeeping in sync.
void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
  ids_->UpdateObjectSize(addr, size);
}
206
// Maps a snapshot object id back to a live heap object, returning an empty
// handle if no reachable object carries that id.
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
  HeapObject object;
  CombinedHeapObjectIterator iterator(heap(),
                                      HeapObjectIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject obj = iterator.Next(); !obj.is_null();
       obj = iterator.Next()) {
    if (ids_->FindEntry(obj.address()) == id) {
      // At most one object may match; the DCHECK guards that invariant.
      DCHECK(object.is_null());
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }

  return !object.is_null() ? Handle<HeapObject>(object, isolate())
                           : Handle<HeapObject>();
}
224
225
// Discards all recorded object ids by replacing the id map wholesale. Move
// tracking stays on only if an allocation tracker still depends on it.
void HeapProfiler::ClearHeapObjectMap() {
  ids_.reset(new HeapObjectsMap(heap()));
  if (!allocation_tracker_) is_tracking_object_moves_ = false;
}
230
231
232Heap* HeapProfiler::heap() const { return ids_->heap(); }
233
234Isolate* HeapProfiler::isolate() const { return heap()->isolate(); }
235
// Collects, into `objects`, every live JSObject in `context`'s heap that the
// embedder-supplied `predicate` accepts.
//
// The method runs in two phases: a preparation pass that clears feedback
// vectors and materializes on-heap typed arrays' buffers (both of which
// would otherwise keep objects alive or be unreportable), followed by a full
// GC and a second pass that applies the predicate to the survivors.
void HeapProfiler::QueryObjects(Handle<Context> context,
                                debug::QueryObjectPredicate* predicate,
                                PersistentValueVector<v8::Object>* objects) {
  {
    HandleScope handle_scope(isolate());
    std::vector<Handle<JSTypedArray>> on_heap_typed_arrays;
    CombinedHeapObjectIterator heap_iterator(
        heap(), HeapObjectIterator::kFilterUnreachable);
    for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
         heap_obj = heap_iterator.Next()) {
      if (heap_obj.IsFeedbackVector()) {
        // Drop feedback slots so they don't retain otherwise-dead objects.
        FeedbackVector::cast(heap_obj).ClearSlots(isolate());
      } else if (heap_obj.IsJSTypedArray() &&
                 JSTypedArray::cast(heap_obj).is_on_heap()) {
        // Cannot call typed_array->GetBuffer() here directly because it may
        // trigger GC. Defer that call by collecting the object in a vector.
        on_heap_typed_arrays.push_back(
            handle(JSTypedArray::cast(heap_obj), isolate()));
      }
    }
    for (auto& typed_array : on_heap_typed_arrays) {
      // Convert the on-heap typed array into off-heap typed array, so that
      // its ArrayBuffer becomes valid and can be returned in the result.
      typed_array->GetBuffer();
    }
  }
  // We should return accurate information about live objects, so we need to
  // collect all garbage first.
  heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
  CombinedHeapObjectIterator heap_iterator(
      heap(), HeapObjectIterator::kFilterUnreachable);
  PtrComprCageBase cage_base(isolate());
  for (HeapObject heap_obj = heap_iterator.Next(); !heap_obj.is_null();
       heap_obj = heap_iterator.Next()) {
    // Only plain JSObjects are reported; external objects are skipped.
    if (!heap_obj.IsJSObject(cage_base) ||
        heap_obj.IsJSExternalObject(cage_base))
      continue;
    v8::Local<v8::Object> v8_obj(
        Utils::ToLocal(handle(JSObject::cast(heap_obj), isolate())));
    if (!predicate->Filter(v8_obj)) continue;
    objects->Append(v8_obj);
  }
}
279
280}  // namespace internal
281}  // namespace v8
282