Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ jobs:
target-name: 'dd_pprof' # target name in binding.gyp
package-manager: 'npm' # npm or yarn
cache: true # enable caching of dependencies based on lockfile
min-node-version: 16
min-node-version: 18
skip: 'linux-arm,linux-ia32' # skip building for these platforms

dev_publish:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/package-size.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jobs:
- name: Setup Node.js
uses: actions/setup-node@v2
with:
node-version: '16'
node-version: '22'
- run: yarn
- name: Compute module size tree and report
uses: qard/heaviest-objects-in-the-universe@v1
Expand Down
73 changes: 57 additions & 16 deletions bindings/profilers/wall.cc
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
#include <limits>
#include <memory>
#include <mutex>
#include <type_traits>
#include <vector>

#include <nan.h>
Expand Down Expand Up @@ -120,6 +121,18 @@ class ProtectedProfilerMap {
return profiler;
}

// Atomically removes and returns the profiler registered for `isolate`,
// or nullptr if none is registered. Runs under the map update mutex via
// UpdateProfilers, so it is safe against concurrent registration/removal
// from other isolates' threads.
WallProfiler* RemoveProfilerForIsolate(const v8::Isolate* isolate) {
return UpdateProfilers([isolate](auto map) {
auto it = map->find(isolate);
if (it != map->end()) {
auto profiler = it->second;
// Erase first, then hand ownership of the profiler back to the caller.
map->erase(it);
return profiler;
}
// Explicit cast so both lambda return paths deduce WallProfiler*.
return static_cast<WallProfiler*>(nullptr);
});
}

bool RemoveProfiler(const v8::Isolate* isolate, WallProfiler* profiler) {
return UpdateProfilers([isolate, profiler, this](auto map) {
terminatedWorkersCpu_ += profiler->GetAndResetThreadCpu();
Expand Down Expand Up @@ -177,8 +190,10 @@ class ProtectedProfilerMap {
}

private:
using ProfilerMap = std::unordered_map<const Isolate*, WallProfiler*>;

template <typename F>
bool UpdateProfilers(F updateFn) {
std::invoke_result_t<F, ProfilerMap*> UpdateProfilers(F updateFn) {
// use mutex to prevent two isolates of updating profilers concurrently
std::lock_guard<std::mutex> lock(update_mutex_);

Expand Down Expand Up @@ -207,7 +222,6 @@ class ProtectedProfilerMap {
return res;
}

using ProfilerMap = std::unordered_map<const Isolate*, WallProfiler*>;
mutable std::atomic<ProfilerMap*> profilers_;
std::mutex update_mutex_;
bool init_ = false;
Expand Down Expand Up @@ -366,6 +380,27 @@ static int64_t GetV8ToEpochOffset() {
return V8toEpochOffset;
}

// Environment cleanup hook registered in StartImpl via
// node::AddEnvironmentCleanupHook; `data` is the Isolate* it was
// registered with. Detaches the isolate's profiler from the global map
// (if still present), stops/cleans it up, and frees it.
void WallProfiler::CleanupHook(void* data) {
auto isolate = static_cast<Isolate*>(data);
auto prof = g_profilers.RemoveProfilerForIsolate(isolate);
if (prof) {
prof->Cleanup(isolate);
// The profiler was removed from the map above, so this hook owns it now.
delete prof;
}
}

// This is only called when isolate is terminated without `beforeExit`
// notification, i.e. from CleanupHook. Stops an in-flight profile and
// releases per-isolate resources.
void WallProfiler::Cleanup(Isolate* isolate) {
if (started_) {
// Stop the V8 profile that StartInternal began under profileId_.
cpuProfiler_->Stop(profileId_);
if (interceptSignal()) {
SignalHandler::DecreaseUseCount();
}
// removeFromMap=false: CleanupHook already removed this profiler from
// the global map via RemoveProfilerForIsolate before calling us.
Dispose(isolate, false);
}
}

ContextsByNode WallProfiler::GetContextsByNode(CpuProfile* profile,
ContextBuffer& contexts,
int64_t startCpuTime) {
Expand Down Expand Up @@ -547,21 +582,22 @@ WallProfiler::WallProfiler(std::chrono::microseconds samplingPeriod,
}
}

WallProfiler::~WallProfiler() {
Dispose(nullptr);
}

void WallProfiler::Dispose(Isolate* isolate) {
void WallProfiler::Dispose(Isolate* isolate, bool removeFromMap) {
if (cpuProfiler_ != nullptr) {
cpuProfiler_->Dispose();
cpuProfiler_ = nullptr;

g_profilers.RemoveProfiler(isolate, this);
if (removeFromMap) {
g_profilers.RemoveProfiler(isolate, this);
}

if (isolate != nullptr && collectAsyncId_) {
if (collectAsyncId_) {
isolate->RemoveGCPrologueCallback(&GCPrologueCallback, this);
isolate->RemoveGCEpilogueCallback(&GCEpilogueCallback, this);
}

node::RemoveEnvironmentCleanupHook(
isolate, &WallProfiler::CleanupHook, isolate);
}
}

Expand Down Expand Up @@ -702,17 +738,19 @@ Result WallProfiler::StartImpl() {
: CollectionMode::kNoCollect);
collectionMode_.store(collectionMode, std::memory_order_relaxed);
started_ = true;
auto isolate = Isolate::GetCurrent();
node::AddEnvironmentCleanupHook(isolate, &WallProfiler::CleanupHook, isolate);
return {};
}

std::string WallProfiler::StartInternal() {
v8::ProfilerId WallProfiler::StartInternal() {
// Reuse the same names for the profiles because strings used for profile
// names are not released until v8::CpuProfiler object is destroyed.
// https://github.com/nodejs/node/blob/b53c51995380b1f8d642297d848cab6010d2909c/deps/v8/src/profiler/profile-generator.h#L516
char buf[128];
snprintf(buf, sizeof(buf), "pprof-%" PRId64, (profileIdx_++) % 2);
v8::Local<v8::String> title = Nan::New<String>(buf).ToLocalChecked();
cpuProfiler_->StartProfiling(
auto result = cpuProfiler_->Start(
title,
includeLines_ ? CpuProfilingMode::kCallerLineNumbers
: CpuProfilingMode::kLeafNodeLineNumbers,
Expand Down Expand Up @@ -752,7 +790,7 @@ std::string WallProfiler::StartInternal() {
cpuProfiler_->CollectSample(v8::Isolate::GetCurrent());
}

return buf;
return result.id;
}

NAN_METHOD(WallProfiler::Stop) {
Expand Down Expand Up @@ -837,12 +875,11 @@ Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
std::atomic_signal_fence(std::memory_order_acquire);
}

if (withContexts_ || workaroundV8Bug_) {
if (interceptSignal()) {
SignalHandler::DecreaseUseCount();
}

auto v8_profile = cpuProfiler_->StopProfiling(
Nan::New<String>(oldProfileId).ToLocalChecked());
auto v8_profile = cpuProfiler_->Stop(oldProfileId);

ContextBuffer contexts;
if (withContexts_) {
Expand Down Expand Up @@ -896,7 +933,7 @@ Result WallProfiler::StopImpl(bool restart, v8::Local<v8::Value>& profile) {
v8_profile->Delete();

if (!restart) {
Dispose(v8::Isolate::GetCurrent());
Dispose(v8::Isolate::GetCurrent(), true);
} else if (workaroundV8Bug_) {
waitForSignal(callCount + 1);
collectionMode_.store(withContexts_ ? CollectionMode::kCollectContexts
Expand Down Expand Up @@ -1017,6 +1054,10 @@ NAN_METHOD(WallProfiler::V8ProfilerStuckEventLoopDetected) {

// JS-exposed Dispose(): destroys the native WallProfiler backing
// `info.This()`. Throws a TypeError if the profiler is still running —
// callers must stop it first so the underlying v8::CpuProfiler and
// signal-handler use counts are wound down before destruction.
NAN_METHOD(WallProfiler::Dispose) {
auto profiler = Nan::ObjectWrap::Unwrap<WallProfiler>(info.This());
// Profiler must already be stopped when this is called.
if (profiler->started_) {
return Nan::ThrowTypeError("Profiler is still running, stop it first.");
}
delete profiler;
}

Expand Down
12 changes: 8 additions & 4 deletions bindings/profilers/wall.hh
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ class WallProfiler : public Nan::ObjectWrap {

std::atomic<CollectionMode> collectionMode_;
std::atomic<uint64_t> noCollectCallCount_;
std::string profileId_;
v8::ProfilerId profileId_;
uint64_t profileIdx_ = 0;
bool includeLines_ = false;
bool withContexts_ = false;
Expand Down Expand Up @@ -92,8 +92,8 @@ class WallProfiler : public Nan::ObjectWrap {
using ContextBuffer = std::vector<SampleContext>;
ContextBuffer contexts_;

~WallProfiler();
void Dispose(v8::Isolate* isolate);
~WallProfiler() = default;
void Dispose(v8::Isolate* isolate, bool removeFromMap);

// A new CPU profiler object will be created each time profiling is started
// to work around https://bugs.chromium.org/p/v8/issues/detail?id=11051.
Expand All @@ -104,6 +104,8 @@ class WallProfiler : public Nan::ObjectWrap {
int64_t startCpuTime);

bool waitForSignal(uint64_t targetCallCount = 0);
static void CleanupHook(void* data);
void Cleanup(v8::Isolate* isolate);

public:
/**
Expand All @@ -129,7 +131,7 @@ class WallProfiler : public Nan::ObjectWrap {
int64_t cpu_time,
double async_id);
Result StartImpl();
std::string StartInternal();
v8::ProfilerId StartInternal();
Result StopImpl(bool restart, v8::Local<v8::Value>& profile);

CollectionMode collectionMode() {
Expand All @@ -143,6 +145,8 @@ class WallProfiler : public Nan::ObjectWrap {

bool collectCpuTime() const { return collectCpuTime_; }

bool interceptSignal() const { return withContexts_ || workaroundV8Bug_; }

int v8ProfilerStuckEventLoopDetected() const {
return v8ProfilerStuckEventLoopDetected_;
}
Expand Down
Loading
Loading