Test Info: Warnings
- This test gets skipped with pattern: !gecko_profiler
- Manifest: toolkit/components/glean/tests/xpcshell/xpcshell.toml
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/* Test the firefox-on-glean profiler integration. */
"use strict";
const { AppConstants } = ChromeUtils.importESModule(
"resource://gre/modules/AppConstants.sys.mjs"
);
const { ObjectUtils } = ChromeUtils.importESModule(
"resource://gre/modules/ObjectUtils.sys.mjs"
);
const { ProfilerTestUtils } = ChromeUtils.importESModule(
"resource://testing-common/ProfilerTestUtils.sys.mjs"
);
add_setup(
/* on Android FOG is set up through the global xpcshell head.js */
{ skip_if: () => !runningInParent || AppConstants.platform == "android" },
function test_setup() {
// FOG needs a profile directory to put its data in.
do_get_profile();
// We need to initialize it once, otherwise operations will be stuck in the pre-init queue.
Services.fog.initializeFOG();
}
);
// It is CRUCIAL that metrics are registered in the same order in the parent
// and the child or their metric ids will not line up and ALL WILL EXPLODE.
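// Each entry holds the arguments spread into
// Services.fog.testRegisterRuntimeMetric below: metric type, category, name,
// send-in pings, lifetime (as a JSON string, hence the quoted "ping"), and
// whether the metric is disabled.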
const METRICS = [
[
"labeled_counter",
"jog_ipc",
"jog_labeled_counter",
["test-ping"],
`"ping"`,
false,
],
];
/**
* Start the profiler, run `func`, stop the profiler, and get a collection of
* markers that were recorded while running `func`.
*
* @param {string} type The marker payload type, e.g. "Counter"
* @param {Function} func The function that runs Glean code to generate markers
* @returns {object} The markers generated during `func`, with the id field
* expanded using the string table
*/
async function runWithProfilerAndGetMarkers(type, func) {
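// Start the profiler without stack sampling ("nostacksampling"): we only need
// markers here, and we only profile the main thread.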
await ProfilerTestUtils.startProfiler({
entries: 10000,
interval: 10,
features: ["nostacksampling"],
threads: ["GeckoMain"],
});
Assert.ok(Services.profiler.IsActive());
await func();
let profile = await ProfilerTestUtils.stopNowAndGetProfile();
// We assume that we only have one thread being profiled here.
Assert.equal(
profile.threads.length,
1,
"We should only be profiling one thread"
);
let markers = ProfilerTestUtils.getPayloadsOfType(profile.threads[0], type);
let stringTable = profile.threads[0].stringTable;
// We expect that the id, or name, of a marker should be a unique string.
// Go through them and look up the values so that we can just write a string
// in the test, and not use a numerical id (which may not be stable!)
for (let marker of markers) {
marker.id = stringTable[marker.id];
if (marker.cat != undefined) {
marker.cat = stringTable[marker.cat];
}
if (marker.label != undefined) {
marker.label = stringTable[marker.label];
}
}
// Next, filter the markers so that we only return those generated by test
// metrics. We need to do this, as some metric types are used by other
// parts of Fx, which may be running during the test. As we cannot predict
// when they may occur, or what values they may hold, we can't write down
// a "gold" value in our tests to compare them to - so we need to remove
// them. Some notable existing examples include:
// - Memory distribution metrics, "performanceCloneDeserialize.<x>"
// - Custom distribution metrics (same as memory distribution)
// - Timing distribution metrics (for timing of performance
// clone/deserialize)
// - Counter metrics ("javascriptGc"), which caused intermittents in Bug
// 1943425
markers = markers.filter(marker => {
// Metric markers with names coming from JS all start with `testOnly`,
// `test_only` or `jog_`
let name =
marker.id.startsWith("testOnly") ||
marker.id.startsWith("test_only") ||
marker.id.startsWith("jog_");
// Marker categories all start with test or jog
let cat = false;
if (marker.cat != undefined) {
cat = marker.cat.startsWith("test") || marker.cat.startsWith("jog_");
}
// Ping markers are a little more varied, so we enumerate them
let ping = ["test-ping", "one-ping-only", "ride-along-ping"].includes(
marker.id
);
// Error markers start with "JOG"
let error = marker.id.startsWith("JOG");
return name || cat || ping || error;
});
// Return selected markers
return markers;
}
add_task({ skip_if: () => runningInParent }, async function run_child_stuff() {
// Ensure any _actual_ runtime metrics are registered first.
// Otherwise the jog_ipc.* ones will have incorrect ids.
Glean.testOnly.badCode;
// Register our test metrics.
for (let metric of METRICS) {
Services.fog.testRegisterRuntimeMetric(...metric);
}
let markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
Glean.jogIpc.jogLabeledCounter.label1.add(1);
Glean.jogIpc.jogLabeledCounter.label1.add(2);
});
// Interestingly, due to compiler optimizations we might get either of two
// distinct errors here.
const lookupFailedMarkers = [
{
type: "IntLikeMetric",
cat: "",
id: "JOGMetricMapLookupFailed",
label: "label1",
val: 1,
},
{
type: "IntLikeMetric",
cat: "",
id: "JOGMetricMapLookupFailed",
label: "label1",
val: 2,
},
];
const mapUninitMarkers = [
{
type: "IntLikeMetric",
cat: "",
id: "JOGMetricMapWasUninit",
label: "label1",
val: 1,
},
{
type: "IntLikeMetric",
cat: "",
id: "JOGMetricMapWasUninit",
label: "label1",
val: 2,
},
];
Assert.ok(
ObjectUtils.deepEqual(markers, lookupFailedMarkers) ||
ObjectUtils.deepEqual(markers, mapUninitMarkers),
"Error markers present."
);
});
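// In the parent, re-run this test file in a child xpcshell process. Only
// run_child_stuff executes there (its skip_if skips it in the parent), which
// exercises the JOG metric markers recorded across IPC.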
add_task(
{ skip_if: () => !runningInParent },
async function test_child_markers() {
await run_test_in_child("test_ProfilerMarkersIPC.js");
}
);