
/* Any copyright is dedicated to the Public Domain.
   http://creativecommons.org/publicdomain/zero/1.0/ */

/* Test the firefox-on-glean profiler integration. */
"use strict";
const { AppConstants } = ChromeUtils.importESModule(
  "resource://gre/modules/AppConstants.sys.mjs"
);
// BUG FIX: the module URL was missing from this importESModule() call, which
// would throw at load time. ProfilerTestUtils is a test-only module, so it is
// registered under resource://testing-common/.
const { ProfilerTestUtils } = ChromeUtils.importESModule(
  "resource://testing-common/ProfilerTestUtils.sys.mjs"
);
const { setTimeout } = ChromeUtils.importESModule(
  "resource://gre/modules/Timer.sys.mjs"
);
/**
 * Resolve after roughly `ms` milliseconds.
 *
 * @param {number} ms Delay in milliseconds.
 * @returns {Promise<void>} Resolved once the timeout fires.
 */
function sleep(ms) {
  /* eslint-disable mozilla/no-arbitrary-setTimeout */
  return new Promise(done => {
    setTimeout(done, ms);
  });
}
add_setup(
  /* on Android FOG is set up through the global xpcshell head.js */
  { skip_if: () => AppConstants.platform == "android" },
  // One-time test setup: give FOG a profile directory and initialize it.
  function test_setup() {
    // FOG needs a profile directory to put its data in.
    do_get_profile();
    // We need to initialize it once, otherwise operations will be stuck in the pre-init queue.
    Services.fog.initializeFOG();
  }
);
/**
 * Start the profiler, run `func`, stop the profiler, and get a collection of
 * markers that were recorded while running `func`.
 *
 * @param {string} type The marker payload type, e.g. "Counter"
 * @param {object} func The function that runs glean code to generate markers
 * @returns {object} The markers generated during `func`, with the id field
 *          expanded using the string table
 */
async function runWithProfilerAndGetMarkers(type, func) {
  await ProfilerTestUtils.startProfiler({
    entries: 10000,
    interval: 10,
    features: ["nostacksampling"],
    threads: ["GeckoMain"],
  });
  Assert.ok(Services.profiler.IsActive());
  await func();
  const profile = await ProfilerTestUtils.stopNowAndGetProfile();
  // This helper assumes a single profiled thread.
  Assert.equal(
    profile.threads.length,
    1,
    "We should only be profiling one thread"
  );
  const [thread] = profile.threads;
  const { stringTable } = thread;
  const markers = ProfilerTestUtils.getPayloadsOfType(thread, type);
  // The id/cat/label fields are indexes into the thread's string table, and
  // those indexes may not be stable across runs. Expand them to strings so
  // tests can compare against plain literals.
  for (const marker of markers) {
    marker.id = stringTable[marker.id];
    if (marker.cat != undefined) {
      marker.cat = stringTable[marker.cat];
    }
    if (marker.label != undefined) {
      marker.label = stringTable[marker.label];
    }
  }
  // Keep only markers generated by test metrics. Other parts of Firefox may
  // record metrics while the profiler runs (e.g. the
  // performanceCloneDeserialize.* memory/custom/timing distributions, or the
  // javascriptGc counter that caused intermittents in Bug 1943425), and their
  // timing and values are unpredictable, so we can't write down "gold" values
  // for them in tests — they must be filtered out.
  // Metric markers with names coming from JS all start with `testOnly`,
  // `test_only` or `jog_`.
  const fromTestName = m =>
    m.id.startsWith("testOnly") ||
    m.id.startsWith("test_only") ||
    m.id.startsWith("jog_");
  // Marker categories all start with test or jog.
  const fromTestCategory = m =>
    m.cat != undefined && (m.cat.startsWith("test") || m.cat.startsWith("jog_"));
  // Ping markers are a little more varied, so we enumerate them.
  const fromTestPing = m =>
    ["test-ping", "one-ping-only", "ride-along-ping"].includes(m.id);
  return markers.filter(
    m => fromTestName(m) || fromTestCategory(m) || fromTestPing(m)
  );
}
add_task(async function test_fog_counter_markers() {
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.testOnly.badCode.add(31);
    // Labeled counter submetrics
    Glean.testOnly.mabelsKitchenCounters.near_the_sink.add(1);
    Glean.testOnly.mabelsKitchenCounters.with_junk_on_them.add(2);
    // Invalid label, but we still expect to see a marker
    Glean.testOnly.mabelsKitchenCounters["1".repeat(72)].add(1);
  });
  // Partial payloads below are expanded into full markers by the map().
  const expected = [
    { id: "bad_code", val: 31 },
    { id: "mabels_kitchen_counters", label: "near_the_sink", val: 1 },
    { id: "mabels_kitchen_counters", label: "with_junk_on_them", val: 2 },
    { id: "mabels_kitchen_counters", label: "1".repeat(72), val: 1 },
  ].map(m => ({ type: "IntLikeMetric", cat: "test_only", ...m }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_counter_markers() {
  // Register a runtime ("JOG") counter metric, then record to it.
  Services.fog.testRegisterRuntimeMetric(
    "counter",
    "jog_cat",
    "jog_counter",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.jogCat.jogCounter.add(53);
  });
  const expected = [
    { type: "IntLikeMetric", cat: "jog_cat", id: "jog_counter", val: 53 },
  ];
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_static_labeled_counter_markers() {
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    // Statically labeled counter submetrics
    Glean.testOnly.mabelsLabeledCounters.next_to_the_fridge.add(1);
    Glean.testOnly.mabelsLabeledCounters.clean.add(2);
    Glean.testOnly.mabelsLabeledCounters["1st_counter"].add(2);
    Glean.testOnlyIpc.aLabeledCounterForCategorical.CommonLabel.add(1);
    Glean.testOnlyIpc.aLabeledCounterForCategorical.Label4.add(1);
    Glean.testOnlyIpc.aLabeledCounterForCategorical.Label5.add(1);
    Glean.testOnlyIpc.aLabeledCounterForCategorical.Label6.add(1);
    Glean.testOnlyIpc.aLabeledCounterForHgram.false.add(1);
    Glean.testOnlyIpc.aLabeledCounterForHgram.true.add(1);
  });
  // [cat, id, label, val] tuples, expanded into full marker payloads below.
  const expected = [
    ["test_only", "mabels_labeled_counters", "next_to_the_fridge", 1],
    ["test_only", "mabels_labeled_counters", "clean", 2],
    ["test_only", "mabels_labeled_counters", "1st_counter", 2],
    ["test_only.ipc", "a_labeled_counter_for_categorical", "CommonLabel", 1],
    ["test_only.ipc", "a_labeled_counter_for_categorical", "Label4", 1],
    ["test_only.ipc", "a_labeled_counter_for_categorical", "Label5", 1],
    ["test_only.ipc", "a_labeled_counter_for_categorical", "Label6", 1],
    ["test_only.ipc", "a_labeled_counter_for_hgram", "false", 1],
    ["test_only.ipc", "a_labeled_counter_for_hgram", "true", 1],
  ].map(([cat, id, label, val]) => ({
    type: "IntLikeMetric",
    cat,
    id,
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_counter_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_counter",
    "jog_cat",
    "jog_labeled_counter",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.jogCat.jogLabeledCounter.label_1.add(1);
    Glean.jogCat.jogLabeledCounter.label_2.add(2);
    // Over-long (invalid) label, but a marker is still expected.
    Glean.jogCat.jogLabeledCounter["1".repeat(72)].add(1);
  });
  const expected = [
    ["label_1", 1],
    ["label_2", 2],
    ["1".repeat(72), 1],
  ].map(([label, val]) => ({
    type: "IntLikeMetric",
    cat: "jog_cat",
    id: "jog_labeled_counter",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_counter_with_static_labels_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_counter",
    "jog_cat",
    "jog_labeled_counter_with_labels",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ ordered_labels: ["label_1", "label_2"] })
  );
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.jogCat.jogLabeledCounterWithLabels.label_1.add(1);
    Glean.jogCat.jogLabeledCounterWithLabels.label_2.add(2);
    // Not in ordered_labels, so it is recorded under __other__ (see below).
    Glean.jogCat.jogLabeledCounterWithLabels["1".repeat(72)].add(1);
  });
  const expected = [
    ["label_1", 1],
    ["label_2", 2],
    ["__other__", 1],
  ].map(([label, val]) => ({
    type: "IntLikeMetric",
    cat: "jog_cat",
    id: "jog_labeled_counter_with_labels",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_string_markers() {
  const value = "a cheesy string!";
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnly.cheesyString.set(value);
    Glean.testOnly.cheesyString.set("a".repeat(2048));
    // Labeled string submetrics
    Glean.testOnly.mabelsLabelMaker.singleword.set("portmanteau");
    Glean.testOnly.mabelsLabelMaker.snake_case.set("snek");
    Glean.testOnly.mabelsLabelMaker["dash-character"].set("Dash Rendar");
    Glean.testOnly.mabelsLabelMaker["dot.separated"].set("dot product");
    Glean.testOnly.mabelsLabelMaker.camelCase.set("wednesday");
    // Invalid label, but we still expect to see a marker
    Glean.testOnly.mabelsLabelMaker["1".repeat(72)].set("seventy-two");
  });
  // The 2048-char value is truncated to 1024 chars in its marker.
  const expected = [
    { id: "cheesy_string", val: value },
    { id: "cheesy_string", val: "a".repeat(1024) },
    { id: "mabels_label_maker", label: "singleword", val: "portmanteau" },
    { id: "mabels_label_maker", label: "snake_case", val: "snek" },
    { id: "mabels_label_maker", label: "dash-character", val: "Dash Rendar" },
    { id: "mabels_label_maker", label: "dot.separated", val: "dot product" },
    { id: "mabels_label_maker", label: "camelCase", val: "wednesday" },
    { id: "mabels_label_maker", label: "1".repeat(72), val: "seventy-two" },
  ].map(m => ({ type: "StringLikeMetric", cat: "test_only", ...m }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_static_labeled_string_markers() {
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    // Statically labeled string submetrics
    Glean.testOnly.mabelsBalloonLabels.celebratory.set("hooray");
    Glean.testOnly.mabelsBalloonLabels.celebratory_and_snarky.set(
      "oh yay, hooray"
    );
  });
  const expected = [
    ["celebratory", "hooray"],
    ["celebratory_and_snarky", "oh yay, hooray"],
  ].map(([label, val]) => ({
    type: "StringLikeMetric",
    cat: "test_only",
    id: "mabels_balloon_labels",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_string_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "string",
    "jog_cat",
    "jog_string",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.jogCat.jogString.set("an active string!");
  });
  Assert.deepEqual(markers, [
    {
      type: "StringLikeMetric",
      cat: "jog_cat",
      id: "jog_string",
      val: "an active string!",
    },
  ]);
});
add_task(async function test_jog_labeled_string_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_string",
    "jog_cat",
    "jog_labeled_string",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.jogCat.jogLabeledString.label_1.set("crimson");
    Glean.jogCat.jogLabeledString.label_2.set("various");
    Glean.jogCat.jogLabeledString["1".repeat(72)].set("valid");
  });
  const expected = [
    ["label_1", "crimson"],
    ["label_2", "various"],
    ["1".repeat(72), "valid"],
  ].map(([label, val]) => ({
    type: "StringLikeMetric",
    cat: "jog_cat",
    id: "jog_labeled_string",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_string_with_labels() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_string",
    "jog_cat",
    "jog_labeled_string_with_labels",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ ordered_labels: ["label_1", "label_2"] })
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.jogCat.jogLabeledStringWithLabels.label_1.set("crimson");
    Glean.jogCat.jogLabeledStringWithLabels.label_2.set("various");
    // Not in ordered_labels, so it is recorded under __other__ (see below).
    Glean.jogCat.jogLabeledStringWithLabels["1".repeat(72)].set("valid");
  });
  const expected = [
    ["label_1", "crimson"],
    ["label_2", "various"],
    ["__other__", "valid"],
  ].map(([label, val]) => ({
    type: "StringLikeMetric",
    cat: "jog_cat",
    id: "jog_labeled_string_with_labels",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_string_list() {
  const value = "a cheesy string!";
  const value2 = "a cheesier string!";
  const value3 = "the cheeziest of strings.";
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnly.cheesyStringList.set([value, value2]);
    Glean.testOnly.cheesyStringList.add(value3);
  });
  // set() of the whole list renders as one bracketed value; add() renders the
  // single appended item. Note: this is a little fragile and will need to be
  // updated if we ever rearrange the items in the string list.
  const expected = [`[${value},${value2}]`, value3].map(val => ({
    type: "StringLikeMetric",
    cat: "test_only",
    id: "cheesy_string_list",
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_string_list_markers() {
  const value = "an active string!";
  const value2 = "a more active string!";
  const value3 = "the most active of strings.";
  Services.fog.testRegisterRuntimeMetric(
    "string_list",
    "jog_cat",
    "jog_string_list",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.jogCat.jogStringList.set([value, value2]);
    Glean.jogCat.jogStringList.add(value3);
  });
  // set() of the whole list renders as one bracketed value; add() renders the
  // single appended item.
  const expected = [`[${value},${value2}]`, value3].map(val => ({
    type: "StringLikeMetric",
    cat: "jog_cat",
    id: "jog_string_list",
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_timespan() {
  const markers = await runWithProfilerAndGetMarkers(
    "TimespanMetric",
    async () => {
      Glean.testOnly.canWeTimeIt.start();
      Glean.testOnly.canWeTimeIt.cancel();
      // We start, briefly sleep and then stop.
      // That guarantees some time to measure.
      Glean.testOnly.canWeTimeIt.start();
      await sleep(10);
      Glean.testOnly.canWeTimeIt.stop();
      // Set a raw value to make sure we get values in markers
      Glean.testOnly.canWeTimeIt.setRaw(100);
    }
  );
  const base = {
    type: "TimespanMetric",
    cat: "test_only",
    id: "can_we_time_it",
  };
  // start/cancel/start/stop each emit a value-less marker; setRaw carries its
  // value.
  Assert.deepEqual(markers, [base, base, base, base, { ...base, val: 100 }]);
});
add_task(async function test_jog_timespan_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "timespan",
    "jog_cat",
    "jog_timespan",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ time_unit: "millisecond" })
  );
  const markers = await runWithProfilerAndGetMarkers(
    "TimespanMetric",
    async () => {
      Glean.jogCat.jogTimespan.start();
      Glean.jogCat.jogTimespan.cancel();
      // We start, briefly sleep and then stop.
      // That guarantees some time to measure.
      Glean.jogCat.jogTimespan.start();
      await sleep(10);
      Glean.jogCat.jogTimespan.stop();
    }
  );
  // Each of the four API calls above emits one value-less marker.
  const marker = { type: "TimespanMetric", cat: "jog_cat", id: "jog_timespan" };
  Assert.deepEqual(markers, new Array(4).fill(marker));
});
add_task(
  async function test_fog_timespan_throws_on_stop_wout_start_but_still_records() {
    // Stopping an unstarted timespan records an error inside Glean, but the
    // profiler marker is still emitted.
    const markers = await runWithProfilerAndGetMarkers("TimespanMetric", () => {
      Glean.testOnly.canWeTimeIt.stop();
    });
    Assert.deepEqual(markers, [
      { type: "TimespanMetric", cat: "test_only", id: "can_we_time_it" },
    ]);
  }
);
add_task(async function test_fog_uuid() {
  const kTestUuid = "decafdec-afde-cafd-ecaf-decafdecafde";
  let generatedUuid;
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnly.whatIdIt.set(kTestUuid);
    // The generated UUID isn't stable across runs, so we can't "write it
    // down" in the expected data; instead, query Glean for what it generated
    // and compare the marker against that.
    Glean.testOnly.whatIdIt.generateAndSet();
    generatedUuid = Glean.testOnly.whatIdIt.testGetValue("test-ping");
  });
  const expected = [kTestUuid, generatedUuid].map(val => ({
    type: "StringLikeMetric",
    cat: "test_only",
    id: "what_id_it",
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_uuid_markers() {
  const kTestUuid = "decafdec-afde-cafd-ecaf-decafdecafde";
  let generatedUuid;
  Services.fog.testRegisterRuntimeMetric(
    "uuid",
    "jog_cat",
    "jog_uuid",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.jogCat.jogUuid.set(kTestUuid);
    // generateAndSet() produces an unpredictable value; fetch it back from
    // Glean so the marker can be compared against it.
    Glean.jogCat.jogUuid.generateAndSet();
    generatedUuid = Glean.jogCat.jogUuid.testGetValue();
  });
  const expected = [kTestUuid, generatedUuid].map(val => ({
    type: "StringLikeMetric",
    cat: "jog_cat",
    id: "jog_uuid",
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_datetime() {
  const value = new Date();
  const markers = await runWithProfilerAndGetMarkers("DatetimeMetric", () => {
    // getTime() is in milliseconds; the set() value is scaled up by 1000.
    Glean.testOnly.whatADate.set(value.getTime() * 1000);
  });
  Assert.deepEqual(markers, [
    {
      type: "DatetimeMetric",
      cat: "test_only",
      id: "what_a_date",
      time: value.toISOString(),
    },
  ]);
});
add_task(async function test_jog_datetime_markers() {
  const value = new Date("2020-06-11T12:00:00");
  Services.fog.testRegisterRuntimeMetric(
    "datetime",
    "jog_cat",
    "jog_datetime",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ time_unit: "nanosecond" })
  );
  const markers = await runWithProfilerAndGetMarkers("DatetimeMetric", () => {
    // getTime() is in milliseconds; the set() value is scaled up by 1000.
    Glean.jogCat.jogDatetime.set(value.getTime() * 1000);
  });
  Assert.deepEqual(markers, [
    {
      type: "DatetimeMetric",
      cat: "jog_cat",
      id: "jog_datetime",
      time: value.toISOString(),
    },
  ]);
});
add_task(async function test_fog_boolean_markers() {
  const markers = await runWithProfilerAndGetMarkers("BooleanMetric", () => {
    Glean.testOnly.canWeFlagIt.set(false);
    Glean.testOnly.canWeFlagIt.set(true);
    // Labeled boolean submetrics
    Glean.testOnly.mabelsLikeBalloons.at_parties.set(true);
    Glean.testOnly.mabelsLikeBalloons.at_funerals.set(false);
    // Invalid label, but we still expect to see a marker
    Glean.testOnly.mabelsLikeBalloons["1".repeat(72)].set(true);
  });
  const expected = [
    { id: "can_we_flag_it", val: false },
    { id: "can_we_flag_it", val: true },
    { id: "mabels_like_balloons", label: "at_parties", val: true },
    { id: "mabels_like_balloons", label: "at_funerals", val: false },
    { id: "mabels_like_balloons", label: "1".repeat(72), val: true },
  ].map(m => ({ type: "BooleanMetric", cat: "test_only", ...m }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_boolean_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "boolean",
    "jog_cat",
    "jog_bool",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("BooleanMetric", () => {
    Glean.jogCat.jogBool.set(false);
  });
  Assert.deepEqual(markers, [
    { type: "BooleanMetric", cat: "jog_cat", id: "jog_bool", val: false },
  ]);
});
add_task(async function test_fog_static_labeled_boolean_markers() {
  const markers = await runWithProfilerAndGetMarkers("BooleanMetric", () => {
    // Statically labeled boolean submetrics
    Glean.testOnly.mabelsLikeLabeledBalloons.water.set(true);
    Glean.testOnly.mabelsLikeLabeledBalloons.birthday_party.set(false);
  });
  const expected = [
    ["water", true],
    ["birthday_party", false],
  ].map(([label, val]) => ({
    type: "BooleanMetric",
    cat: "test_only",
    id: "mabels_like_labeled_balloons",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_boolean_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_boolean",
    "jog_cat",
    "jog_labeled_bool",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("BooleanMetric", () => {
    Glean.jogCat.jogLabeledBool.label_1.set(true);
    Glean.jogCat.jogLabeledBool.label_2.set(false);
    Glean.jogCat.jogLabeledBool.NowValidLabel.set(true);
    Glean.jogCat.jogLabeledBool["1".repeat(72)].set(true);
  });
  const expected = [
    ["label_1", true],
    ["label_2", false],
    ["NowValidLabel", true],
    ["1".repeat(72), true],
  ].map(([label, val]) => ({
    type: "BooleanMetric",
    cat: "jog_cat",
    id: "jog_labeled_bool",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_boolean_with_static_labels_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_boolean",
    "jog_cat",
    "jog_labeled_bool_with_labels",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ ordered_labels: ["label_1", "label_2"] })
  );
  const markers = await runWithProfilerAndGetMarkers("BooleanMetric", () => {
    Glean.jogCat.jogLabeledBoolWithLabels.label_1.set(true);
    Glean.jogCat.jogLabeledBoolWithLabels.label_2.set(false);
    // Not in ordered_labels, so it is recorded under __other__ (see below).
    Glean.jogCat.jogLabeledBoolWithLabels.label_3.set(true);
  });
  const expected = [
    ["label_1", true],
    ["label_2", false],
    ["__other__", true],
  ].map(([label, val]) => ({
    type: "BooleanMetric",
    cat: "jog_cat",
    id: "jog_labeled_bool_with_labels",
    label,
    val,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(
  {
    // Event metrics don't support profile markers in artifact builds due to
    // having no centralized metric instance storage.
    skip_if: () =>
      Services.prefs.getBoolPref("telemetry.fog.artifact_build", false),
  },
  async function test_fog_event_markers() {
    const markers = await runWithProfilerAndGetMarkers("EventMetric", () => {
      // An event without extras produces a marker with `extra` undefined.
      Glean.testOnlyIpc.noExtraEvent.record();
      Glean.testOnlyIpc.anEvent.record({
        extra1: "can set extras",
        extra2: "passing more data",
      });
      // Corner case: an extra with an `undefined` value behaves as if the
      // extra key (extra1) weren't there.
      Glean.testOnlyIpc.anEvent.record({
        extra1: undefined,
        extra2: "defined",
      });
      Glean.testOnlyIpc.eventWithExtra.record({
        extra1: "can set extras",
        extra2: 37,
        extra3_longer_name: false,
      });
      // camelCase extras work.
      Glean.testOnlyIpc.eventWithExtra.record({ extra4CamelCase: false });
      // Passing `null` works.
      Glean.testOnlyIpc.eventWithExtra.record(null);
      // Invalid extra keys don't crash; the event is not recorded, but an
      // error and a marker are.
      Glean.testOnlyIpc.eventWithExtra.record({
        extra1_nonexistent_extra: "this does not crash",
      });
      // Supplying extras when there aren't any defined results in the event
      // not being recorded, but an error is, along with a marker.
      Glean.testOnlyIpc.noExtraEvent.record({
        extra1_nonexistent_extra: "this does not crash",
      });
    });
    // Parse each marker's JSON `extra` field into an object so a deep
    // equality check works, ignoring markers without extras.
    for (const marker of markers) {
      if (marker.extra !== undefined) {
        marker.extra = JSON.parse(marker.extra);
      }
    }
    Assert.deepEqual(markers, [
      { type: "EventMetric", id: "testOnlyIpc.noExtraEvent" },
      {
        type: "EventMetric",
        id: "testOnlyIpc.anEvent",
        extra: { extra1: "can set extras", extra2: "passing more data" },
      },
      {
        type: "EventMetric",
        id: "testOnlyIpc.anEvent",
        extra: { extra2: "defined" },
      },
      {
        type: "EventMetric",
        id: "testOnlyIpc.eventWithExtra",
        extra: {
          extra3_longer_name: "false",
          extra2: "37",
          extra1: "can set extras",
        },
      },
      {
        type: "EventMetric",
        id: "testOnlyIpc.eventWithExtra",
        extra: { extra4CamelCase: "false" },
      },
      { type: "EventMetric", id: "testOnlyIpc.eventWithExtra" },
      // This event throws an error in glean, but we still record a marker
      {
        type: "EventMetric",
        id: "testOnlyIpc.eventWithExtra",
        extra: { extra1_nonexistent_extra: "this does not crash" },
      },
      // This event throws an error in glean, but we still record a marker
      {
        type: "EventMetric",
        id: "testOnlyIpc.noExtraEvent",
        extra: { extra1_nonexistent_extra: "this does not crash" },
      },
    ]);
  }
);
add_task(async function test_fog_memory_distribution() {
  const markers = await runWithProfilerAndGetMarkers("DistMetric", () => {
    Glean.testOnly.doYouRemember.accumulate(7);
    Glean.testOnly.doYouRemember.accumulate(17);
    // We'd also like to exercise the internal `accumulate_samples` marker
    // here, but the JS API doesn't support accumulating multiple samples
    // (or an array of samples):
    // Glean.testOnly.doYouRemember.accumulate([17, 2134, 543]);
    // Labeled memory distribution submetrics
    Glean.testOnly.whatDoYouRemember.twenty_years_ago.accumulate(7);
    Glean.testOnly.whatDoYouRemember.twenty_years_ago.accumulate(17);
  });
  const expected = [
    { id: "do_you_remember", sample: "7" },
    { id: "do_you_remember", sample: "17" },
    { id: "what_do_you_remember", label: "twenty_years_ago", sample: "7" },
    { id: "what_do_you_remember", label: "twenty_years_ago", sample: "17" },
  ].map(m => ({ type: "DistMetric", cat: "test_only", ...m }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_memory_distribution_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "memory_distribution",
    "jog_cat",
    "jog_memory_dist",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ memory_unit: "megabyte" })
  );
  const markers = await runWithProfilerAndGetMarkers("DistMetric", () => {
    Glean.jogCat.jogMemoryDist.accumulate(7);
    Glean.jogCat.jogMemoryDist.accumulate(17);
  });
  const expected = ["7", "17"].map(sample => ({
    type: "DistMetric",
    cat: "jog_cat",
    id: "jog_memory_dist",
    sample,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_labeled_memory_distribution_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_memory_distribution",
    "jog_cat",
    "jog_labeled_memory_dist",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ memory_unit: "megabyte" })
  );
  const markers = await runWithProfilerAndGetMarkers("DistMetric", () => {
    Glean.jogCat.jogLabeledMemoryDist.short_term.accumulate(7);
    Glean.jogCat.jogLabeledMemoryDist.short_term.accumulate(17);
  });
  const expected = ["7", "17"].map(sample => ({
    type: "DistMetric",
    cat: "jog_cat",
    id: "jog_labeled_memory_dist",
    label: "short_term",
    sample,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_custom_distribution() {
  const markers = await runWithProfilerAndGetMarkers("DistMetric", () => {
    Glean.testOnlyIpc.aCustomDist.accumulateSingleSample(120);
    Glean.testOnlyIpc.aCustomDist.accumulateSamples([7, 268435458]);
    // Negative values will not be recorded, instead an error is recorded.
    // However, we still expect to see a marker!
    Glean.testOnlyIpc.aCustomDist.accumulateSamples([-7]);
    // Labeled custom distribution submetrics
    Glean.testOnly.mabelsCustomLabelLengths.monospace.accumulateSamples([
      1, 42,
    ]);
    Glean.testOnly.mabelsCustomLabelLengths.sanserif.accumulateSingleSample(13);
    // Invalid label, but we still expect to see a marker
    Glean.testOnly.mabelsCustomLabelLengths[
      "1".repeat(72)
    ].accumulateSingleSample(3);
  });
  // Single-sample accumulations report `sample`; batches report `samples`.
  const expected = [
    { cat: "test_only.ipc", id: "a_custom_dist", sample: "120" },
    { cat: "test_only.ipc", id: "a_custom_dist", samples: "[7,268435458]" },
    { cat: "test_only.ipc", id: "a_custom_dist", samples: "[-7]" },
    {
      cat: "test_only",
      id: "mabels_custom_label_lengths",
      label: "monospace",
      samples: "[1,42]",
    },
    {
      cat: "test_only",
      id: "mabels_custom_label_lengths",
      label: "sanserif",
      sample: "13",
    },
    {
      cat: "test_only",
      id: "mabels_custom_label_lengths",
      label: "1".repeat(72),
      sample: "3",
    },
  ].map(m => ({ type: "DistMetric", ...m }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_jog_custom_distribution_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "custom_distribution",
    "jog_cat",
    "jog_custom_dist",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({
      range_min: 1,
      range_max: 2147483646,
      bucket_count: 10,
      histogram_type: "linear",
    })
  );
  const markers = await runWithProfilerAndGetMarkers("DistMetric", () => {
    Glean.jogCat.jogCustomDist.accumulateSamples([7, 268435458]);
    // Negative values will not be recorded, instead an error is recorded.
    // We still record a marker.
    Glean.jogCat.jogCustomDist.accumulateSamples([-7]);
  });
  const expected = ["[7,268435458]", "[-7]"].map(samples => ({
    type: "DistMetric",
    cat: "jog_cat",
    id: "jog_custom_dist",
    samples,
  }));
  Assert.deepEqual(markers, expected);
});
add_task(async function test_fog_timing_distribution() {
  const markers = await runWithProfilerAndGetMarkers("TimingDist", async () => {
    const t1 = Glean.testOnly.whatTimeIsIt.start();
    const t2 = Glean.testOnly.whatTimeIsIt.start();
    await sleep(5);
    const t3 = Glean.testOnly.whatTimeIsIt.start();
    Glean.testOnly.whatTimeIsIt.cancel(t1);
    await sleep(5);
    Glean.testOnly.whatTimeIsIt.stopAndAccumulate(t2); // 10ms
    Glean.testOnly.whatTimeIsIt.stopAndAccumulate(t3); // 5ms
    // samples are measured in microseconds, since that's the unit listed in metrics.yaml
    Glean.testOnly.whatTimeIsIt.accumulateSingleSample(5000); // 5ms
    Glean.testOnly.whatTimeIsIt.accumulateSamples([2000, 8000]); // 10ms
    // Labeled timing distribution submetrics
    const t4 = Glean.testOnly.whereHasTheTimeGone.west.start();
    const t5 = Glean.testOnly.whereHasTheTimeGone.west.start();
    await sleep(5);
    const t6 = Glean.testOnly.whereHasTheTimeGone.west.start();
    Glean.testOnly.whereHasTheTimeGone.west.cancel(t4);
    await sleep(5);
    Glean.testOnly.whereHasTheTimeGone.west.stopAndAccumulate(t5); // 10ms
    Glean.testOnly.whereHasTheTimeGone.west.stopAndAccumulate(t6); // 5ms
    Glean.testOnly.whereHasTheTimeGone.west.accumulateSingleSample(5000); // 5ms
    Glean.testOnly.whereHasTheTimeGone.west.accumulateSamples([2000, 8000]); // 10ms
  });
  // Helpers expanding a partial payload into a full expected marker.
  const plain = payload => ({
    type: "TimingDist",
    cat: "test_only",
    id: "what_time_is_it",
    ...payload,
  });
  const west = payload => ({
    type: "TimingDist",
    cat: "test_only",
    id: "where_has_the_time_gone",
    label: "west",
    ...payload,
  });
  Assert.deepEqual(markers, [
    plain({ timer_id: 1 }),
    plain({ timer_id: 2 }),
    plain({ timer_id: 3 }),
    plain({ timer_id: 1 }),
    plain({ timer_id: 2 }),
    plain({ timer_id: 3 }),
    plain({ sample: "5000" }),
    plain({ samples: "[2000,8000]" }),
    west({ timer_id: 1 }),
    west({ timer_id: 2 }),
    west({ timer_id: 3 }),
    west({ timer_id: 1 }),
    west({ timer_id: 2 }),
    west({ timer_id: 3 }),
    west({ sample: "5000" }),
    west({ samples: "[2000,8000]" }),
  ]);
});
add_task(async function test_jog_timing_distribution() {
  Services.fog.testRegisterRuntimeMetric(
    "timing_distribution",
    "jog_cat",
    "jog_timing_dist",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ time_unit: "microsecond" })
  );
  const markers = await runWithProfilerAndGetMarkers("TimingDist", async () => {
    const t1 = Glean.jogCat.jogTimingDist.start();
    const t2 = Glean.jogCat.jogTimingDist.start();
    await sleep(5);
    const t3 = Glean.jogCat.jogTimingDist.start();
    Glean.jogCat.jogTimingDist.cancel(t1);
    await sleep(5);
    Glean.jogCat.jogTimingDist.stopAndAccumulate(t2); // 10ms
    Glean.jogCat.jogTimingDist.stopAndAccumulate(t3); // 5ms
    // samples are measured in microseconds, since that's the unit listed in
    // the metric's registration above.
    Glean.jogCat.jogTimingDist.accumulateSingleSample(5000); // 5ms
    Glean.jogCat.jogTimingDist.accumulateSamples([2000, 8000]); // 10ms
  });
  // Helper expanding a partial payload into a full expected marker.
  const dist = payload => ({
    type: "TimingDist",
    cat: "jog_cat",
    id: "jog_timing_dist",
    ...payload,
  });
  Assert.deepEqual(markers, [
    dist({ timer_id: 1 }),
    dist({ timer_id: 2 }),
    dist({ timer_id: 3 }),
    dist({ timer_id: 1 }),
    dist({ timer_id: 2 }),
    dist({ timer_id: 3 }),
    dist({ sample: "5000" }),
    dist({ samples: "[2000,8000]" }),
  ]);
});
add_task(async function test_jog_labeled_timing_distribution() {
  // Register a runtime labeled timing distribution; submetrics are reached
  // via Glean.jogCat.jogLabeledTimingDist.<label>.
  Services.fog.testRegisterRuntimeMetric(
    "labeled_timing_distribution",
    "jog_cat",
    "jog_labeled_timing_dist",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({ time_unit: "microsecond" })
  );
  let markers = await runWithProfilerAndGetMarkers("TimingDist", async () => {
    let t1 = Glean.jogCat.jogLabeledTimingDist.label1.start();
    let t2 = Glean.jogCat.jogLabeledTimingDist.label1.start();
    await sleep(5);
    let t3 = Glean.jogCat.jogLabeledTimingDist.label1.start();
    Glean.jogCat.jogLabeledTimingDist.label1.cancel(t1);
    await sleep(5);
    Glean.jogCat.jogLabeledTimingDist.label1.stopAndAccumulate(t2); // 10ms
    Glean.jogCat.jogLabeledTimingDist.label1.stopAndAccumulate(t3); // 5ms
  });
  // Six markers (3 starts, 1 cancel, 2 stops) that differ only in timer id.
  const common = {
    type: "TimingDist",
    cat: "jog_cat",
    id: "jog_labeled_timing_dist",
    label: "label1",
  };
  Assert.deepEqual(
    markers,
    [1, 2, 3, 1, 2, 3].map(timer_id => ({ ...common, timer_id }))
  );
});
add_task(async function test_fog_quantity() {
  // A 72-character label exceeds Glean's label length limit, making it
  // invalid — but a marker is still expected for the attempt.
  const longLabel = "1".repeat(72);
  let markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.testOnly.meaningOfLife.set(42);
    // Labeled quantity submetrics
    Glean.testOnly.buttonJars.up.set(2);
    Glean.testOnly.buttonJars.curling.set(0);
    // Invalid label, but we still expect to see a marker
    Glean.testOnly.buttonJars[longLabel].set(0);
  });
  const jar = { type: "IntLikeMetric", cat: "test_only", id: "button_jars" };
  Assert.deepEqual(markers, [
    { type: "IntLikeMetric", cat: "test_only", id: "meaning_of_life", val: 42 },
    { ...jar, label: "up", val: 2 },
    { ...jar, label: "curling", val: 0 },
    { ...jar, label: longLabel, val: 0 },
  ]);
});
add_task(async function test_jog_quantity_markers() {
  // Register a runtime quantity metric, then confirm that a single set()
  // records exactly one IntLikeMetric marker with the set value.
  Services.fog.testRegisterRuntimeMetric(
    "quantity",
    "jog_cat",
    "jog_quantity",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.jogCat.jogQuantity.set(42);
  });
  const expected = {
    type: "IntLikeMetric",
    cat: "jog_cat",
    id: "jog_quantity",
    val: 42,
  };
  Assert.deepEqual(markers, [expected]);
});
add_task(async function test_jog_labeled_quantity_markers() {
  Services.fog.testRegisterRuntimeMetric(
    "labeled_quantity",
    "jog_cat",
    "jog_labeled_quantity",
    ["test-ping"],
    `"ping"`,
    false
  );
  Assert.equal(
    undefined,
    Glean.jogCat.jogLabeledQuantity.label_1.testGetValue(),
    "New labels with no values should return undefined"
  );
  // Over-long (72-char) label: invalid, but still expected to mark.
  const longLabel = "1".repeat(72);
  let markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    Glean.jogCat.jogLabeledQuantity.label_1.set(9000);
    Glean.jogCat.jogLabeledQuantity.label_2.set(0);
    Glean.jogCat.jogLabeledQuantity.NowValidLabel.set(100);
    Glean.jogCat.jogLabeledQuantity[longLabel].set(true);
  });
  const common = {
    type: "IntLikeMetric",
    cat: "jog_cat",
    id: "jog_labeled_quantity",
  };
  Assert.deepEqual(markers, [
    { ...common, label: "label_1", val: 9000 },
    { ...common, label: "label_2", val: 0 },
    { ...common, label: "NowValidLabel", val: 100 },
    // set(true) shows up in the marker as the integer 1.
    { ...common, label: longLabel, val: 1 },
  ]);
});
add_task(async function test_fog_rate() {
  let markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    // 1) Standard rate with internal denominator
    Glean.testOnlyIpc.irate.addToNumerator(22);
    Glean.testOnlyIpc.irate.addToDenominator(7);
    // 2) Rate with external denominator
    Glean.testOnlyIpc.anExternalDenominator.add(11);
    Glean.testOnlyIpc.rateWithExternalDenominator.addToNumerator(121);
  });
  // Each add* call above produces one IntLikeMetric marker, in call order.
  const marker = (id, val) => ({
    type: "IntLikeMetric",
    cat: "test_only.ipc",
    id,
    val,
  });
  Assert.deepEqual(markers, [
    marker("irate", 22),
    marker("irate", 7),
    marker("an_external_denominator", 11),
    marker("rate_with_external_denominator", 121),
  ]);
});
add_task(async function test_jog_rate_markers() {
  // Verify that runtime ("JOG") rate metrics emit IntLikeMetric markers for
  // both the internal-denominator and external-denominator forms.
  // A rate metric with its own internal denominator.
  Services.fog.testRegisterRuntimeMetric(
    "rate",
    "jog_cat",
    "jog_rate",
    ["test-ping"],
    `"ping"`,
    false
  );
  // A standalone denominator metric; its `numerators` option wires it up as
  // the external denominator for `jog_cat.jog_rate_ext` (registered below).
  Services.fog.testRegisterRuntimeMetric(
    "denominator",
    "jog_cat",
    "jog_denominator",
    ["test-ping"],
    `"ping"`,
    false,
    JSON.stringify({
      numerators: [
        {
          name: "jog_rate_ext",
          category: "jog_cat",
          send_in_pings: ["test-ping"],
          lifetime: "ping",
          disabled: false,
        },
      ],
    })
  );
  // The rate metric whose denominator is `jog_denominator` above.
  Services.fog.testRegisterRuntimeMetric(
    "rate",
    "jog_cat",
    "jog_rate_ext",
    ["test-ping"],
    `"ping"`,
    false
  );
  let markers = await runWithProfilerAndGetMarkers("IntLikeMetric", () => {
    // 1) Standard rate with internal denominator
    Glean.jogCat.jogRate.addToNumerator(22);
    Glean.jogCat.jogRate.addToDenominator(7);
    // 2) Rate with external denominator
    Glean.jogCat.jogDenominator.add(11);
    Glean.jogCat.jogRateExt.addToNumerator(121);
  });
  // One marker per add* call, in call order.
  Assert.deepEqual(markers, [
    { type: "IntLikeMetric", cat: "jog_cat", id: "jog_rate", val: 22 },
    { type: "IntLikeMetric", cat: "jog_cat", id: "jog_rate", val: 7 },
    { type: "IntLikeMetric", cat: "jog_cat", id: "jog_denominator", val: 11 },
    { type: "IntLikeMetric", cat: "jog_cat", id: "jog_rate_ext", val: 121 },
  ]);
});
add_task(async function test_fog_url() {
  const value = "https://www.example.com/fog";
  const markers = await runWithProfilerAndGetMarkers("UrlMetric", () => {
    Glean.testOnlyIpc.aUrl.set(value);
  });
  // The URL marker records only the metric identity — no value field.
  const expected = { type: "UrlMetric", cat: "test_only.ipc", id: "a_url" };
  Assert.deepEqual(markers, [expected]);
});
add_task(async function test_fog_text() {
  const value =
    "Before the risin' sun, we fly, So many roads to choose, We'll start out walkin' and learn to run, (We've only just begun)";
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnlyIpc.aText.set(value);
  });
  // Text markers carry the full string value.
  const expected = {
    type: "StringLikeMetric",
    cat: "test_only.ipc",
    id: "a_text",
    val: value,
  };
  Assert.deepEqual(markers, [expected]);
});
add_task(async function test_jog_text() {
  // Non-ASCII characters in the value exercise string handling end-to-end.
  const value =
    "In the heart of the Opéra district in Paris, the Cédric Grolet Opéra bakery-pastry shop is a veritable temple of gourmet delights.";
  Services.fog.testRegisterRuntimeMetric(
    "text",
    "test_only.jog",
    "a_text",
    ["test-ping"],
    `"ping"`,
    false
  );
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnlyJog.aText.set(value);
  });
  const expected = {
    type: "StringLikeMetric",
    cat: "test_only.jog",
    id: "a_text",
    val: value,
  };
  Assert.deepEqual(markers, [expected]);
});
add_task(async function test_fog_text_unusual_character() {
  const value =
    "The secret to Dominique Ansel's viennoiserie is the use of Isigny Sainte-Mère butter and Les Grands Moulins de Paris flour";
  const markers = await runWithProfilerAndGetMarkers("StringLikeMetric", () => {
    Glean.testOnlyIpc.aText.set(value);
  });
  // Accented characters must round-trip through the marker string table.
  const expected = {
    type: "StringLikeMetric",
    cat: "test_only.ipc",
    id: "a_text",
    val: value,
  };
  Assert.deepEqual(markers, [expected]);
});
add_task(
  {
    // Object metrics don't support profile markers in artifact builds due to
    // having no centralized metric instance storage.
    skip_if: () =>
      Services.prefs.getBoolPref("telemetry.fog.artifact_build", false),
  },
  async function test_fog_object_markers() {
    // Verify that object metric set() calls produce ObjectMetric markers
    // whose JSON `value` reflects Glean's coercion rules, and that markers
    // are emitted even for invalid objects.
    if (!Glean.testOnly.balloons) {
      // FIXME(bug 1883857): object metric type not available, e.g. in artifact builds.
      // Skipping this test.
      return;
    }
    let markers = await runWithProfilerAndGetMarkers("ObjectMetric", () => {
      let balloons = [
        { colour: "red", diameter: 5 },
        { colour: "blue", diameter: 7 },
        { colour: "orange" },
      ];
      Glean.testOnly.balloons.set(balloons);
      // These values are coerced to null or removed.
      balloons = [
        { colour: "inf", diameter: Infinity },
        { colour: "negative-inf", diameter: -1 / 0 },
        { colour: "nan", diameter: NaN },
        { colour: "undef", diameter: undefined },
      ];
      Glean.testOnly.balloons.set(balloons);
      // colour != color.
      // This is invalid, but still produces a marker!
      let invalid = [{ color: "orange" }, { color: "red", diameter: "small" }];
      Glean.testOnly.balloons.set(invalid);
      Services.fog.testResetFOG();
      // set again to ensure it's stored
      balloons = [
        { colour: "red", diameter: 5 },
        { colour: "blue", diameter: 7 },
      ];
      Glean.testOnly.balloons.set(balloons);
      // Again, invalid, but produces a marker
      invalid = [{ colour: "red", diameter: 5, extra: "field" }];
      Glean.testOnly.balloons.set(invalid);
      // More complex objects:
      Glean.testOnly.crashStack.set({});
      let stack = {
        status: "OK",
        crash_info: {
          typ: "main",
          address: "0xf001ba11",
          crashing_thread: 1,
        },
        main_module: 0,
        modules: [
          {
            base_addr: "0x00000000",
            end_addr: "0x00004000",
          },
        ],
      };
      Glean.testOnly.crashStack.set(stack);
      stack = {
        status: "OK",
        modules: [
          {
            base_addr: "0x00000000",
            end_addr: "0x00004000",
          },
        ],
      };
      Glean.testOnly.crashStack.set(stack);
      stack = {
        status: "OK",
        modules: [],
      };
      Glean.testOnly.crashStack.set(stack);
      stack = {
        status: "OK",
      };
      Glean.testOnly.crashStack.set(stack);
    });
    // One expected marker per set() call above, in call order.
    let expected_markers = [
      {
        type: "ObjectMetric",
        id: "testOnly.balloons",
        value: [
          { colour: "red", diameter: 5 },
          { colour: "blue", diameter: 7 },
          { colour: "orange" },
        ],
      },
      // Check that values are coerced or removed
      {
        type: "ObjectMetric",
        id: "testOnly.balloons",
        value: [
          { colour: "inf", diameter: null },
          { colour: "negative-inf", diameter: null },
          { colour: "nan", diameter: null },
          { colour: "undef" },
        ],
      },
      // Invalid glean object, but still produces a marker
      {
        type: "ObjectMetric",
        id: "testOnly.balloons",
        value: [{ color: "orange" }, { color: "red", diameter: "small" }],
      },
      {
        type: "ObjectMetric",
        id: "testOnly.balloons",
        value: [
          { colour: "red", diameter: 5 },
          { colour: "blue", diameter: 7 },
        ],
      },
      // Invalid glean object, but still produces a marker
      {
        type: "ObjectMetric",
        id: "testOnly.balloons",
        value: [{ colour: "red", diameter: 5, extra: "field" }],
      },
      {
        type: "ObjectMetric",
        id: "testOnly.crashStack",
        value: {},
      },
      {
        type: "ObjectMetric",
        id: "testOnly.crashStack",
        value: {
          status: "OK",
          crash_info: {
            typ: "main",
            address: "0xf001ba11",
            crashing_thread: 1,
          },
          main_module: 0,
          modules: [{ base_addr: "0x00000000", end_addr: "0x00004000" }],
        },
      },
      {
        type: "ObjectMetric",
        id: "testOnly.crashStack",
        value: {
          status: "OK",
          modules: [{ base_addr: "0x00000000", end_addr: "0x00004000" }],
        },
      },
      // Modules gets erased within Glean, but it still shows up in a marker
      {
        type: "ObjectMetric",
        id: "testOnly.crashStack",
        value: { status: "OK", modules: [] },
      },
      {
        type: "ObjectMetric",
        id: "testOnly.crashStack",
        value: { status: "OK" },
      },
    ];
    // Parse the `value` field of each marker into a JS object so that we can do
    // a deep equality check, ignoring undefined values.
    markers.forEach(m => {
      if (m.value !== undefined) {
        m.value = JSON.parse(m.value);
      }
    });
    Assert.deepEqual(markers, expected_markers);
  }
);
add_task(
  // FIXME(1898464): ride-along pings are not handled correctly in artifact builds.
  {
    skip_if: () =>
      Services.prefs.getBoolPref("telemetry.fog.artifact_build", false),
  },
  async function test_fog_ping_markers() {
    let markers = await runWithProfilerAndGetMarkers("GleanPing", () => {
      // Do stuff that will generate Ping submissions:
      // From test_fog_custom_pings:
      Glean.testOnly.onePingOneBool.set(false);
      GleanPings.onePingOnly.submit();
      // From test_recursive_testBeforeNextSubmit:
      Glean.testOnly.onePingOneBool.set(false);
      GleanPings.onePingOnly.submit();
      GleanPings.onePingOnly.submit();
      GleanPings.onePingOnly.submit();
      // From test_fog_ride_along_pings:
      Glean.testOnly.badCode.add(37);
      GleanPings.testPing.submit();
    });
    // Four "one-ping-only" submissions, then the "test-ping" submission.
    Assert.deepEqual(markers, [
      ...Array(4).fill({ type: "GleanPing", id: "one-ping-only" }),
      { type: "GleanPing", id: "test-ping" },
    ]);
  }
);