
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script type="module">
"use strict";
function tryToCreateNodeOnClosedContext(ctx) {
assert_equals(ctx.state, "closed", "The context is in closed state");
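// Each entry below names a factory method that must not throw on a closed
// context. Entries flagged with `onOfflineAudioContext: false` are skipped
// for OfflineAudioContext, since those factories only exist on AudioContext.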
[
{ name: "createBufferSource" },
{
name: "createMediaStreamDestination",
onOfflineAudioContext: false,
},
{ name: "createScriptProcessor" },
{ name: "createStereoPanner" },
{ name: "createAnalyser" },
{ name: "createGain" },
{ name: "createDelay" },
{ name: "createBiquadFilter" },
{ name: "createWaveShaper" },
{ name: "createPanner" },
{ name: "createConvolver" },
{ name: "createChannelSplitter" },
{ name: "createChannelMerger" },
{ name: "createDynamicsCompressor" },
{ name: "createOscillator" },
{
name: "createMediaElementSource",
args: [new Audio()],
onOfflineAudioContext: false,
},
{
name: "createMediaStreamSource",
args: [new AudioContext().createMediaStreamDestination().stream],
onOfflineAudioContext: false,
},
].forEach(function (e) {
if (
e.onOfflineAudioContext == false &&
ctx instanceof OfflineAudioContext
) {
return;
}
try {
ctx[e.name].apply(ctx, e.args);
} catch (err) {
assert_true(false, "unexpected exception thrown for " + e.name);
}
});
}
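// Fetch `url` as an ArrayBuffer. Note that the promise only settles on a
// successful load; a network error would leave it pending, which is
// acceptable in the test environment.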
function loadFile(url) {
return new Promise((resolve) => {
var xhr = new XMLHttpRequest();
xhr.open("GET", url, true);
xhr.responseType = "arraybuffer";
xhr.onload = function () {
resolve(xhr.response);
};
xhr.send();
});
}
// createBuffer, createPeriodicWave and decodeAudioData should work on a
// context whose state is "closed".
async function tryLegalOperationsOnClosedContext(ctx) {
assert_equals(ctx.state, "closed", "The context is in closed state");
[
{ name: "createBuffer", args: [1, 44100, 44100] },
{
name: "createPeriodicWave",
args: [new Float32Array(10), new Float32Array(10)],
},
].forEach(function (e) {
try {
ctx[e.name].apply(ctx, e.args);
} catch (err) {
assert_true(false, "unexpected exception thrown");
}
});
var buf = await loadFile("/webaudio/resources/sin_440Hz_-6dBFS_1s.wav");
return ctx
.decodeAudioData(buf)
.then(function (decodedBuf) {
assert_true(
true,
"decodeAudioData on a closed context should work, and it did."
);
})
.catch(function (e) {
assert_unreached(
"decodeAudioData on a closed context should work, but failed with: " + e
);
});
}
// Test that MediaStreams that are the output of a suspended AudioContext
// produce silence.
// ac1 produces a sine wave fed to a MediaStreamAudioDestinationNode.
// ac2 is connected to ac1 via a MediaStreamAudioSourceNode, and we check
// that silence is produced while ac1 is suspended.
async function testMultiContextOutput() {
var ac1 = new AudioContext(),
ac2 = new AudioContext();
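// Wait for ac1 to reach the "running" state before suspending it, so the
// suspend below is a real running -> suspended transition.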
await new Promise((resolve) => (ac1.onstatechange = resolve));
ac1.onstatechange = null;
await ac1.suspend();
assert_equals(ac1.state, "suspended", "ac1 is suspended");
var osc1 = ac1.createOscillator(),
mediaStreamDestination1 = ac1.createMediaStreamDestination();
var mediaStreamAudioSourceNode2 = ac2.createMediaStreamSource(
mediaStreamDestination1.stream
),
sp2 = ac2.createScriptProcessor(),
silentBuffersInARow = 0;
osc1.connect(mediaStreamDestination1);
mediaStreamAudioSourceNode2.connect(sp2);
osc1.start();
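// Skip the first audioprocess buffer to let the graph settle, then count
// consecutive silent buffers.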
await new Promise((resolve) => (sp2.onaudioprocess = resolve));
while (true) {
let e = await new Promise(
(resolve) => (sp2.onaudioprocess = resolve)
);
var input = e.inputBuffer.getChannelData(0);
var silent = true;
for (var i = 0; i < input.length; i++) {
if (input[i] != 0.0) {
silent = false;
}
}
if (silent) {
silentBuffersInARow++;
if (silentBuffersInARow == 10) {
assert_true(
true,
"MediaStreams produce silence when their input is blocked."
);
break;
}
} else {
assert_equals(
silentBuffersInARow,
0,
"No non silent buffer inbetween silent buffers."
);
}
}
sp2.onaudioprocess = null;
ac1.close();
ac2.close();
}
// Test that there is no buffering between contexts when connecting a running
// AudioContext to a suspended AudioContext. Gecko's ScriptProcessorNode does
// some buffering internally, so we ensure this by using a very low frequency
// sine wave, and observe that the phase has advanced by a big enough margin.
async function testMultiContextInput() {
var ac1 = new AudioContext(),
ac2 = new AudioContext();
await new Promise((resolve) => (ac1.onstatechange = resolve));
ac1.onstatechange = null;
var osc1 = ac1.createOscillator(),
mediaStreamDestination1 = ac1.createMediaStreamDestination(),
sp1 = ac1.createScriptProcessor();
var mediaStreamAudioSourceNode2 = ac2.createMediaStreamSource(
mediaStreamDestination1.stream
),
sp2 = ac2.createScriptProcessor(),
eventReceived = 0;
osc1.frequency.value = 0.0001;
osc1.connect(mediaStreamDestination1);
osc1.connect(sp1);
mediaStreamAudioSourceNode2.connect(sp2);
osc1.start();
var e = await new Promise((resolve) => (sp2.onaudioprocess = resolve));
var inputBuffer1 = e.inputBuffer.getChannelData(0);
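// Stash the last sample of this buffer on sp2 (an ad-hoc expando property,
// not part of the AudioNode API) so the phase can be compared after the
// suspend/resume cycle below.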
sp2.value = inputBuffer1[inputBuffer1.length - 1];
await ac2.suspend();
await ac2.resume();
while (true) {
var e = await new Promise(
(resolve) => (sp2.onaudioprocess = resolve)
);
var inputBuffer = e.inputBuffer.getChannelData(0);
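// After three more buffers, the sine's phase should have advanced by about
// 2 * PI * frequency * (3 * bufferSize) / sampleRate. If the stream had
// been buffered across the suspension, the observed delta would be smaller.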
if (eventReceived++ == 3) {
var delta = Math.abs(inputBuffer[1] - sp2.value),
theoreticalIncrement =
(sp2.bufferSize * 3 * Math.PI * 2 * osc1.frequency.value) /
ac1.sampleRate;
assert_true(
delta >= theoreticalIncrement,
"Buffering did not occur when the context was suspended (delta:" +
delta +
" increment: " +
theoreticalIncrement +
")"
);
break;
}
}
ac1.close();
ac2.close();
sp1.onaudioprocess = null;
sp2.onaudioprocess = null;
}
// Take an AudioContext, make sure it switches to running when the audio starts
// flowing, and then, call suspend, resume and close on it, tracking its state.
async function testAudioContext() {
var ac = new AudioContext();
assert_equals(
ac.state,
"suspended",
"AudioContext should start in suspended state."
);
var stateTracker = {
previous: ac.state,
// no promise for the initial suspended -> running
initial: { handler: false },
suspend: { promise: false, handler: false },
resume: { promise: false, handler: false },
close: { promise: false, handler: false },
};
await new Promise((resolve) => (ac.onstatechange = resolve));
assert_true(
stateTracker.previous == "suspended" && ac.state == "running",
'AudioContext should switch to "running" when the audio hardware is' +
" ready."
);
stateTracker.previous = ac.state;
stateTracker.initial.handler = true;
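// For each transition below, install the statechange listener first, then
// await the state-changing method: its promise must settle before the
// statechange handler runs.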
let promise_statechange_suspend = new Promise((resolve) => {
ac.onstatechange = resolve;
}).then(() => {
stateTracker.suspend.handler = true;
});
await ac.suspend();
assert_true(
!stateTracker.suspend.handler,
"Promise should be resolved before the callback."
);
assert_equals(
ac.state,
"suspended",
'AudioContext should switch to "suspended" when the audio stream is ' +
"suspended."
);
await promise_statechange_suspend;
stateTracker.previous = ac.state;
let promise_statechange_resume = new Promise((resolve) => {
ac.onstatechange = resolve;
}).then(() => {
stateTracker.resume.handler = true;
});
await ac.resume();
assert_true(
!stateTracker.resume.handler,
"Promise should be resolved before the callback."
);
assert_equals(
ac.state,
"running",
'AudioContext should switch to "running" when the audio stream is ' +
"resumed."
);
await promise_statechange_resume;
stateTracker.previous = ac.state;
let promise_statechange_close = new Promise((resolve) => {
ac.onstatechange = resolve;
}).then(() => {
stateTracker.close.handler = true;
});
await ac.close();
assert_true(
!stateTracker.close.handler,
"Promise should be resolved before the callback."
);
assert_equals(
ac.state,
"closed",
'AudioContext should switch to "closed" when the audio stream is ' +
"closed."
);
await promise_statechange_close;
stateTracker.previous = ac.state;
tryToCreateNodeOnClosedContext(ac);
await tryLegalOperationsOnClosedContext(ac);
}
async function testOfflineAudioContext() {
var o = new OfflineAudioContext(1, 44100, 44100);
assert_equals(
o.state,
"suspended",
"OfflineAudioContext should start in suspended state."
);
var previousState = o.state,
finishedRendering = false;
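// startRendering() resolves when rendering completes; track it so we can
// check that it settles before the final statechange fires.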
o.startRendering().then(function (buffer) {
finishedRendering = true;
});
await new Promise((resolve) => (o.onstatechange = resolve));
assert_true(
previousState == "suspended" && o.state == "running",
"onstatechanged" +
"handler is called on state changed, and the new state is running"
);
previousState = o.state;
await new Promise((resolve) => (o.onstatechange = resolve));
assert_true(
previousState == "running" && o.state == "closed",
"onstatechanged handler is called when rendering finishes, " +
"and the new state is closed"
);
assert_true(
finishedRendering,
"The Promise that is resolved when the rendering is " +
"done should be resolved earlier than the state change."
);
previousState = o.state;
function afterRenderingFinished() {
assert_unreached(
"There should be no transition out of the closed state."
);
}
o.onstatechange = afterRenderingFinished;
tryToCreateNodeOnClosedContext(o);
await tryLegalOperationsOnClosedContext(o);
}
async function testSuspendResumeEventLoop() {
var ac = new AudioContext();
var source = ac.createBufferSource();
source.buffer = ac.createBuffer(1, 44100, 44100);
await new Promise((resolve) => (ac.onstatechange = resolve));
ac.onstatechange = null;
assert_true(ac.state == "running", "initial state is running");
await ac.suspend();
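// Start the source while suspended, then resume without awaiting: the
// "ended" event below can only fire if the resume actually takes effect.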
source.start();
ac.resume();
await new Promise((resolve) => (source.onended = resolve));
assert_true(true, "The AudioContext did resume");
}
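// Calling resume() from inside a statechange handler must still return a
// promise that resolves.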
function testResumeInStateChangeForResumeCallback() {
return new Promise((resolve) => {
var ac = new AudioContext();
ac.onstatechange = function () {
ac.resume().then(() => {
assert_true(true, "resume promise resolved as expected.");
resolve();
});
};
});
}
var tests = [
testOfflineAudioContext,
testSuspendResumeEventLoop,
testResumeInStateChangeForResumeCallback,
testAudioContext,
testMultiContextOutput,
testMultiContextInput,
];
tests.forEach(function (f) {
promise_test(f, f.name);
});
</script>
</head>
</html>