<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
</head>
<body>
<pre id="test">
<script>

createHTML({
  bug: "1259788",
  title: "Test CaptureStream track output on HTMLMediaElement playing a gUM MediaStream",
  visible: true
});

var audioElement;
var audioCaptureStream;
var videoElement;
var videoCaptureStream;
var untilEndedElement;
var streamUntilEnded;
var tracks = [];
runTest(() => getUserMedia({audio: true, video: true})
  .then(stream => {
    // We need to test with multiple tracks. We add an extra track of each kind.
    stream.getTracks().forEach(t => stream.addTrack(t.clone()));

    audioElement = createMediaElement("audio", "gUMAudio");
    audioElement.srcObject = stream;

    return haveEvent(audioElement, "loadedmetadata", wait(50000, new Error("Timeout")));
  })
  .then(() => {
    info("Capturing audio element (loadedmetadata -> captureStream)");
    audioCaptureStream = audioElement.mozCaptureStream();

    is(audioCaptureStream.getAudioTracks().length, 2,
       "audio element should capture two audio tracks");
    is(audioCaptureStream.getVideoTracks().length, 0,
       "audio element should not capture any video tracks");

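    // The capture stream was created after metadata was known, so both audio
    // tracks are already present and no further addtrack events should fire.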
    return haveNoEvent(audioCaptureStream, "addtrack");
  })
  .then(() => {
    videoElement = createMediaElement("video", "gUMVideo");

    info("Capturing video element (captureStream -> loadedmetadata)");
    videoCaptureStream = videoElement.mozCaptureStream();
    videoElement.srcObject = audioElement.srcObject.clone();

    is(videoCaptureStream.getTracks().length, 0,
       "video element should have no tracks before metadata known");

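    // Expect three addtrack events: both audio tracks plus the single video
    // track the element selects for playback.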
    return haveEventsButNoMore(
        videoCaptureStream, "addtrack", 3, wait(50000, new Error("No event")));
  })
  .then(() => {
    is(videoCaptureStream.getAudioTracks().length, 2,
       "video element should capture two audio tracks");
    is(videoCaptureStream.getVideoTracks().length, 1,
       "video element should capture one video track at most");

    info("Testing dynamically adding audio track to audio element");
    audioElement.srcObject.addTrack(
        audioElement.srcObject.getAudioTracks()[0].clone());
    return haveEventsButNoMore(
        audioCaptureStream, "addtrack", 1, wait(50000, new Error("No event")));
  })
  .then(() => {
    is(audioCaptureStream.getAudioTracks().length, 3,
       "Audio element should have three audio tracks captured.");

    info("Testing dynamically adding video track to audio element");
    audioElement.srcObject.addTrack(
        audioElement.srcObject.getVideoTracks()[0].clone());
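    // An audio element does not capture video, so no addtrack event is expected.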
    return haveNoEvent(audioCaptureStream, "addtrack");
  })
  .then(() => {
    is(audioCaptureStream.getVideoTracks().length, 0,
       "Audio element should have no video tracks captured.");

    info("Testing dynamically adding audio track to video element");
    videoElement.srcObject.addTrack(
        videoElement.srcObject.getAudioTracks()[0].clone());
    return haveEventsButNoMore(
        videoCaptureStream, "addtrack", 1, wait(50000, new Error("Timeout")));
  })
  .then(() => {
    is(videoCaptureStream.getAudioTracks().length, 3,
       "Captured video stream should have three audio tracks captured.");

    info("Testing dynamically adding video track to video element");
    videoElement.srcObject.addTrack(
        videoElement.srcObject.getVideoTracks()[0].clone());
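    // Only the selected video track is captured, so the extra video track
    // should not trigger an addtrack event.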
    return haveNoEvent(videoCaptureStream, "addtrack");
  })
  .then(() => {
    is(videoCaptureStream.getVideoTracks().length, 1,
       "Captured video stream should have at most one video tracks captured.");

    info("Testing track removal.");
    tracks.push(...videoElement.srcObject.getTracks());
    videoElement.srcObject.getVideoTracks().reverse().forEach(t =>
        videoElement.srcObject.removeTrack(t));
    is(videoCaptureStream.getVideoTracks()
                         .filter(t => t.readyState == "live").length, 1,
       "Captured video should have still have one video track captured.");

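    // Removing the source video track should end the corresponding captured track.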
    return haveEvent(videoCaptureStream.getVideoTracks()[0], "ended",
                     wait(50000, new Error("Timeout")));
  })
  .then(() => {
    is(videoCaptureStream.getVideoTracks()
                         .filter(t => t.readyState == "live").length, 0,
       "Captured video stream should have no video tracks captured after removal.");

    info("Testing source reset.");
  })
  .then(() => getUserMedia({audio: true, video: true}))
  .then(stream => {
    videoElement.srcObject = stream;
    return Promise.all(videoCaptureStream.getTracks()
        .filter(t => t.readyState == "live")
        .map(t => haveEvent(t, "ended", wait(50000, new Error("Timeout")))));
  })
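  // The new gUM stream should add one audio and one video track to the capture.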
  .then(() => haveEventsButNoMore(
                  videoCaptureStream, "addtrack", 2, wait(50000, new Error("Timeout"))))
  .then(() => {
    is(videoCaptureStream.getAudioTracks()
        .filter(t => t.readyState == "ended").length, 3,
       "Captured video stream should have three ended audio tracks");
    is(videoCaptureStream.getAudioTracks()
        .filter(t => t.readyState == "live").length, 1,
       "Captured video stream should have one live audio track");

    is(videoCaptureStream.getVideoTracks()
        .filter(t => t.readyState == "ended").length, 1,
       "Captured video stream should have one ended video tracks");
    is(videoCaptureStream.getVideoTracks()
        .filter(t => t.readyState == "live").length, 1,
       "Captured video stream should have one live video track");

    info("Testing CaptureStreamUntilEnded");
    untilEndedElement =
      createMediaElement("video", "gUMVideoUntilEnded");
    untilEndedElement.srcObject = audioElement.srcObject;

    return haveEvent(untilEndedElement, "loadedmetadata",
                     wait(50000, new Error("Timeout")));
  })
  .then(() => {
    streamUntilEnded = untilEndedElement.mozCaptureStreamUntilEnded();

    is(streamUntilEnded.getAudioTracks().length, 3,
       "video element should capture all 3 audio tracks until ended");
    is(streamUntilEnded.getVideoTracks().length, 1,
       "video element should capture only 1 video track until ended");

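    // Stopping all source tracks should end the element and every captured track.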
    untilEndedElement.srcObject.getTracks().forEach(t => t.stop());

    return Promise.all([
      haveEvent(untilEndedElement, "ended", wait(50000, new Error("Timeout"))),
      ...streamUntilEnded.getTracks()
           .map(t => haveEvent(t, "ended", wait(50000, new Error("Timeout"))))
    ]);
  })
  .then(() => {
    info("Element and tracks ended. Ensuring that new tracks aren't created.");
    untilEndedElement.srcObject = videoElement.srcObject;
    return haveEventsButNoMore(
        untilEndedElement, "loadedmetadata", 1, wait(50000, new Error("Timeout")));
  })
  .then(() => is(streamUntilEnded.getTracks().length, 4,
                 "Should still have 4 tracks"))
  .catch(e => ok(false, "Test failed: " + e + (e && e.stack ? "\n" + e.stack : "")))
  .then(() => [...tracks,
               ...videoElement.srcObject.getTracks()].forEach(t => t.stop())));

</script>
</pre>
</body>
</html>