DXR is a code search and navigation tool aimed at making sense of large projects. It supports full-text and regex searches as well as structural queries.

Mercurial (5350524bb654)

VCS Links

Line Code
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130
<!DOCTYPE HTML>
<html>
<head>
  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
  <script type="application/javascript" src="mediaStreamPlayback.js"></script>
  <script type="application/javascript" src="head.js"></script>
</head>
<body>
<pre id="test">
<script>

// Register this test's metadata with the mochitest page scaffolding.
const testPageConfig = {
  bug: "1259788",
  title: "Test CaptureStream video content on HTMLMediaElement playing a gUM MediaStream",
  visible: true
};
createHTML(testPageConfig);

// Media elements driving the test; assigned inside runTest below.
// `let` instead of `var`: block-scoped, no accidental window property.
let gUMVideoElement;
let captureStreamElement;

// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 20;
const offsetY = 20;
// Tolerance handed to the h.is*Pixel* comparison helpers.
const threshold = 16;
// Timeout (ms) passed to waitForPixel in checkVideoPaused; also quoted
// in that check's assertion message.
const pausedTimeout = 1000;
// 2D capture-stream helper drawing frames onto a 50x50 canvas for
// pixel inspection.
const h = new CaptureStreamTestHelper2D(50, 50);

// Resolves once `video` renders a frame whose pixel at (offsetX, offsetY)
// is opaque and differs from black, i.e. real content has been painted.
var checkHasFrame = video =>
  h.waitForPixel(video, offsetX, offsetY, px => {
    const pass = h.isOpaquePixelNot(px, h.black, threshold);
    info("Checking that we have a frame, got [" +
         Array.slice(px) + "]. Pass=" + pass);
    return pass;
  });

// Resolves once `video` has rendered a frame AND that frame subsequently
// changes at the probed pixel — i.e. frames are actually flowing.
var checkVideoPlaying = video => checkHasFrame(video)
  .then(() => {
    // Snapshot the current pixel so we can detect any change from it.
    let startPixel = { data: h.getPixel(video, offsetX, offsetY)
                     , name: "startcolor"
                     };
    return h.waitForPixel(video, offsetX, offsetY, px => {
      // Fixed: statement previously relied on automatic semicolon insertion.
      let result = h.isPixelNot(px, startPixel, threshold);
      info("Checking playing, [" + Array.slice(px) + "] vs [" +
           Array.slice(startPixel.data) + "]. Pass=" + result);
      return result;
    });
  });

// Resolves once `video` has a frame and that frame then does NOT change
// within `pausedTimeout` ms. NOTE(review): this relies on waitForPixel
// resolving falsy when the timeout elapses without the predicate matching —
// the ok(!result) assertion below encodes that expectation.
var checkVideoPaused = video => checkHasFrame(video)
  .then(() => {
    // Snapshot the current pixel; a later match of the predicate would
    // mean the frame changed, which a paused video must not do.
    const startPixel = { data: h.getPixel(video, offsetX, offsetY)
                       , name: "startcolor"
                       };
    return h.waitForPixel(video, offsetX, offsetY, px => {
      const changed = h.isOpaquePixelNot(px, startPixel, threshold);
      info("Checking paused, [" + Array.slice(px) + "] vs [" +
           Array.slice(startPixel.data) + "]. Pass=" + changed);
      return changed;
    }, pausedTimeout);
  })
  .then(result =>
    ok(!result, "Frame shouldn't change within " + pausedTimeout / 1000 + " seconds."));

// Drive the test: start a gUM stream, mirror it through mozCaptureStream
// into a second media element, and verify the captured video reacts to
// play/pause and to source/track changes on the source element.
runTest(() => {
  // Threaded through the chain via closure instead of .then() pass-through.
  let sourceStream;  // the gUM stream, kept so it can be re-attached
  let sourceTrack;   // the gUM video track, kept so it can be re-added

  return getUserMedia({video: true, fake: true})
    .then(stream => {
      gUMVideoElement = createMediaElement("video", "gUMVideo");
      gUMVideoElement.srcObject = stream;
      gUMVideoElement.play();

      info("Capturing");
      captureStreamElement = createMediaElement("video", "captureStream");
      captureStreamElement.srcObject = gUMVideoElement.mozCaptureStream();
      captureStreamElement.play();

      // A dummy audio track keeps the consuming media element from ending.
      // Repeatedly play()ing (or autoplay) would also work, but then we
      // couldn't tell whether rendering stopped because the element ended
      // or because no frames arrived for the track.
      const osc = createOscillatorStream(new AudioContext(), 1000);
      captureStreamElement.srcObject.addTrack(osc.getTracks()[0]);

      return checkVideoPlaying(captureStreamElement);
    })
    .then(() => {
      info("Video flowing. Pausing.");
      gUMVideoElement.pause();
      return checkVideoPaused(captureStreamElement);
    })
    .then(() => {
      info("Video stopped flowing. Playing.");
      gUMVideoElement.play();
      return checkVideoPlaying(captureStreamElement);
    })
    .then(() => {
      info("Video flowing. Removing source.");
      sourceStream = gUMVideoElement.srcObject;
      gUMVideoElement.srcObject = null;
      return checkVideoPaused(captureStreamElement);
    })
    .then(() => {
      info("Video stopped flowing. Setting source.");
      gUMVideoElement.srcObject = sourceStream;
      return checkVideoPlaying(captureStreamElement);
    })
    .then(() => {
      info("Video flowing. Changing source by track manipulation. Remove first.");
      sourceTrack = gUMVideoElement.srcObject.getTracks()[0];
      gUMVideoElement.srcObject.removeTrack(sourceTrack);
      return checkVideoPaused(captureStreamElement);
    })
    .then(() => {
      info("Video paused. Changing source by track manipulation. Add first.");
      gUMVideoElement.srcObject.addTrack(sourceTrack);
      gUMVideoElement.play();
      return checkVideoPlaying(captureStreamElement);
    })
    .then(() => {
      gUMVideoElement.srcObject.getTracks().forEach(t => t.stop());
      ok(true, "Test passed.");
    })
    .catch(e => ok(false, "Test failed: " + e + (e.stack ? "\n" + e.stack : "")));
});

</script>
</pre>
</body>
</html>