<!DOCTYPE html>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/testdriver.js"></script>
<script src="/resources/testdriver-vendor.js"></script>
<script src="/mediacapture-image/resources/imagecapture-helpers.js"></script>
<body>
<canvas id='canvas' width=10 height=10/>
</body>
<script>

// This test verifies that the |constraints| configured in the mock Mojo
// service implementation, are returned by MediaStreamTrack.getSettings().
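// image_capture_test() comes from imagecapture-helpers.js (included above);
// it is expected to set up that mock service before running the test body.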

image_capture_test(async t => {
  await test_driver.set_permission({name: 'camera', panTiltZoom: true},
      'granted', false);

  let canvas = document.getElementById('canvas');
  let context = canvas.getContext('2d');
  context.fillStyle = 'red';
  context.fillRect(0, 0, 10, 10);
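  // The red fill above ensures captureStream() below produces non-blank frames.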

  const constraints = { advanced : [{ whiteBalanceMode : 'single-shot',
                                      exposureMode     : 'manual',
                                      focusMode        : 'single-shot',

                                      pointsOfInterest : [{x : 0.1, y : 0.2},
                                                          {x : 0.3, y : 0.4}],

                                      exposureCompensation : 133.77,
                                      // in nano-seconds.
                                      exposureTime         : 10000,
                                      colorTemperature     : 6000,
                                      iso                  : 120.0,

                                      brightness           : 3,
                                      contrast             : 4,
                                      saturation           : 5,
                                      sharpness            : 6,
                                      focusDistance        : 7,

                                      pan                  : 8,
                                      tilt                 : 9,
                                      zoom                 : 3.141592,

                                      torch                : true
                                    }]};

  let stream = canvas.captureStream();
  let videoTrack = stream.getVideoTracks()[0];

  // |videoTrack|'s capabilities gathering, just like the actual capture, is
  // a process kicked off right after creation, we introduce a small delay
  // to allow for those to be collected, since they are needed to understand
  // which constraints are supported in applyConstraints().
  // TODO(mcasas): this shouldn't be needed, https://crbug.com/711524.
  await new Promise(resolve => step_timeout(resolve, 100));

  try {
    await videoTrack.applyConstraints(constraints);
  } catch (error) {
    assert_unreached('Error applying constraints: ' + error.message);
  }

  let settings = videoTrack.getSettings();
  assert_equals(typeof settings, 'object');
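
  // Each constraint applied above should now be reflected in the settings.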

  assert_equals(constraints.advanced[0].whiteBalanceMode,
                settings.whiteBalanceMode, 'whiteBalanceMode');
  assert_equals(constraints.advanced[0].exposureMode, settings.exposureMode,
                'exposureMode');
  assert_equals(constraints.advanced[0].focusMode, settings.focusMode,
                'focusMode');

  assert_point2d_array_approx_equals(
      constraints.advanced[0].pointsOfInterest, settings.pointsOfInterest,
      0.01);

  assert_equals(constraints.advanced[0].exposureCompensation,
                settings.exposureCompensation, 'exposureCompensation');
  assert_equals(constraints.advanced[0].exposureTime,
                settings.exposureTime, 'exposureTime');
  assert_equals(constraints.advanced[0].colorTemperature,
                settings.colorTemperature, 'colorTemperature');
  assert_equals(constraints.advanced[0].iso, settings.iso, 'iso');

  assert_equals(constraints.advanced[0].brightness, settings.brightness,
                'brightness');
  assert_equals(constraints.advanced[0].contrast, settings.contrast,
                'contrast');

  assert_equals(constraints.advanced[0].saturation, settings.saturation,
                'saturation');
  assert_equals(constraints.advanced[0].sharpness, settings.sharpness,
                'sharpness');

  assert_equals(constraints.advanced[0].focusDistance, settings.focusDistance,
                'focusDistance');

  assert_equals(constraints.advanced[0].pan, settings.pan, 'pan');
  assert_equals(constraints.advanced[0].tilt, settings.tilt, 'tilt');
  assert_equals(constraints.advanced[0].zoom, settings.zoom, 'zoom');

  assert_equals(constraints.advanced[0].torch, settings.torch, 'torch');

}, 'exercises an applyConstraints() - getSettings() cycle with PTZ permission granted');


// This test verifies that the PTZ |constraints| configured in the mock Mojo
// service implementation can't be applied if PTZ permission is denied.

image_capture_test(async t => {
  await test_driver.set_permission({name: 'camera', panTiltZoom: true},
      'denied', false);

  let canvas = document.getElementById('canvas');
  let context = canvas.getContext('2d');
  context.fillStyle = 'red';
  context.fillRect(0, 0, 10, 10);

  let stream = canvas.captureStream();
  let videoTrack = stream.getVideoTracks()[0];

  // |videoTrack|'s capabilities gathering, just like the actual capture, is
  // a process kicked off right after creation, we introduce a small delay
  // to allow for those to be collected, since they are needed to understand
  // which constraints are supported in applyConstraints().
  // TODO(mcasas): this shouldn't be needed, https://crbug.com/711524.
  await new Promise(resolve => step_timeout(resolve, 100));

  const constraints = [{ pan: 8 }, { tilt: 9 }];
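  // pan and tilt are gated on the camera PTZ permission (denied above), so
  // applyConstraints() should reject each of them.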
  await Promise.all(constraints.map(async constraint =>
    promise_rejects_dom(
        t, 'NotSupportedError',
        videoTrack.applyConstraints({ advanced: [constraint] }),
        "applyConstraints should throw a NotSupportedError for " +
        JSON.stringify(constraint))
  ));

}, 'exercises an applyConstraints() with PTZ permission denied');

</script>