Coverage for appearance/hunt.py: 67%

252 statements  

« prev     ^ index     » next       coverage.py v7.11.0, created at 2025-11-16 22:49 +1300

1""" 

2Hunt Colour Appearance Model 

3============================ 

4 

5Define the *Hunt* colour appearance model for predicting perceptual colour 

6attributes under varying viewing conditions. 

7 

8- :class:`colour.appearance.InductionFactors_Hunt` 

9- :attr:`colour.VIEWING_CONDITIONS_HUNT` 

10- :class:`colour.CAM_Specification_Hunt` 

11- :func:`colour.XYZ_to_Hunt` 

12 

13References 

14---------- 

15- :cite:`Fairchild2013u` : Fairchild, M. D. (2013). The Hunt Model. In Color 

16 Appearance Models (3rd ed., pp. 5094-5556). Wiley. ISBN:B00DAYO8E2 

17- :cite:`Hunt2004b` : Hunt, R. W. G. (2004). The Reproduction of Colour (6th 

18 ed.). John Wiley & Sons, Ltd. doi:10.1002/0470024275 

19""" 

20 

21from __future__ import annotations 

22 

23import typing 

24from dataclasses import dataclass, field 

25 

26import numpy as np 

27 

28from colour.algebra import spow, vecmul 

29 

30if typing.TYPE_CHECKING: 

31 from colour.hints import ArrayLike, Domain100 

32 

33from colour.hints import Annotated, NDArrayFloat, cast 

34from colour.utilities import ( 

35 CanonicalMapping, 

36 MixinDataclassArithmetic, 

37 MixinDataclassIterable, 

38 as_float, 

39 as_float_array, 

40 from_range_degrees, 

41 ones, 

42 to_domain_100, 

43 tsplit, 

44 tstack, 

45 usage_warning, 

46 zeros, 

47) 

48 

49__author__ = "Colour Developers" 

50__copyright__ = "Copyright 2013 Colour Developers" 

51__license__ = "BSD-3-Clause - https://opensource.org/licenses/BSD-3-Clause" 

52__maintainer__ = "Colour Developers" 

53__email__ = "colour-developers@colour-science.org" 

54__status__ = "Production" 

55 

# Public API: induction factors & viewing conditions, hue/matrix data, the
# two specification dataclasses, the top-level model entry point
# "XYZ_to_Hunt", and the individual computational steps it composes.
__all__ = [
    "InductionFactors_Hunt",
    "VIEWING_CONDITIONS_HUNT",
    "HUE_DATA_FOR_HUE_QUADRATURE",
    "MATRIX_XYZ_TO_HPE",
    "MATRIX_HPE_TO_XYZ",
    "CAM_ReferenceSpecification_Hunt",
    "CAM_Specification_Hunt",
    "XYZ_to_Hunt",
    "luminance_level_adaptation_factor",
    "illuminant_scotopic_luminance",
    "XYZ_to_rgb",
    "f_n",
    "chromatic_adaptation",
    "adjusted_reference_white_signals",
    "achromatic_post_adaptation_signal",
    "colour_difference_signals",
    "hue_angle",
    "eccentricity_factor",
    "low_luminance_tritanopia_factor",
    "yellowness_blueness_response",
    "redness_greenness_response",
    "overall_chromatic_response",
    "saturation_correlate",
    "achromatic_signal",
    "brightness_correlate",
    "lightness_correlate",
    "chroma_correlate",
    "colourfulness_correlate",
]

86 

87 

@dataclass(frozen=True)
class InductionFactors_Hunt(MixinDataclassIterable):
    """
    Define the *Hunt* colour appearance model induction factors.

    Parameters
    ----------
    N_c
        Chromatic surround induction factor :math:`N_c`.
    N_b
        Brightness surround induction factor :math:`N_b`.
    N_cb
        Chromatic background induction factor :math:`N_{cb}`, approximated
        using tristimulus values :math:`Y_w` and :math:`Y_b` of
        respectively the reference white and the background if not specified.
    N_bb
        Brightness background induction factor :math:`N_{bb}`, approximated
        using tristimulus values :math:`Y_w` and :math:`Y_b` of
        respectively the reference white and the background if not
        specified.

    References
    ----------
    :cite:`Fairchild2013u`, :cite:`Hunt2004b`
    """

    N_c: float
    N_b: float
    # "None" is immutable, so a plain default is equivalent to, and more
    # idiomatic than, the previous "field(default_factory=lambda: None)".
    N_cb: float | None = None
    N_bb: float | None = None

118 

119 

VIEWING_CONDITIONS_HUNT: CanonicalMapping = CanonicalMapping(
    {
        "Small Areas, Uniform Background & Surrounds": InductionFactors_Hunt(1, 300),
        "Normal Scenes": InductionFactors_Hunt(1, 75),
        "Television & CRT, Dim Surrounds": InductionFactors_Hunt(1, 25),
        "Large Transparencies On Light Boxes": InductionFactors_Hunt(0.7, 25),
        "Projected Transparencies, Dark Surrounds": InductionFactors_Hunt(0.7, 10),
    }
)
VIEWING_CONDITIONS_HUNT.__doc__ = """
Define the reference *Hunt* colour appearance model viewing conditions.

References
----------
:cite:`Fairchild2013u`, :cite:`Hunt2004b`

Aliases:

- 'small_uniform': 'Small Areas, Uniform Background & Surrounds'
- 'normal': 'Normal Scenes'
- 'tv_dim': 'Television & CRT, Dim Surrounds'
- 'light_boxes': 'Large Transparencies On Light Boxes'
- 'projected_dark': 'Projected Transparencies, Dark Surrounds'
"""

# Register the short-hand aliases documented above; each one maps to the
# same "InductionFactors_Hunt" instance as its canonical key.
for _alias, _canonical in (
    ("small_uniform", "Small Areas, Uniform Background & Surrounds"),
    ("normal", "Normal Scenes"),
    ("tv_dim", "Television & CRT, Dim Surrounds"),
    ("light_boxes", "Large Transparencies On Light Boxes"),
    ("projected_dark", "Projected Transparencies, Dark Surrounds"),
):
    VIEWING_CONDITIONS_HUNT[_alias] = VIEWING_CONDITIONS_HUNT[_canonical]
del _alias, _canonical

157 

# Hue-quadrature reference data: "h_s" holds four reference hue angles in
# degrees and "e_s" their associated eccentricity factors (presumably the
# model's four unique hues — confirm against Hunt (2004)).  Consumed by
# "eccentricity_factor" as interpolation knots.
HUE_DATA_FOR_HUE_QUADRATURE: dict = {
    "h_s": np.array([20.14, 90.00, 164.25, 237.53]),
    "e_s": np.array([0.8, 0.7, 1.0, 1.2]),
}

162 

# 3x3 linear transform applied by "XYZ_to_rgb" via "vecmul"; note the third
# row is identity, i.e. the beta response equals the Z tristimulus value.
MATRIX_XYZ_TO_HPE: NDArrayFloat = np.array(
    [
        [0.38971, 0.68898, -0.07868],
        [-0.22981, 1.18340, 0.04641],
        [0.00000, 0.00000, 1.00000],
    ]
)
"""
*Hunt* colour appearance model *CIE XYZ* tristimulus values to
*Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace matrix.
"""

174 

# Numerical inverse of "MATRIX_XYZ_TO_HPE", computed once at import time.
MATRIX_HPE_TO_XYZ: NDArrayFloat = np.linalg.inv(MATRIX_XYZ_TO_HPE)
"""
*Hunt* colour appearance model *Hunt-Pointer-Estevez*
:math:`\\rho\\gamma\\beta` colourspace to *CIE XYZ* tristimulus values matrix.
"""

180 

181 

@dataclass
class CAM_ReferenceSpecification_Hunt(MixinDataclassArithmetic):
    """
    Define the *Hunt* colour appearance model reference specification.

    This specification contains field names consistent with the *Fairchild
    (2013)* reference.

    Parameters
    ----------
    J
        Correlate of *Lightness* :math:`J`.
    C_94
        Correlate of *chroma* :math:`C_{94}`.
    h_S
        *Hue* angle :math:`h_S` in degrees.
    s
        Correlate of *saturation* :math:`s`.
    Q
        Correlate of *brightness* :math:`Q`.
    M_94
        Correlate of *colourfulness* :math:`M_{94}`.
    H
        *Hue* :math:`h` quadrature :math:`H`.
    H_C
        *Hue* :math:`h` composition :math:`H_C`.

    References
    ----------
    :cite:`Fairchild2013u`, :cite:`Hunt2004b`
    """

    # "None" is immutable, so plain defaults replace the previous
    # non-idiomatic "field(default_factory=lambda: None)" declarations.
    J: float | NDArrayFloat | None = None
    C_94: float | NDArrayFloat | None = None
    h_S: float | NDArrayFloat | None = None
    s: float | NDArrayFloat | None = None
    Q: float | NDArrayFloat | None = None
    M_94: float | NDArrayFloat | None = None
    H: float | NDArrayFloat | None = None
    H_C: float | NDArrayFloat | None = None

222 

223 

@dataclass
class CAM_Specification_Hunt(MixinDataclassArithmetic):
    """
    Define the *Hunt* colour appearance model specification.

    This specification provides a standardized interface for the *Hunt* model
    with field names consistent across all colour appearance models in
    :mod:`colour.appearance`. While the field names differ from the original
    *Fairchild (2013)* reference notation, they map directly to the model's
    perceptual correlates.

    Parameters
    ----------
    J
        Correlate of *lightness* :math:`J`.
    C
        Correlate of *chroma* :math:`C_{94}`.
    h
        *Hue* angle :math:`h_s` in degrees.
    s
        Correlate of *saturation* :math:`s`.
    Q
        Correlate of *brightness* :math:`Q`.
    M
        Correlate of *colourfulness* :math:`M_{94}`.
    H
        *Hue* :math:`h` quadrature :math:`H`.
    HC
        *Hue* :math:`h` composition :math:`H_C`.

    Notes
    -----
    - This specification is the one used in the current model
      implementation.

    References
    ----------
    :cite:`Fairchild2013u`, :cite:`Hunt2004b`
    """

    # "None" is immutable, so plain defaults replace the previous
    # non-idiomatic "field(default_factory=lambda: None)" declarations.
    J: float | NDArrayFloat | None = None
    C: float | NDArrayFloat | None = None
    h: float | NDArrayFloat | None = None
    s: float | NDArrayFloat | None = None
    Q: float | NDArrayFloat | None = None
    M: float | NDArrayFloat | None = None
    H: float | NDArrayFloat | None = None
    HC: float | NDArrayFloat | None = None

272 

273 

def XYZ_to_Hunt(
    XYZ: Domain100,
    XYZ_w: Domain100,
    XYZ_b: Domain100,
    L_A: ArrayLike,
    surround: InductionFactors_Hunt = VIEWING_CONDITIONS_HUNT["Normal Scenes"],
    L_AS: ArrayLike | None = None,
    CCT_w: ArrayLike | None = None,
    XYZ_p: Annotated[ArrayLike | None, 100] = None,
    p: ArrayLike | None = None,
    S: ArrayLike | None = None,
    S_w: ArrayLike | None = None,
    helson_judd_effect: bool = False,
    discount_illuminant: bool = True,
) -> Annotated[CAM_Specification_Hunt, 360]:
    """
    Compute the *Hunt* colour appearance model correlates from the specified
    *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    XYZ_b
        *CIE XYZ* tristimulus values of background.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
    surround
        Surround viewing conditions induction factors.
    L_AS
        Scotopic luminance :math:`L_{AS}` of the illuminant,
        approximated if not specified.
    CCT_w
        Correlated colour temperature :math:`T_{cp}` of the illuminant,
        required to approximate :math:`L_{AS}` when not specified.
    XYZ_p
        *CIE XYZ* tristimulus values of proximal field, assumed to equal
        background if not specified.
    p
        Simultaneous contrast / assimilation factor :math:`p` with value
        normalised to domain [-1, 0] for simultaneous contrast and
        normalised to domain [0, 1] for assimilation.
    S
        Scotopic response :math:`S` to the stimulus, approximated using
        tristimulus value :math:`Y` of the stimulus if not specified.
    S_w
        Scotopic response :math:`S_w` for the reference white,
        approximated using tristimulus value :math:`Y_w` of the
        reference white if not specified.
    helson_judd_effect
        Whether to account for the *Helson-Judd* effect.
    discount_illuminant
        Whether to discount the illuminant.

    Returns
    -------
    :class:`colour.CAM_Specification_Hunt`
        *Hunt* colour appearance model specification.

    Raises
    ------
    ValueError
        If an illegal argument combination is specified.

    Notes
    -----
    +---------------------+-----------------------+---------------+
    | **Domain**          | **Scale - Reference** | **Scale - 1** |
    +=====================+=======================+===============+
    | ``XYZ``             | 100                   | 1             |
    +---------------------+-----------------------+---------------+
    | ``XYZ_w``           | 100                   | 1             |
    +---------------------+-----------------------+---------------+
    | ``XYZ_b``           | 100                   | 1             |
    +---------------------+-----------------------+---------------+
    | ``XYZ_p``           | 100                   | 1             |
    +---------------------+-----------------------+---------------+

    +---------------------+-----------------------+---------------+
    | **Range**           | **Scale - Reference** | **Scale - 1** |
    +=====================+=======================+===============+
    | ``specification.h`` | 360                   | 1             |
    +---------------------+-----------------------+---------------+

    References
    ----------
    :cite:`Fairchild2013u`, :cite:`Hunt2004b`

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> XYZ_b = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> surround = VIEWING_CONDITIONS_HUNT["Normal Scenes"]
    >>> CCT_w = 6504
    >>> XYZ_to_Hunt(XYZ, XYZ_w, XYZ_b, L_A, surround, CCT_w=CCT_w)
    ... # doctest: +ELLIPSIS
    CAM_Specification_Hunt(J=30.0462678..., C=0.1210508..., h=269.2737594..., \
s=0.0199093..., Q=22.2097654..., M=0.1238964..., H=None, HC=None)
    """

    XYZ = to_domain_100(XYZ)
    XYZ_w = to_domain_100(XYZ_w)
    XYZ_b = to_domain_100(XYZ_b)
    _X, Y, _Z = tsplit(XYZ)
    _X_w, Y_w, _Z_w = tsplit(XYZ_w)
    X_b, Y_b, Z_b = tsplit(XYZ_b)

    # Arguments handling.
    if XYZ_p is not None:
        X_p, Y_p, Z_p = tsplit(to_domain_100(XYZ_p))
    else:
        # The proximal field defaults to the background; note that the
        # previous implementation erroneously aliased "Z_p" to "Y_b".
        X_p = X_b
        Y_p = Y_b
        Z_p = Z_b
        usage_warning(
            'Unspecified proximal field "XYZ_p" argument, using '
            'background "XYZ_b" as approximation!'
        )

    # Honour induction factors carried by "surround"; previously a surround
    # providing "N_cb" / "N_bb" raised "NameError" because the non-"None"
    # branch never bound the local names.
    if surround.N_cb is None:
        N_cb = 0.725 * spow(Y_w / Y_b, 0.2)
        usage_warning(f'Unspecified "N_cb" argument, using approximation: "{N_cb}"')
    else:
        N_cb = surround.N_cb

    if surround.N_bb is None:
        N_bb = 0.725 * spow(Y_w / Y_b, 0.2)
        usage_warning(f'Unspecified "N_bb" argument, using approximation: "{N_bb}"')
    else:
        N_bb = surround.N_bb

    if L_AS is None and CCT_w is None:
        error = (
            'Either the scotopic luminance "L_AS" of the '
            "illuminant or its correlated colour temperature "
            '"CCT_w" must be specified!'
        )

        raise ValueError(error)

    if L_AS is None and CCT_w is not None:
        L_AS = illuminant_scotopic_luminance(L_A, CCT_w)
        usage_warning(
            f'Unspecified "L_AS" argument, using approximation from "CCT": "{L_AS}"'
        )

    if (S is None and S_w is not None) or (S is not None and S_w is None):
        error = (
            'Either both stimulus scotopic response "S" and '
            'reference white scotopic response "S_w" arguments '
            "need to be specified or none of them!"
        )

        raise ValueError(error)

    if S is None and S_w is None:
        S_p = Y
        S_w_p = Y_w
        # The warning now reports the computed approximations; it previously
        # interpolated "S" and "S_w" which are both "None" at this point.
        usage_warning(
            f'Unspecified stimulus scotopic response "S" and reference white '
            f'scotopic response "S_w" arguments, using approximation: '
            f'"{S_p}", "{S_w_p}"'
        )
    else:
        # User-supplied scotopic responses were previously discarded, and the
        # achromatic signal computation read undefined names, raising
        # "NameError".
        S_p = as_float_array(S)
        S_w_p = as_float_array(S_w)

    if p is None:
        usage_warning(
            'Unspecified simultaneous contrast / assimilation "p" '
            "argument, model will not account for simultaneous chromatic "
            "contrast!"
        )

    XYZ_p = tstack([X_p, Y_p, Z_p])

    # Computing luminance level adaptation factor :math:`F_L`.
    F_L = luminance_level_adaptation_factor(L_A)

    # Computing test sample chromatic adaptation.
    rgb_a = chromatic_adaptation(
        XYZ,
        XYZ_w,
        XYZ_b,
        L_A,
        F_L,
        XYZ_p,
        p,
        helson_judd_effect,
        discount_illuminant,
    )

    # Computing reference white chromatic adaptation.
    rgb_aw = chromatic_adaptation(
        XYZ_w,
        XYZ_w,
        XYZ_b,
        L_A,
        F_L,
        XYZ_p,
        p,
        helson_judd_effect,
        discount_illuminant,
    )

    # Computing opponent colour dimensions.
    # Computing achromatic post adaptation signals.
    A_a = achromatic_post_adaptation_signal(rgb_a)
    A_aw = achromatic_post_adaptation_signal(rgb_aw)

    # Computing colour difference signals.
    C = colour_difference_signals(rgb_a)
    C_w = colour_difference_signals(rgb_aw)

    # -------------------------------------------------------------------------
    # Computing the *hue* angle :math:`h_s`.
    # -------------------------------------------------------------------------
    h = hue_angle(C)
    # hue_w = hue_angle(C_w)
    # TODO: Implement hue quadrature & composition computation.

    # -------------------------------------------------------------------------
    # Computing the correlate of *saturation* :math:`s`.
    # -------------------------------------------------------------------------
    # Computing eccentricity factors.
    e_s = eccentricity_factor(h)

    # Computing low luminance tritanopia factor :math:`F_t`.
    F_t = low_luminance_tritanopia_factor(L_A)

    M_yb = yellowness_blueness_response(C, e_s, surround.N_c, N_cb, F_t)
    M_rg = redness_greenness_response(C, e_s, surround.N_c, N_cb)
    M_yb_w = yellowness_blueness_response(C_w, e_s, surround.N_c, N_cb, F_t)
    M_rg_w = redness_greenness_response(C_w, e_s, surround.N_c, N_cb)

    # Computing overall chromatic response.
    M = overall_chromatic_response(M_yb, M_rg)
    M_w = overall_chromatic_response(M_yb_w, M_rg_w)

    s = saturation_correlate(M, rgb_a)

    # -------------------------------------------------------------------------
    # Computing the correlate of *brightness* :math:`Q`.
    # -------------------------------------------------------------------------
    # Computing achromatic signal :math:`A`.
    A = achromatic_signal(cast("NDArrayFloat", L_AS), S_p, S_w_p, N_bb, A_a)
    A_w = achromatic_signal(cast("NDArrayFloat", L_AS), S_w_p, S_w_p, N_bb, A_aw)

    Q = brightness_correlate(A, A_w, M, surround.N_b)
    brightness_w = brightness_correlate(A_w, A_w, M_w, surround.N_b)
    # TODO: Implement whiteness-blackness :math:`Q_{wb}` computation.

    # -------------------------------------------------------------------------
    # Computing the correlate of *Lightness* :math:`J`.
    # -------------------------------------------------------------------------
    J = lightness_correlate(Y_b, Y_w, Q, brightness_w)

    # -------------------------------------------------------------------------
    # Computing the correlate of *chroma* :math:`C_{94}`.
    # -------------------------------------------------------------------------
    C_94 = chroma_correlate(s, Y_b, Y_w, Q, brightness_w)

    # -------------------------------------------------------------------------
    # Computing the correlate of *colourfulness* :math:`M_{94}`.
    # -------------------------------------------------------------------------
    M_94 = colourfulness_correlate(F_L, C_94)

    return CAM_Specification_Hunt(
        J=J,
        C=C_94,
        h=as_float(from_range_degrees(h)),
        s=s,
        Q=Q,
        M=M_94,
        H=None,
        HC=None,
    )

545 

546 

def luminance_level_adaptation_factor(
    L_A: ArrayLike,
) -> NDArrayFloat:
    """
    Compute the *luminance* level adaptation factor :math:`F_L`.

    Parameters
    ----------
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.

    Returns
    -------
    :class:`numpy.ndarray`
        *Luminance* level adaptation factor :math:`F_L`.

    Examples
    --------
    >>> luminance_level_adaptation_factor(318.31)  # doctest: +ELLIPSIS
    1.1675444...
    """

    L_A = as_float_array(L_A)

    # The factor blends two regimes weighted by powers of :math:`k`.
    L_A_5 = 5 * L_A
    k = 1 / (L_A_5 + 1)
    k4 = k**4
    F_L = 0.2 * k4 * L_A_5 + 0.1 * (1 - k4) ** 2 * spow(L_A_5, 1 / 3)

    return as_float(F_L)

576 

577 

def illuminant_scotopic_luminance(L_A: ArrayLike, CCT: ArrayLike) -> NDArrayFloat:
    """
    Compute the approximate scotopic luminance :math:`L_{AS}` of the
    specified illuminant.

    Parameters
    ----------
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
    CCT
        Correlated colour temperature :math:`T_{cp}` of the illuminant.

    Returns
    -------
    :class:`numpy.ndarray`
        Approximate scotopic luminance :math:`L_{AS}`.

    Examples
    --------
    >>> illuminant_scotopic_luminance(318.31, 6504.0)  # doctest: +ELLIPSIS
    769.9376286...
    """

    L_A = as_float_array(L_A)
    CCT = as_float_array(CCT)

    # Dedicated result variable instead of overwriting "CCT" in place.
    L_AS = 2.26 * L_A * spow((CCT / 4000) - 0.4, 1 / 3)

    return as_float(L_AS)

607 

608 

def XYZ_to_rgb(XYZ: ArrayLike) -> NDArrayFloat:
    """
    Convert from *CIE XYZ* tristimulus values to *Hunt-Pointer-Estevez*
    :math:`\\rho\\gamma\\beta` colourspace.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values.

    Returns
    -------
    :class:`numpy.ndarray`
        *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace values.

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_to_rgb(XYZ)  # doctest: +ELLIPSIS
    array([ 19.4743367...,  20.3101217...,  21.78     ])
    """

    # A single matrix-vector product against the module-level HPE matrix.
    rgb = vecmul(MATRIX_XYZ_TO_HPE, XYZ)

    return rgb

632 

633 

def f_n(x: ArrayLike) -> NDArrayFloat:
    """
    Define the nonlinear response function of the *Hunt* colour appearance
    model used to model the nonlinear behaviour of various visual responses.

    Parameters
    ----------
    x
        Visual response variable :math:`x`.

    Returns
    -------
    :class:`numpy.ndarray`
        Modeled visual response variable :math:`x`.

    Examples
    --------
    >>> x = np.array([0.23350512, 0.23351103, 0.23355179])
    >>> f_n(x)  # doctest: +ELLIPSIS
    array([ 5.8968592...,  5.8969521...,  5.8975927...])
    """

    # Compressive power function followed by a saturating Michaelis-Menten
    # style ratio scaled to a maximum of 40.
    x_73 = spow(x, 0.73)
    response = 40 * (x_73 / (x_73 + 2))

    return as_float_array(response)

660 

661 

def chromatic_adaptation(
    XYZ: ArrayLike,
    XYZ_w: ArrayLike,
    XYZ_b: ArrayLike,
    L_A: ArrayLike,
    F_L: ArrayLike,
    XYZ_p: ArrayLike | None = None,
    p: ArrayLike | None = None,
    helson_judd_effect: bool = False,
    discount_illuminant: bool = True,
) -> NDArrayFloat:
    """
    Apply chromatic adaptation to the specified *CIE XYZ* tristimulus values.

    Parameters
    ----------
    XYZ
        *CIE XYZ* tristimulus values of test sample.
    XYZ_w
        *CIE XYZ* tristimulus values of reference white.
    XYZ_b
        *CIE XYZ* tristimulus values of background.
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.
    F_L
        Luminance adaptation factor :math:`F_L`.
    XYZ_p
        *CIE XYZ* tristimulus values of proximal field, assumed to be equal
        to background if not specified.
    p
        Simultaneous contrast/assimilation factor :math:`p` with value
        normalised to domain [-1, 0] when simultaneous contrast occurs and
        normalised to domain [0, 1] when assimilation occurs.
    helson_judd_effect
        Truth value indicating whether the *Helson-Judd* effect should be
        accounted for.
    discount_illuminant
        Truth value indicating if the illuminant should be discounted.

    Returns
    -------
    :class:`numpy.ndarray`
        Adapted *CIE XYZ* tristimulus values.

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_b = np.array([95.05, 100.00, 108.88])
    >>> XYZ_w = np.array([95.05, 100.00, 108.88])
    >>> L_A = 318.31
    >>> F_L = 1.16754446415
    >>> chromatic_adaptation(XYZ, XYZ_w, XYZ_b, L_A, F_L)  # doctest: +ELLIPSIS
    array([ 6.8959454...,  6.8959991...,  6.8965708...])

    # Coverage Doctests

    >>> chromatic_adaptation(
    ...     XYZ, XYZ_w, XYZ_b, L_A, F_L, discount_illuminant=False
    ... )  # doctest: +ELLIPSIS
    array([ 6.8525880...,  6.8874417...,  6.9461478...])
    >>> chromatic_adaptation(
    ...     XYZ, XYZ_w, XYZ_b, L_A, F_L, helson_judd_effect=True
    ... )  # doctest: +ELLIPSIS
    array([ 6.8959454...,  6.8959991...,  6.8965708...])
    >>> chromatic_adaptation(
    ...     XYZ, XYZ_w, XYZ_b, L_A, F_L, XYZ_p=XYZ_b, p=0.5
    ... )  # doctest: +ELLIPSIS
    array([ 9.2069020...,  9.2070219...,  9.2078373...])
    """

    XYZ = as_float_array(XYZ)
    XYZ_w = as_float_array(XYZ_w)
    XYZ_b = as_float_array(XYZ_b)
    L_A = as_float_array(L_A)
    F_L = as_float_array(F_L)

    # Cone-like responses of test sample and reference white in the
    # Hunt-Pointer-Estevez space.
    rgb = XYZ_to_rgb(XYZ)
    rgb_w = XYZ_to_rgb(XYZ_w)
    Y_w = XYZ_w[..., 1]
    Y_b = XYZ_b[..., 1]

    # Per-channel white responses normalised so the three channels sum to 3.
    h_rgb = 3 * rgb_w / np.sum(rgb_w, axis=-1)[..., None]

    # Computing chromatic adaptation factors.
    # When the illuminant is discounted, adaptation is assumed complete and
    # the factors collapse to 1.
    if not discount_illuminant:
        L_A_p = spow(L_A, 1 / 3)
        F_rgb = cast("NDArrayFloat", (1 + L_A_p + h_rgb) / (1 + L_A_p + (1 / h_rgb)))
    else:
        F_rgb = ones(cast("NDArrayFloat", h_rgb).shape)

    # Computing Helson-Judd effect parameters.
    # NOTE(review): the first term uses the gamma channel "F_rgb[..., 1]" as
    # the reference, so "D_rgb" is zero for that channel by construction —
    # presumably matching Hunt's formulation; confirm against Hunt (2004).
    if helson_judd_effect:
        Y_b_Y_w = Y_b / Y_w
        D_rgb = f_n(Y_b_Y_w * F_L * F_rgb[..., 1]) - f_n(Y_b_Y_w * F_L * F_rgb)
    else:
        D_rgb = zeros(F_rgb.shape)

    # Computing cone bleach factors.
    # "[..., None]" expands "L_A" so it broadcasts against the trailing
    # channel axis of "rgb_w".
    B_rgb = 10**7 / (10**7 + 5 * L_A[..., None] * (rgb_w / 100))

    # Computing adjusted reference white signals.
    # Only applied when both a proximal field and a contrast / assimilation
    # factor are supplied.
    if XYZ_p is not None and p is not None:
        rgb_p = XYZ_to_rgb(XYZ_p)
        rgb_w = adjusted_reference_white_signals(rgb_p, B_rgb, rgb_w, p)

    # Computing adapted cone responses.
    return 1.0 + B_rgb * (f_n(F_L[..., None] * F_rgb * rgb / rgb_w) + D_rgb)

769 

770 

def adjusted_reference_white_signals(
    rgb_p: ArrayLike,
    rgb_b: ArrayLike,
    rgb_w: ArrayLike,
    p: ArrayLike,
) -> NDArrayFloat:
    """
    Adjust reference white signals for simultaneous chromatic
    contrast/assimilation effects.

    Compute adjusted cone signals in the Hunt-Pointer-Estevez
    :math:`\\rho\\gamma\\beta` colourspace based on the proximal field,
    background, and simultaneous contrast/assimilation factor.

    Parameters
    ----------
    rgb_p
        Cone signals *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta`
        colourspace array of the proximal field.
    rgb_b
        Cone signals *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta`
        colourspace array of the background.
    rgb_w
        Cone signals array *Hunt-Pointer-Estevez*
        :math:`\\rho\\gamma\\beta` colourspace array of the reference white.
    p
        Simultaneous contrast / assimilation factor :math:`p` with value
        normalised to domain [-1, 0] when simultaneous contrast occurs and
        normalised to domain [0, 1] when assimilation occurs.

    Returns
    -------
    :class:`numpy.ndarray`
        Adjusted cone signals *Hunt-Pointer-Estevez*
        :math:`\\rho\\gamma\\beta` colourspace array of the reference white.

    Examples
    --------
    >>> rgb_p = np.array([98.07193550, 101.13755950, 100.00000000])
    >>> rgb_b = np.array([0.99984505, 0.99983840, 0.99982674])
    >>> rgb_w = np.array([97.37325710, 101.54968030, 108.88000000])
    >>> p = 0.1
    >>> adjusted_reference_white_signals(rgb_p, rgb_b, rgb_w, p)
    ... # doctest: +ELLIPSIS
    array([ 88.0792742...,  91.8569553...,  98.4876543...])
    """

    rgb_p = as_float_array(rgb_p)
    rgb_b = as_float_array(rgb_b)
    rgb_w = as_float_array(rgb_w)
    p = as_float_array(p)

    # Proximal field relative to the background, then a square-root ratio
    # that weights it by the contrast / assimilation factor.
    p_rgb = rgb_p / rgb_b
    numerator = spow((1 - p) * p_rgb + (1 + p) / p_rgb, 0.5)
    denominator = spow((1 + p) * p_rgb + (1 - p) / p_rgb, 0.5)

    return rgb_w * numerator / denominator

829 

830 

def achromatic_post_adaptation_signal(rgb: ArrayLike) -> NDArrayFloat:
    """
    Compute the achromatic post adaptation signal :math:`A` from the specified
    *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace array.

    Parameters
    ----------
    rgb
        *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace array.

    Returns
    -------
    :class:`numpy.ndarray`
        Achromatic post adaptation signal :math:`A`.

    Examples
    --------
    >>> rgb = np.array([6.89594549, 6.89599915, 6.89657085])
    >>> achromatic_post_adaptation_signal(rgb)  # doctest: +ELLIPSIS
    18.9827186...
    """

    rho, gamma, beta = tsplit(rgb)

    # Weighted channel sum, minus the 3.05 offset, plus the +1 noise term.
    return 2 * rho + gamma + (1 / 20) * beta - 3.05 + 1

856 

857 

def colour_difference_signals(rgb: ArrayLike) -> NDArrayFloat:
    """
    Compute the colour difference signals :math:`C_1`, :math:`C_2` and
    :math:`C_3` from the specified *Hunt-Pointer-Estevez*
    :math:`\\rho\\gamma\\beta` colourspace array.

    Parameters
    ----------
    rgb
        *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta` colourspace array.

    Returns
    -------
    :class:`numpy.ndarray`
        Colour difference signals :math:`C_1`, :math:`C_2` and :math:`C_3`.

    Examples
    --------
    >>> rgb = np.array([6.89594549, 6.89599915, 6.89657085])
    >>> colour_difference_signals(rgb)  # doctest: +ELLIPSIS
    array([ -5.3660000...e-05,  -5.7170000...e-04,   6.2536000...e-04])
    """

    rho, gamma, beta = tsplit(rgb)

    # Cyclic pairwise differences of the adapted cone responses.
    return tstack([rho - gamma, gamma - beta, beta - rho])

888 

889 

def hue_angle(C: ArrayLike) -> NDArrayFloat:
    """
    Compute the *hue* angle :math:`h` in degrees from the specified colour
    difference signals :math:`C`.

    Parameters
    ----------
    C
        Colour difference signals :math:`C`.

    Returns
    -------
    :class:`numpy.ndarray`
        *Hue* angle :math:`h` in degrees.

    Examples
    --------
    >>> C = np.array([-5.365865581996587e-05, -0.000571699383647, 0.000625358039467])
    >>> hue_angle(C)  # doctest: +ELLIPSIS
    269.2737594...
    """

    C_1, C_2, C_3 = tsplit(C)

    # Opponent coordinates feeding the arctangent; the modulo wraps the
    # result into [0, 360).
    t = 0.5 * (C_2 - C_3) / 4.5
    u = C_1 - (C_2 / 11)
    hue = (180 * np.arctan2(t, u) / np.pi) % 360

    return as_float(hue)

917 

918 

def eccentricity_factor(hue: ArrayLike) -> NDArrayFloat:
    """
    Compute the eccentricity factor :math:`e_s` from the specified hue angle
    :math:`h` in degrees.

    Parameters
    ----------
    hue
        Hue angle :math:`h` in degrees.

    Returns
    -------
    :class:`numpy.ndarray`
        Eccentricity factor :math:`e_s`.

    Examples
    --------
    >>> eccentricity_factor(269.273759)  # doctest: +ELLIPSIS
    1.1108365...
    """

    hue = as_float_array(hue)

    h_s = HUE_DATA_FOR_HUE_QUADRATURE["h_s"]
    e_s = HUE_DATA_FOR_HUE_QUADRATURE["e_s"]

    # Interpolate between the reference hues; outside the table the factor
    # is extrapolated linearly towards the wrap-around at 0 / 360 degrees.
    # The two conditions are disjoint, so "np.select" is equivalent to the
    # former pair of "np.where" calls.
    x = np.select(
        [hue < 20.14, hue > 237.53],
        [
            0.856 - (hue / 20.14) * 0.056,
            0.856 + 0.344 * (360 - hue) / (360 - 237.53),
        ],
        default=np.interp(hue, h_s, e_s),
    )

    return as_float(x)

950 

951 

def low_luminance_tritanopia_factor(
    L_A: ArrayLike,
) -> NDArrayFloat:
    """
    Compute the low luminance tritanopia factor :math:`F_t` from the specified
    adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.

    Parameters
    ----------
    L_A
        Adapting field *luminance* :math:`L_A` in :math:`cd/m^2`.

    Returns
    -------
    :class:`numpy.ndarray`
        Low luminance tritanopia factor :math:`F_t`.

    Examples
    --------
    >>> low_luminance_tritanopia_factor(318.31)  # doctest: +ELLIPSIS
    0.9996859...
    """

    L_A = as_float_array(L_A)

    # Ratio approaching 1 as luminance grows, dropping towards 0 at very
    # low adapting luminances.
    return as_float(L_A / (L_A + 0.1))

980 

981 

def yellowness_blueness_response(
    C: ArrayLike,
    e_s: ArrayLike,
    N_c: ArrayLike,
    N_cb: ArrayLike,
    F_t: ArrayLike,
) -> NDArrayFloat:
    """
    Return the yellowness-blueness response :math:`M_{yb}`.

    Parameters
    ----------
    C
        Colour difference signals :math:`C`.
    e_s
        Eccentricity factor :math:`e_s`.
    N_c
        Chromatic surround induction factor :math:`N_c`.
    N_cb
        Chromatic background induction factor :math:`N_{cb}`.
    F_t
        Low luminance tritanopia factor :math:`F_t`.

    Returns
    -------
    :class:`numpy.ndarray`
        Yellowness-blueness response :math:`M_{yb}`.

    Examples
    --------
    >>> C = np.array([-5.365865581996587e-05, -0.000571699383647, 0.000625358039467])
    >>> e_s = 1.110836504862630
    >>> N_c = 1.0
    >>> N_cb = 0.725000000000000
    >>> F_t = 0.99968593951195
    >>> yellowness_blueness_response(C, e_s, N_c, N_cb, F_t)
    ... # doctest: +ELLIPSIS
    -0.0082372...
    """

    _C_1, C_2, C_3 = tsplit(C)

    # Opponent yellowness-blueness signal derived from the colour
    # difference signals.
    yb_signal = 0.5 * (C_2 - C_3) / 4.5

    # Combined induction / eccentricity / tritanopia gain.
    gain = (
        as_float_array(e_s)
        * (10 / 13)
        * as_float_array(N_c)
        * as_float_array(N_cb)
        * as_float_array(F_t)
    )

    return as_float(100 * yb_signal * gain)

1031 

1032 

def redness_greenness_response(
    C: ArrayLike,
    e_s: ArrayLike,
    N_c: ArrayLike,
    N_cb: ArrayLike,
) -> NDArrayFloat:
    """
    Return the redness-greenness response :math:`M_{rg}`.

    Parameters
    ----------
    C
        Colour difference signals :math:`C`.
    e_s
        Eccentricity factor :math:`e_s`.
    N_c
        Chromatic surround induction factor :math:`N_c`.
    N_cb
        Chromatic background induction factor :math:`N_{cb}`.

    Returns
    -------
    :class:`numpy.ndarray`
        Redness-greenness response :math:`M_{rg}`.

    Examples
    --------
    >>> C = np.array([-5.365865581996587e-05, -0.000571699383647, 0.000625358039467])
    >>> e_s = 1.110836504862630
    >>> N_c = 1.0
    >>> N_cb = 0.725000000000000
    >>> redness_greenness_response(C, e_s, N_c, N_cb)  # doctest: +ELLIPSIS
    -0.0001044...
    """

    C_1, C_2, _C_3 = tsplit(C)

    # Opponent redness-greenness signal derived from the colour difference
    # signals.
    rg_signal = C_1 - (C_2 / 11)

    # Combined induction / eccentricity gain.
    gain = (
        as_float_array(e_s)
        * (10 / 13)
        * as_float_array(N_c)
        * as_float_array(N_cb)
    )

    return as_float(100 * rg_signal * gain)

1076 

1077 

def overall_chromatic_response(M_yb: ArrayLike, M_rg: ArrayLike) -> NDArrayFloat:
    """
    Return the overall chromatic response :math:`M`.

    Parameters
    ----------
    M_yb
        Yellowness / blueness response :math:`M_{yb}`.
    M_rg
        Redness / greenness response :math:`M_{rg}`.

    Returns
    -------
    :class:`numpy.ndarray`
        Overall chromatic response :math:`M`.

    Examples
    --------
    >>> M_yb = -0.008237223618825
    >>> M_rg = -0.000104447583276
    >>> overall_chromatic_response(M_yb, M_rg)  # doctest: +ELLIPSIS
    0.0082378...
    """

    M_yb = as_float_array(M_yb)
    M_rg = as_float_array(M_rg)

    # Euclidean magnitude of the two opponent responses.
    magnitude_squared = M_yb**2 + M_rg**2

    return spow(magnitude_squared, 0.5)

1106 

1107 

def saturation_correlate(M: ArrayLike, rgb_a: ArrayLike) -> NDArrayFloat:
    """
    Return the *saturation* correlate :math:`s`.

    Parameters
    ----------
    M
        Overall chromatic response :math:`M`.
    rgb_a
        Adapted *Hunt-Pointer-Estevez* :math:`\\rho\\gamma\\beta`
        colourspace array.

    Returns
    -------
    :class:`numpy.ndarray`
        *Saturation* correlate :math:`s`.

    Examples
    --------
    >>> M = 0.008237885787274
    >>> rgb_a = np.array([6.89594549, 6.89599915, 6.89657085])
    >>> saturation_correlate(M, rgb_a)  # doctest: +ELLIPSIS
    0.0199093...
    """

    M = as_float_array(M)

    # Chromatic response is normalised by the total adapted cone response.
    total_response = np.sum(as_float_array(rgb_a), axis=-1)

    return as_float(50 * M / total_response)

1139 

1140 

def achromatic_signal(
    L_AS: ArrayLike,
    S: ArrayLike,
    S_w: ArrayLike,
    N_bb: ArrayLike,
    A_a: ArrayLike,
) -> NDArrayFloat:
    """
    Return the achromatic signal :math:`A`.

    Parameters
    ----------
    L_AS
        Scotopic luminance :math:`L_{AS}` of the illuminant.
    S
        Scotopic response :math:`S` to the stimulus.
    S_w
        Scotopic response :math:`S_w` for the reference white.
    N_bb
        Brightness background induction factor :math:`N_{bb}`.
    A_a
        Achromatic post adaptation signal of the stimulus :math:`A_a`.

    Returns
    -------
    :class:`numpy.ndarray`
        Achromatic signal :math:`A`.

    Examples
    --------
    >>> L_AS = 769.9376286541402
    >>> S = 20.0
    >>> S_w = 100.0
    >>> N_bb = 0.725000000000000
    >>> A_a = 18.982718664838487
    >>> achromatic_signal(L_AS, S, S_w, N_bb, A_a)  # doctest: +ELLIPSIS
    15.5068546...
    """

    L_AS = as_float_array(L_AS)
    N_bb = as_float_array(N_bb)
    A_a = as_float_array(A_a)

    # Scaled scotopic luminance, i.e. :math:`5 L_{AS} / 2.26`, reused
    # throughout the computation below.
    L_scotopic = 5 * (L_AS / 2.26)

    j = 0.00001 / (L_scotopic + 0.00001)

    # Scotopic response of the stimulus relative to the reference white.
    S_ratio = as_float_array(S) / as_float_array(S_w)

    # Scotopic luminance level adaptation factor :math:`F_{LS}`.
    F_LS = 3800 * (j**2) * L_scotopic + 0.2 * (spow(1 - (j**2), 0.4)) * (
        spow(L_scotopic, 1 / 6)
    )

    # Cone bleach factors :math:`B_S`.
    B_S = 0.5 / (1 + 0.3 * spow(L_scotopic * S_ratio, 0.3)) + 0.5 / (
        1 + 5 * L_scotopic
    )

    # Adapted scotopic signal :math:`A_S`.
    A_S = (f_n(F_LS * S_ratio) * 3.05 * B_S) + 0.3

    # Achromatic signal :math:`A` combining the cone and rod contributions.
    A = N_bb * (A_a - 1 + A_S - 0.3 + np.sqrt(1 + (0.3**2)))

    return as_float(A)

1207 

1208 

def brightness_correlate(
    A: ArrayLike,
    A_w: ArrayLike,
    M: ArrayLike,
    N_b: ArrayLike,
) -> NDArrayFloat:
    """
    Return the *brightness* correlate :math:`Q`.

    Parameters
    ----------
    A
        Achromatic signal :math:`A`.
    A_w
        Achromatic post adaptation signal of the reference white :math:`A_w`.
    M
        Overall chromatic response :math:`M`.
    N_b
        Brightness surround induction factor :math:`N_b`.

    Returns
    -------
    :class:`numpy.ndarray`
        *Brightness* correlate :math:`Q`.

    Examples
    --------
    >>> A = 15.506854623621885
    >>> A_w = 35.718916676317086
    >>> M = 0.008237885787274
    >>> N_b = 75.0
    >>> brightness_correlate(A, A_w, M, N_b)  # doctest: +ELLIPSIS
    22.2097654...
    """

    A = as_float_array(A)
    A_w = as_float_array(A_w)
    M = as_float_array(M)
    N_b = as_float_array(N_b)

    # Scaling term :math:`N_1` and offset term :math:`N_2`, both functions
    # of the reference white signal and the surround induction factor.
    scale_N_1 = spow(7 * A_w, 0.5) / (5.33 * spow(N_b, 0.13))
    offset_N_2 = (7 * A_w * spow(N_b, 0.362)) / 200

    # Achromatic signal augmented with a small chromatic contribution.
    augmented = 7 * (A + (M / 100))

    return spow(augmented, 0.6) * scale_N_1 - offset_N_2

1253 

1254 

def lightness_correlate(
    Y_b: ArrayLike,
    Y_w: ArrayLike,
    Q: ArrayLike,
    Q_w: ArrayLike,
) -> NDArrayFloat:
    """
    Return the *lightness* correlate :math:`J`.

    Parameters
    ----------
    Y_b
        Tristimulus value :math:`Y_b` of the background.
    Y_w
        Tristimulus value :math:`Y_w` of the reference white.
    Q
        *Brightness* correlate :math:`Q` of the stimulus.
    Q_w
        *Brightness* correlate :math:`Q_w` of the reference white.

    Returns
    -------
    :class:`numpy.ndarray`
        *Lightness* correlate :math:`J`.

    Examples
    --------
    >>> Y_b = 100.0
    >>> Y_w = 100.0
    >>> Q = 22.209765491265024
    >>> Q_w = 40.518065821226081
    >>> lightness_correlate(Y_b, Y_w, Q, Q_w)  # doctest: +ELLIPSIS
    30.0462678...
    """

    Q = as_float_array(Q)
    Q_w = as_float_array(Q_w)

    # Exponent :math:`Z` driven by the background to white luminance ratio.
    exponent_Z = 1 + spow(as_float_array(Y_b) / as_float_array(Y_w), 0.5)

    # Lightness is brightness relative to that of the reference white.
    return 100 * spow(Q / Q_w, exponent_Z)

1298 

1299 

def chroma_correlate(
    s: ArrayLike,
    Y_b: ArrayLike,
    Y_w: ArrayLike,
    Q: ArrayLike,
    Q_w: ArrayLike,
) -> NDArrayFloat:
    """
    Return the *chroma* correlate :math:`C_{94}`.

    Parameters
    ----------
    s
        *Saturation* correlate :math:`s`.
    Y_b
        Tristimulus value :math:`Y_b` of the background.
    Y_w
        Tristimulus value :math:`Y_w` of the reference white.
    Q
        *Brightness* correlate :math:`Q` of the stimulus.
    Q_w
        *Brightness* correlate :math:`Q_w` of the reference white.

    Returns
    -------
    :class:`numpy.ndarray`
        *Chroma* correlate :math:`C_{94}`.

    Examples
    --------
    >>> s = 0.0199093206929
    >>> Y_b = 100.0
    >>> Y_w = 100.0
    >>> Q = 22.209765491265024
    >>> Q_w = 40.518065821226081
    >>> chroma_correlate(s, Y_b, Y_w, Q, Q_w)  # doctest: +ELLIPSIS
    0.1210508...
    """

    s = as_float_array(s)
    Q = as_float_array(Q)
    Q_w = as_float_array(Q_w)

    # Background to reference white luminance ratio.
    ratio = as_float_array(Y_b) / as_float_array(Y_w)

    # Chroma combines saturation with brightness and background terms.
    return (
        2.44 * spow(s, 0.69) * (spow(Q / Q_w, ratio)) * (1.64 - spow(0.29, ratio))
    )

1350 

1351 

def colourfulness_correlate(F_L: ArrayLike, C_94: ArrayLike) -> NDArrayFloat:
    """
    Return the *colourfulness* correlate :math:`M_{94}`.

    Parameters
    ----------
    F_L
        Luminance adaptation factor :math:`F_L`.
    C_94
        *Chroma* correlate :math:`C_{94}`.

    Returns
    -------
    :class:`numpy.ndarray`
        *Colourfulness* correlate :math:`M_{94}`.

    Examples
    --------
    >>> F_L = 1.16754446414718
    >>> C_94 = 0.121050839936176
    >>> colourfulness_correlate(F_L, C_94)  # doctest: +ELLIPSIS
    0.1238964...
    """

    C_94 = as_float_array(C_94)

    # Colourfulness scales chroma by the luminance adaptation factor.
    luminance_scale = spow(as_float_array(F_L), 0.15)

    return luminance_scale * C_94