// composite.js
  1. // Copyright 2013 Lovell Fuller and others.
  2. // SPDX-License-Identifier: Apache-2.0
  3. 'use strict';
  4. const is = require('./is');
  5. /**
  6. * Blend modes.
  7. * @member
  8. * @private
  9. */
  10. const blend = {
  11. clear: 'clear',
  12. source: 'source',
  13. over: 'over',
  14. in: 'in',
  15. out: 'out',
  16. atop: 'atop',
  17. dest: 'dest',
  18. 'dest-over': 'dest-over',
  19. 'dest-in': 'dest-in',
  20. 'dest-out': 'dest-out',
  21. 'dest-atop': 'dest-atop',
  22. xor: 'xor',
  23. add: 'add',
  24. saturate: 'saturate',
  25. multiply: 'multiply',
  26. screen: 'screen',
  27. overlay: 'overlay',
  28. darken: 'darken',
  29. lighten: 'lighten',
  30. 'colour-dodge': 'colour-dodge',
  31. 'color-dodge': 'colour-dodge',
  32. 'colour-burn': 'colour-burn',
  33. 'color-burn': 'colour-burn',
  34. 'hard-light': 'hard-light',
  35. 'soft-light': 'soft-light',
  36. difference: 'difference',
  37. exclusion: 'exclusion'
  38. };
  39. /**
  40. * Composite image(s) over the processed (resized, extracted etc.) image.
  41. *
  42. * The images to composite must be the same size or smaller than the processed image.
  43. * If both `top` and `left` options are provided, they take precedence over `gravity`.
  44. *
  45. * Any resize, rotate or extract operations in the same processing pipeline
  46. * will always be applied to the input image before composition.
  47. *
  48. * The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
  49. * `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
  50. * `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
  51. * `colour-dodge`, `color-dodge`, `colour-burn`,`color-burn`,
  52. * `hard-light`, `soft-light`, `difference`, `exclusion`.
  53. *
  54. * More information about blend modes can be found at
  55. * https://www.libvips.org/API/current/libvips-conversion.html#VipsBlendMode
  56. * and https://www.cairographics.org/operators/
  57. *
  58. * @since 0.22.0
  59. *
  60. * @example
  61. * await sharp(background)
  62. * .composite([
  63. * { input: layer1, gravity: 'northwest' },
  64. * { input: layer2, gravity: 'southeast' },
  65. * ])
  66. * .toFile('combined.png');
  67. *
  68. * @example
  69. * const output = await sharp('input.gif', { animated: true })
  70. * .composite([
  71. * { input: 'overlay.png', tile: true, blend: 'saturate' }
  72. * ])
  73. * .toBuffer();
  74. *
  75. * @example
  76. * sharp('input.png')
  77. * .rotate(180)
  78. * .resize(300)
  79. * .flatten( { background: '#ff6600' } )
  80. * .composite([{ input: 'overlay.png', gravity: 'southeast' }])
  81. * .sharpen()
  82. * .withMetadata()
  83. * .webp( { quality: 90 } )
  84. * .toBuffer()
  85. * .then(function(outputBuffer) {
  86. * // outputBuffer contains upside down, 300px wide, alpha channel flattened
  87. * // onto orange background, composited with overlay.png with SE gravity,
  88. * // sharpened, with metadata, 90% quality WebP image data. Phew!
  89. * });
  90. *
  91. * @param {Object[]} images - Ordered list of images to composite
  92. * @param {Buffer|String} [images[].input] - Buffer containing image data, String containing the path to an image file, or Create object (see below)
  93. * @param {Object} [images[].input.create] - describes a blank overlay to be created.
  94. * @param {Number} [images[].input.create.width]
  95. * @param {Number} [images[].input.create.height]
  96. * @param {Number} [images[].input.create.channels] - 3-4
  97. * @param {String|Object} [images[].input.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
  98. * @param {Object} [images[].input.text] - describes a new text image to be created.
  99. * @param {string} [images[].input.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
  100. * @param {string} [images[].input.text.font] - font name to render with.
  101. * @param {string} [images[].input.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
  102. * @param {number} [images[].input.text.width=0] - integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
  103. * @param {number} [images[].input.text.height=0] - integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
  104. * @param {string} [images[].input.text.align='left'] - text alignment (`'left'`, `'centre'`, `'center'`, `'right'`).
  105. * @param {boolean} [images[].input.text.justify=false] - set this to true to apply justification to the text.
  106. * @param {number} [images[].input.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
  107. * @param {boolean} [images[].input.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for Pango markup features like `<span foreground="red">Red!</span>`.
  108. * @param {number} [images[].input.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
  109. * @param {String} [images[].blend='over'] - how to blend this image with the image below.
  110. * @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
  111. * @param {Number} [images[].top] - the pixel offset from the top edge.
  112. * @param {Number} [images[].left] - the pixel offset from the left edge.
  113. * @param {Boolean} [images[].tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
  114. * @param {Boolean} [images[].premultiplied=false] - set to true to avoid premultiplying the image below. Equivalent to the `--premultiplied` vips option.
  115. * @param {Number} [images[].density=72] - number representing the DPI for vector overlay image.
  116. * @param {Object} [images[].raw] - describes overlay when using raw pixel data.
  117. * @param {Number} [images[].raw.width]
  118. * @param {Number} [images[].raw.height]
  119. * @param {Number} [images[].raw.channels]
  120. * @param {boolean} [images[].animated=false] - Set to `true` to read all frames/pages of an animated image.
  121. * @param {string} [images[].failOn='warning'] - @see {@link /api-constructor#parameters|constructor parameters}
  122. * @param {number|boolean} [images[].limitInputPixels=268402689] - @see {@link /api-constructor#parameters|constructor parameters}
  123. * @returns {Sharp}
  124. * @throws {Error} Invalid parameters
  125. */
  126. function composite (images) {
  127. if (!Array.isArray(images)) {
  128. throw is.invalidParameterError('images to composite', 'array', images);
  129. }
  130. this.options.composite = images.map(image => {
  131. if (!is.object(image)) {
  132. throw is.invalidParameterError('image to composite', 'object', image);
  133. }
  134. const inputOptions = this._inputOptionsFromObject(image);
  135. const composite = {
  136. input: this._createInputDescriptor(image.input, inputOptions, { allowStream: false }),
  137. blend: 'over',
  138. tile: false,
  139. left: 0,
  140. top: 0,
  141. hasOffset: false,
  142. gravity: 0,
  143. premultiplied: false
  144. };
  145. if (is.defined(image.blend)) {
  146. if (is.string(blend[image.blend])) {
  147. composite.blend = blend[image.blend];
  148. } else {
  149. throw is.invalidParameterError('blend', 'valid blend name', image.blend);
  150. }
  151. }
  152. if (is.defined(image.tile)) {
  153. if (is.bool(image.tile)) {
  154. composite.tile = image.tile;
  155. } else {
  156. throw is.invalidParameterError('tile', 'boolean', image.tile);
  157. }
  158. }
  159. if (is.defined(image.left)) {
  160. if (is.integer(image.left)) {
  161. composite.left = image.left;
  162. } else {
  163. throw is.invalidParameterError('left', 'integer', image.left);
  164. }
  165. }
  166. if (is.defined(image.top)) {
  167. if (is.integer(image.top)) {
  168. composite.top = image.top;
  169. } else {
  170. throw is.invalidParameterError('top', 'integer', image.top);
  171. }
  172. }
  173. if (is.defined(image.top) !== is.defined(image.left)) {
  174. throw new Error('Expected both left and top to be set');
  175. } else {
  176. composite.hasOffset = is.integer(image.top) && is.integer(image.left);
  177. }
  178. if (is.defined(image.gravity)) {
  179. if (is.integer(image.gravity) && is.inRange(image.gravity, 0, 8)) {
  180. composite.gravity = image.gravity;
  181. } else if (is.string(image.gravity) && is.integer(this.constructor.gravity[image.gravity])) {
  182. composite.gravity = this.constructor.gravity[image.gravity];
  183. } else {
  184. throw is.invalidParameterError('gravity', 'valid gravity', image.gravity);
  185. }
  186. }
  187. if (is.defined(image.premultiplied)) {
  188. if (is.bool(image.premultiplied)) {
  189. composite.premultiplied = image.premultiplied;
  190. } else {
  191. throw is.invalidParameterError('premultiplied', 'boolean', image.premultiplied);
  192. }
  193. }
  194. return composite;
  195. });
  196. return this;
  197. }
  198. /**
  199. * Decorate the Sharp prototype with composite-related functions.
  200. * @private
  201. */
  202. module.exports = function (Sharp) {
  203. Sharp.prototype.composite = composite;
  204. Sharp.blend = blend;
  205. };