Beatmup
bindings.cpp File Reference
#include <stdexcept>
#include <vector>
#include <memory>
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include <pybind11/eval.h>
#include <pybind11/stl.h>
#include "context.h"
#include "bitmap/metric.h"
#include "bitmap/resampler.h"
#include "bitmap/tools.h"
#include "contours/contours.h"
#include "filters/color_matrix.h"
#include "filters/pixelwise_filter.h"
#include "filters/sepia.h"
#include "gpu/swapper.h"
#include "gpu/variables_bundle.h"
#include "masking/flood_fill.h"
#include "nnets/conv2d.h"
#include "nnets/classifier.h"
#include "nnets/deserialized_model.h"
#include "nnets/dense.h"
#include "nnets/image_sampler.h"
#include "nnets/pooling2d.h"
#include "nnets/softmax.h"
#include "pipelining/custom_pipeline.h"
#include "pipelining/multitask.h"
#include "scene/renderer.h"
#include "scene/scene.h"
#include "shading/image_shader.h"
#include "shading/shader_applicator.h"
#include "binding_tools.hpp"
#include "bitmap.h"
#include "chunk_collection.h"

Go to the source code of this file.

Functions

 PYBIND11_MODULE (beatmup, module)
 

Function Documentation

◆ PYBIND11_MODULE()

PYBIND11_MODULE(beatmup, module)

PixelFormat

GL::TextureHandler

AbstractTask

Context

AbstractBitmap

InternalBitmap

Python::Bitmap

Tools

BitmapResampler

Filters::PixelwiseFilter

Filters::ColorMatrix

Filters::Sepia

IntegerContour2D

FloodFill

AffineMapping

GL::VariablesBundle

Metric

ImageShader

ShaderApplicator

Scene and its layers

SceneRenderer

CustomPipeline::TaskHolder

CustomPipeline

Multitask

ChunkCollection

ChunkFile

Python::WritableChunkCollection

NNets::ActivationFunction

NNets::Size::Padding

NNets::AbstractOperation

NNets::Conv2D

NNets::Pooling2D

NNets::Pooling2D::Operator

NNets::Dense

NNets::ImageSampler

NNets::Softmax

NNets::Model

NNets::DeserializedModel

NNets::InferenceTask

NNets::Classifier

Definition at line 62 of file bindings.cpp.

62  {
63  module.doc() = R"doc(
64  beatmup module
65  --------------
66 
67  .. autosummary::
68  :toctree: python/_generate
69 
70  AbstractBitmap
71  AbstractTask
72  AffineMapping
73  Bitmap
74  BitmapResampler
75  ChunkCollection
76  ChunkFile
77  Context
78  CustomPipeline
79  FloodFill
80  ImageShader
81  IntegerContour2D
82  InternalBitmap
83  Metric
84  Multitask
85  PixelFormat
86  Scene
87  SceneRenderer
88  ShaderApplicator
89  WritableChunkCollection
90  )doc";
91 
92  auto gl = module.def_submodule("gl", R"doc(
93  beatmup.gl module
94  -----------------
95 
96  .. autosummary::
97  :toctree: python/_generate
98 
99  TextureHandler
100  VariablesBundle
101  )doc");
102 
103  auto filters = module.def_submodule("filters", R"doc(
104  beatmup.filters module
105  ----------------------
106 
107  .. autosummary::
108  :toctree: python/_generate
109 
110  ColorMatrix
111  PixelwiseFilter
112  Sepia
113  )doc");
114 
115  auto nnets = module.def_submodule("nnets", R"doc(
116  beatmup.nnets module
117  --------------------
118 
119  .. autosummary::
120  :toctree: python/_generate
121 
122  ActivationFunction
123  AbstractOperation
124  Classifier
125  Conv2D
126  Dense
127  DeserializedModel
128  ImageSampler
129  InferenceTask
130  Model
131  Padding
132  Pooling2D
133  Softmax
134  )doc");
135 
136  module.def("say_hi", []() {
137  Context ctx;
138  py::print("Beatmup is up and running, yay!");
139  py::exec("import platform; print('Python version:', platform.python_version())");
140  },
141  "Prints some greetings");
142 
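The binding above can be exercised directly from Python, assuming the compiled extension module is importable as beatmup:

    import beatmup
    beatmup.say_hi()    # prints the greeting and the Python version
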
143  /**
144  * PixelFormat
145  */
146  py::enum_<PixelFormat>(module, "PixelFormat", "Specifies bitmap pixel format")
147  .value("SINGLE_BYTE", PixelFormat::SingleByte, "single channel of 8 bits per pixel (like grayscale), unsigned integer values")
148  .value("TRIPLE_BYTE", PixelFormat::TripleByte, "3 channels of 8 bits per pixel (like RGB), unsigned integer values")
149  .value("QUAD_BYTE", PixelFormat::QuadByte, "4 channels of 8 bits per pixel (like RGBA), unsigned integer values")
150  .value("SINGLE_FLOAT", PixelFormat::SingleFloat, "single channel of 32 bits per pixel (like grayscale), single precision floating point values")
151  .value("TRIPLE_FLOAT", PixelFormat::TripleFloat, "3 channels of 32 bits per pixel, single precision floating point values")
152  .value("QUAD_FLOAT", PixelFormat::QuadFloat, "4 channels of 32 bits per pixel, single precision floating point values")
153  .value("BINARY_MASK", PixelFormat::BinaryMask, "1 bit per pixel")
154  .value("QUATERNARY_MASK", PixelFormat::QuaternaryMask, "2 bits per pixel")
155  .value("HEX_MASK", PixelFormat::HexMask, "4 bits per pixel")
156  .export_values();
157 
158  /**
159  * GL::TextureHandler
160  */
161  py::class_<GL::TextureHandler>(gl, "TextureHandler",
162  "A texture stored in GPU memory")
163 
164  .def("get_width", &GL::TextureHandler::getWidth,
165  "Returns width of the texture in pixels")
166 
167  .def("get_height", &GL::TextureHandler::getHeight,
168  "Returns height of the texture in pixels")
169 
170  .def("get_depth", &GL::TextureHandler::getDepth,
171  "Returns depth of the texture in pixels")
172 
173  .def("get_number_of_channels", &GL::TextureHandler::getNumberOfChannels,
174  "Returns the number of channels contained in the texture");
175 
176  /**
177  * AbstractTask
178  */
179  py::class_<AbstractTask>(module, "AbstractTask", "Abstract task executable in a thread pool of a Context");
180 
181  /**
182  * Context
183  */
184  py::class_<Context>(module, "Context", "Beatmup engine context")
185 
186  .def(py::init<>())
187 
188  .def(py::init<const PoolIndex>())
189 
190  .def("perform_task", &Context::performTask,
191  py::arg("task"), py::arg("pool") = 0,
192  "Performs a given task. Returns its execution time in milliseconds")
193 
194  .def("repeat_task", &Context::repeatTask,
195  py::arg("task"), py::arg("abort_current"), py::arg("pool") = 0,
196  py::keep_alive<1, 2>(), // context alive => task alive
197  R"doc(
198  Ensures that a given task is executed at least once
199 
200  :param task: The task
201  :param abort_current: If True and the same task is currently running, the abort signal is sent.
202  :param pool: A thread pool to run the task in
203  )doc")
204 
205  .def("submit_task", &Context::submitTask,
206  py::arg("task"), py::arg("pool") = 0,
207  py::keep_alive<1, 2>(), // context alive => task alive
208  "Adds a new task to the jobs queue")
209 
210  .def("submit_persistent_task", &Context::submitPersistentTask,
211  py::arg("task"), py::arg("pool") = 0,
212  py::keep_alive<1, 2>(), // context alive => task alive
213  "Adds a new persistent task to the jobs queue")
214 
215  .def("wait_for_job", &Context::waitForJob,
216  py::arg("job"), py::arg("pool") = 0,
217  "Blocks until a given job finishes")
218 
219  .def("abort_job", &Context::abortJob,
220  py::arg("job"), py::arg("pool") = 0,
221  "Aborts a given submitted job.")
222 
223  .def("wait", &Context::wait,
224  "Blocks until all the submitted jobs are executed",
225  py::arg("pool") = 0)
226 
227  .def("busy", &Context::busy,
228  "Returns `True` if a specific thread pool in the context is executing a Task",
229  py::arg("pool") = 0)
230 
231  .def("check", &Context::check,
232  "Checks if a specific thread pool is doing great: rethrows exceptions that occurred during task execution, if any.",
233  py::arg("pool") = 0)
234 
235  .def("max_allowed_worker_count", &Context::maxAllowedWorkerCount,
236  "Returns maximum number of working threads per task in a given thread pool",
237  py::arg("pool") = 0)
238 
239  .def("limit_worker_count", &Context::limitWorkerCount,
240  "Limits maximum number of threads (workers) when performing tasks in a given pool",
241  py::arg("max_value"), py::arg("pool") = 0)
242 
243  .def("is_gpu_queried", &Context::isGpuQueried,
244  "Returns `True` if GPU was queried")
245 
246  .def("is_gpu_ready", &Context::isGpuReady,
247  "Returns `True` if GPU was queried and ready to use")
248 
249  .def("warm_up_gpu", &Context::warmUpGpu, R"doc(
250  Initializes the GPU within the Context, if not done yet (has no effect otherwise).
251  GPU initialization may take some time and is normally performed when the first task using the GPU is run. Warming up
252  the GPU is useful to avoid the app getting stuck for a while when it launches its first GPU task.
253  )doc")
254 
255  .def("query_gpu_info", [](Context &ctx) -> py::object {
256  std::string vendor, renderer;
257  if (ctx.queryGpuInfo(vendor, renderer))
258  return py::make_tuple<>(vendor, renderer);
259  return py::none();
260  },
261  "Queries information about GPU and returns a tuple of vendor and renderer strings, or None if no GPU available.")
262 
263  .def("empty_gpu_recycle_bin", [](Context& ctx) {
264  auto* bin = ctx.getGpuRecycleBin();
265  if (bin)
266  bin->emptyBin();
267  }, R"doc(
268  Empties GPU recycle bin.
269  When a bitmap is destroyed in the application code, its GPU storage is not destroyed immediately. This is due to the fact that destroying a
270  texture representing the bitmap content in the GPU memory needs to be done in a thread that has access to the GPU, which is one of the
271  threads in the thread pool. The textures of destroyed bitmaps are marked as no longer used and put into a "GPU trash bin". The latter is
272  emptied by calling this function.
273  In applications doing repeated allocations and deallocations of images (e.g., processing video frames in a loop), it is recommended to empty
274  the GPU recycle bin periodically in the described way in order to prevent running out of memory.
275  )doc");
276 
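A usage sketch of the Context bindings above from Python (the GPU-related calls assume a GPU is present; query_gpu_info() returns None otherwise):

    import beatmup

    ctx = beatmup.Context()
    print(ctx.max_allowed_worker_count())   # workers available in the default thread pool
    ctx.warm_up_gpu()                       # initialize the GPU before running the first task
    print(ctx.query_gpu_info())             # (vendor, renderer) tuple, or None if no GPU
    ctx.wait()                              # block until all submitted jobs are done
    ctx.empty_gpu_recycle_bin()             # free textures of bitmaps destroyed meanwhile
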
277  /**
278  * AbstractBitmap
279  */
280  py::class_<AbstractBitmap, GL::TextureHandler>(module, "AbstractBitmap",
281  "Abstract bitmap class")
282 
283  .def("get_pixel_format", &AbstractBitmap::getPixelFormat,
284  "Returns pixel format of the bitmap")
285 
286  .def("get_memory_size", &AbstractBitmap::getMemorySize,
287  "Returns bitmap size in bytes")
288 
289  .def("get_context", &AbstractBitmap::getContext,
290  "Returns Context the current bitmap is attached to")
291 
292  .def("zero", &AbstractBitmap::zero,
293  "Sets all the pixels to zero")
294 
295  .def("__str__", &AbstractBitmap::toString,
296  "Returns a string describing the bitmap")
297 
298  .def("save_bmp", &AbstractBitmap::saveBmp, py::arg("filename"),
299  "Saves a bitmap to a BMP file");
300 
301  /**
302  * InternalBitmap
303  */
304  py::class_<InternalBitmap, AbstractBitmap>(module, "InternalBitmap", R"doc(
305  Bitmap whose memory is managed by the Beatmup engine.
306  Main pixel data container used internally by Beatmup. Applications would typically use a different incarnation
307  of AbstractBitmap implementing I/O operations, and InternalBitmap instances are used to exchange data between
308  different processing entities (AbstractTask instances) within the application.
309  )doc")
310 
311  .def(py::init<Context&, PixelFormat, int, int, bool>(),
312  py::arg("context"), py::arg("pixel_format"), py::arg("width"), py::arg("height"), py::arg("allocate") = true,
313  py::keep_alive<1, 2>()) // bitmap alive => context alive
314 
315  .def(py::init<Context&, const char*>(),
316  py::keep_alive<1, 2>()); // bitmap alive => context alive
317 
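From Python, an InternalBitmap is allocated for a given Context and pixel format as follows (a sketch with arbitrary sizes):

    import beatmup

    ctx = beatmup.Context()
    image = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 640, 480)
    image.zero()        # set all pixels to zero
    print(image)        # textual description provided by AbstractBitmap
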
318  /**
319  * Python::Bitmap
320  */
321  py::class_<Python::Bitmap, AbstractBitmap>(module, "Bitmap", py::buffer_protocol(),
322  "A bitmap wrapping a numpy container without copying")
323 
324  .def(py::init<Beatmup::Context&, py::buffer&>(),
325  py::keep_alive<1, 2>()) // bitmap alive => context alive
326 
327  .def_buffer([](Python::Bitmap& bitmap) {
328  Swapper::pullPixels(bitmap);
329  return bitmap.getPythonBuffer();
330  });
331 
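The buffer protocol makes it possible to wrap a numpy array without copying and to view the bitmap content back as an array; a sketch assuming numpy and an 8-bit RGB layout:

    import numpy
    import beatmup

    ctx = beatmup.Context()
    array = numpy.zeros((480, 640, 3), dtype=numpy.uint8)    # height x width x channels
    bitmap = beatmup.Bitmap(ctx, array)                      # wraps the array without copying
    view = numpy.array(bitmap, copy=False)                   # pixel data, pulled from GPU if needed
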
332  /**
333  * Tools
334  */
335  module.def_submodule("bitmaptools")
336  .def("make_copy", (InternalBitmap* (*)(AbstractBitmap&, Context&, PixelFormat))&BitmapTools::makeCopy,
337  py::arg("bitmap"), py::arg("context"), py::arg("format"),
338  py::return_value_policy::take_ownership,
339  py::keep_alive<0, 1>(), // bitmap alive => context alive
340  R"doc(
341  Makes a copy of a bitmap for a given Context converting the data to a given pixel format.
342  Can be used to exchange image content between different instances of Context.
343  The copy is done in an AbstractTask run in the default thread pool of the source bitmap context.
344 
345  :param bitmap: the bitmap to copy
346  :param context: the Context instance the copy is associated with
347  :param format: pixel format of the copy
348  )doc")
349 
350  .def("chessboard", &BitmapTools::chessboard,
351  py::arg("context"), py::arg("width"), py::arg("height"), py::arg("cell_size"), py::arg("format") = PixelFormat::BinaryMask,
352  py::return_value_policy::take_ownership,
353  py::keep_alive<0, 1>(), // bitmap alive => context alive
354  R"doc(
355  Renders a chessboard image.
356 
357  :param context: a Context instance
358  :param width: width in pixels of the resulting bitmap
359  :param height: height in pixels of the resulting bitmap
360  :param cell_size: size of a single chessboard cell in pixels
361  :param format: pixel format of the resulting bitmap
362  )doc")
363 
364  .def("noise", [](AbstractBitmap& bitmap) { BitmapTools::noise(bitmap); },
365  py::arg("bitmap"),
366  "Fills a given bitmap with random noise.")
367 
368  .def("noise", [](AbstractBitmap& bitmap, const py::tuple& area) { BitmapTools::noise(bitmap, Python::toRectangle<int>(area)); },
369  py::arg("bitmap"), py::arg("area"),
370  "Replaces a rectangular area in a bitmap by random noise.")
371 
372  .def("make_opaque", [](AbstractBitmap& bitmap, const py::tuple& area) {
373  BitmapTools::makeOpaque(bitmap, Python::toRectangle<int>(area));
374  },
375  py::arg("bitmap"), py::arg("area"),
376  "Makes a bitmap area opaque")
377 
378  .def("invert", &BitmapTools::invert,
379  py::arg("input"), py::arg("output"),
380  "Inverses colors of an image in a pixelwise fashion")
381 
382  .def("scanline_search", [](AbstractBitmap& bitmap, const py::tuple& value, const py::tuple& startFrom) -> py::object {
383  auto pt = BitmapTools::scanlineSearch(bitmap, Python::toPixfloat4(value), Python::toPoint<int>(startFrom));
384  if (pt.x == -1 && pt.y == -1)
385  return py::none();
386  return Python::toTuple(pt);
387  },
388  py::arg("bitmap"), py::arg("value"), py::arg("start_from") = Python::toTuple(IntPoint::ZERO),
389  R"doc(
390  Goes through a bitmap in scanline order (left to right, top to bottom) until a pixel of a given color is met.
391 
392  :param bitmap: the bitmap to scan
393  :param value: the color value to look for
394  :param start_from: starting pixel position
395 
396  Returns the next closest position of the searched value (in scanline order) or None if not found.
397  )doc");
398 
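A sketch of using the bitmaptools submodule from Python (sizes and the searched color are arbitrary):

    import beatmup

    ctx = beatmup.Context()
    board = beatmup.bitmaptools.chessboard(ctx, 320, 240, 16)       # binary mask by default
    photo = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    beatmup.bitmaptools.noise(photo)                                # fill with random noise
    negative = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    beatmup.bitmaptools.invert(photo, negative)                     # pixelwise color inversion
    hit = beatmup.bitmaptools.scanline_search(board, (1, 1, 1, 1), (0, 0))   # first white pixel or None
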
399  /**
400  * BitmapResampler
401  */
402  py::class_<BitmapResampler, AbstractTask> bitmapResampler(module, "BitmapResampler", R"doc(
403  Resamples an image to a given resolution.
404  Implements different resampling approaches, including standard ones (bilinear, bicubic, etc.) and a neural network-based 2x upsampling approach dubbed as "x2".
405  )doc");
406 
407  bitmapResampler.def(py::init<Context&>(), py::arg("context"),
408  py::keep_alive<1, 2>()) // resampler alive => context alive
409 
410  .def_property("input", &BitmapResampler::getInput,
411  py::cpp_function(&BitmapResampler::setInput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
412  "Input bitmap")
413 
414  .def_property("output", &BitmapResampler::getOutput,
415  py::cpp_function(&BitmapResampler::setOutput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
416  "Output bitmap")
417 
418  .def_property("mode", &BitmapResampler::getMode, &BitmapResampler::setMode, "Resampling algorithm (mode)")
419 
420  .def_property("cubic_parameter", &BitmapResampler::getCubicParameter, &BitmapResampler::setCubicParameter,
421  "Cubic resampling parameter (`alpha`)")
422 
423  .def_property("input_rectangle", [](BitmapResampler& resampler) {
424  return Python::toTuple(resampler.getInputRect());
425  },
426  [](BitmapResampler& resampler, const py::tuple& area) {
427  resampler.setInputRect(Python::toRectangle<int>(area));
428  },
429  "Specifies a rectangular working area in the input bitmap. Pixels outside of this area are not used.")
430 
431  .def_property("output_rectangle", [](BitmapResampler& resampler) {
432  return Python::toTuple(resampler.getOutputRect());
433  },
434  [](BitmapResampler& resampler, const py::tuple& area) {
435  resampler.setOutputRect(Python::toRectangle<int>(area));
436  },
437  "Specifies a rectangular working area in the output bitmap. Pixels outside of this area are not affected.");
438 
439  py::enum_<BitmapResampler::Mode>(bitmapResampler, "Mode", "Resampling mode (algorithm) specification")
440  .value("NEAREST_NEIGHBOR", BitmapResampler::Mode::NEAREST_NEIGHBOR, "zero-order: usual nearest neighbor")
441  .value("BOX", BitmapResampler::Mode::BOX, "'0.5-order': anti-aliasing box filter; identical to nearest neighbor when upsampling")
442  .value("LINEAR", BitmapResampler::Mode::LINEAR, "first order: bilinear interpolation")
443  .value("CUBIC", BitmapResampler::Mode::CUBIC, "third order: bicubic interpolation")
444  .value("CONVNET", BitmapResampler::Mode::CONVNET, "upsampling x2 using a convolutional neural network")
445  .export_values();
446 
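Resampling from Python then amounts to configuring the task and running it in a Context (a sketch; the bicubic mode and the alpha value are arbitrary choices):

    import beatmup

    ctx = beatmup.Context()
    source = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    target = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 640, 480)

    resampler = beatmup.BitmapResampler(ctx)
    resampler.input = source
    resampler.output = target
    resampler.mode = beatmup.BitmapResampler.Mode.CUBIC
    resampler.cubic_parameter = -0.5
    ctx.perform_task(resampler)
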
447  /**
448  * Filters::PixelwiseFilter
449  */
450  py::class_<Filters::PixelwiseFilter, AbstractTask>(filters, "PixelwiseFilter",
451  "Base class for image filters processing a given bitmap in a pixelwise fashion.")
452 
453  .def_property("input",
454  &Filters::PixelwiseFilter::getInput,
455  py::cpp_function(&Filters::PixelwiseFilter::setInput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
456  "Input bitmap")
457 
458  .def_property("output",
459  &Filters::PixelwiseFilter::getOutput,
460  py::cpp_function(&Filters::PixelwiseFilter::setOutput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
461  "Output bitmap");
462 
463  /**
464  * Filters::ColorMatrix
465  */
466  py::class_<Filters::ColorMatrix, Filters::PixelwiseFilter>(filters, "ColorMatrix",
467  "Color matrix filter: applies an affine mapping Ax + B at each pixel of a given image in RGBA space")
468 
469  .def(py::init<>())
470 
471  .def("set_coefficients", [](Filters::ColorMatrix& colorMatrix, int outChannel, float add, const py::tuple& rgba) {
472  const color4f c = Python::toColor4f(rgba);
473  colorMatrix.setCoefficients(outChannel, add, c.r, c.g, c.b, c.a);
474  },
475  "Sets color matrix coefficients for a specific output color channel",
476  py::arg("out_channel"), py::arg("add"), py::arg("rgba"))
477 
478  .def("set_hsv_correction", &Filters::ColorMatrix::setHSVCorrection,
479  "Resets the current transformation to a matrix performing standard HSV correction",
480  py::arg("hue_shift_degrees"), py::arg("saturation_factor"), py::arg("value_factor"))
481 
482  .def("set_color_inversion", [](Filters::ColorMatrix& colorMatrix, const py::tuple& hue, float saturation, float value){
483  colorMatrix.setColorInversion(Python::toColor3f(hue), saturation, value);
484  },
485  "Resets the current transformation to a fancy color inversion mode with a fixed hue point",
486  py::arg("preserved_hue"), py::arg("saturation_factor"), py::arg("value_factor"))
487 
488  .def("apply_contrast", &Filters::ColorMatrix::applyContrast,
489  "Applies a contrast adjustment by a given factor on top of the current transformation",
490  py::arg("factor"))
491 
492  .def("set_brightness", &Filters::ColorMatrix::setBrightness,
493  "Sets a brightness adjustment by a given factor (non-cumulative with respect to the current transformation)",
494  py::arg("brightness"));
495 
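A sketch of applying the color matrix filter from Python (the correction parameters are arbitrary):

    import beatmup

    ctx = beatmup.Context()
    image = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    result = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)

    color_matrix = beatmup.filters.ColorMatrix()
    color_matrix.input = image
    color_matrix.output = result
    color_matrix.set_hsv_correction(30.0, 1.2, 1.0)   # hue shift in degrees, saturation and value factors
    color_matrix.apply_contrast(1.1)
    ctx.perform_task(color_matrix)
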
496  /**
497  * Filters::Sepia
498  */
499  py::class_<Filters::Sepia, Filters::PixelwiseFilter>(filters, "Sepia", "Sepia filter: an example of :class:`~beatmup.filters.PixelwiseFilter` implementation.")
500  .def(py::init<>());
501 
502  /**
503  * IntegerContour2D
504  */
505  py::class_<IntegerContour2D>(module, "IntegerContour2D",
506  "A sequence of integer-valued 2D points")
507 
508  .def(py::init<>())
509 
510  .def("add_point", &IntegerContour2D::addPoint,
511  "Adds a new point to the end of the contour. Some points may be skipped to optimize the storage.",
512  py::arg("x"), py::arg("y"))
513 
514  .def("clear", &IntegerContour2D::clear,
515  "Removes contour content")
516 
517  .def("get_point_count", &IntegerContour2D::getPointCount,
518  "Returns number of points in the contour")
519 
520  .def("get_length", &IntegerContour2D::getLength,
521  "Returns contour length")
522 
523  .def("get_point",
524  [](IntegerContour2D& contour, int index) { return Python::toTuple(contour.getPoint(index)); },
525  py::arg("index"),
526  "Returns a point by its index");
527 
528  /**
529  * FloodFill
530  */
531  py::class_<FloodFill, AbstractTask> floodFill(module, "FloodFill",
532  R"doc(
533  Discovers areas of similar colors up to a tolerance threshold around given positions (seeds) in the input image.
534  These areas are filled with white color in another image (output). If the output bitmap is a binary mask,
535  corresponding pixels are set to `1`. The rest of the output image remains unchanged.
536  Optionally, computes contours around the discovered areas and stores the contour positions.
537  Also optionally, applies post-processing by dilating or eroding the discovered regions in the output image.
538  )doc");
539 
540  py::enum_<FloodFill::BorderMorphology>(floodFill, "BorderMorphology",
541  "Morphological postprocessing operation applied to the discovered connected components")
542  .value("NONE", FloodFill::BorderMorphology::NONE, "no postprocessing")
543  .value("DILATE", FloodFill::BorderMorphology::DILATE, "apply a dilatation")
544  .value("ERODE", FloodFill::BorderMorphology::ERODE, "apply an erosion")
545  .export_values();
546 
547  floodFill.def(py::init<>())
548 
549  .def_property("input",
550  &FloodFill::getInput,
551  py::cpp_function(&FloodFill::setInput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
552  "Input bitmap")
553 
554  .def_property("output",
555  &FloodFill::getOutput,
556  py::cpp_function(&FloodFill::setOutput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
557  "Output bitmap")
558 
559  .def_property("tolerance", &FloodFill::getTolerance, &FloodFill::setTolerance,
560  "Intensity tolerance")
561 
562  .def("set_mask_pos",
563  [](FloodFill& ff, const py::tuple& pos) { ff.setMaskPos(Python::toPoint<int>(pos)); },
564  py::arg("pos"),
565  "Specifies left-top corner position of the mask inside the input bitmap")
566 
567  .def("set_seeds", [](FloodFill& ff, const py::list& seeds) {
568  IntPoint* pts = new IntPoint[seeds.size()];
569  for (py::ssize_t i = 0; i < seeds.size(); ++i)
570  pts[i] = Python::toPoint<int>(seeds[i]);
571  ff.setSeeds(pts, seeds.size());
572  delete[] pts;
573  },
574  py::arg("seeds"),
575  "Specifies a set of seeds (starting points)")
576 
577  .def("set_compute_contours", &FloodFill::setComputeContours, py::arg("compute"),
578  "Enables or disables contours computation")
579 
580  .def("set_border_postprocessing", &FloodFill::setBorderPostprocessing,
581  py::arg("operation"), py::arg("hold_radius"), py::arg("release_radius"),
582  R"doc(
583  Specifies a morphological operation to apply to the mask border.
584 
585  :param operation: a postprocessing operation
586  :param hold_radius: erosion/dilation hold radius (output values set to 1)
587  :param release_radius: erosion/dilation radius of transition from 1 to 0
588  )doc")
589 
590  .def("get_bounds",
591  [](FloodFill& ff) { return Python::toTuple(ff.getBounds()); },
592  "Returns bounding box of the computed mask")
593 
594  .def("get_contour_count", &FloodFill::getContourCount,
595  "Returns number of discovered contours")
596 
597  .def("get_contour", &FloodFill::getContour,
598  py::arg("index"),
599  "Returns a contour by index if compute_contours was set to True, throws an exception otherwise");
600 
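A FloodFill usage sketch from Python (seed positions and tolerance are arbitrary):

    import beatmup

    ctx = beatmup.Context()
    image = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    mask = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.BINARY_MASK, 320, 240)

    fill = beatmup.FloodFill()
    fill.input = image
    fill.output = mask
    fill.tolerance = 0.05
    fill.set_seeds([(10, 10), (160, 120)])
    fill.set_compute_contours(True)
    ctx.perform_task(fill)
    print(fill.get_contour_count(), fill.get_bounds())
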
601  /**
602  * AffineMapping
603  */
604  py::class_<AffineMapping>(module, "AffineMapping", "2x3 affine mapping containing a 2x2 matrix and a 2D point")
605 
606  .def(py::init<>())
607 
608  .def("get_position",
609  [](const AffineMapping& mapping) { return Python::toTuple(mapping.getPosition()); },
610  "Returns the mapping origin")
611 
612  .def("get_matrix",
613  [](const AffineMapping& mapping) { return Python::toTuple(mapping.getMatrix()); },
614  "Returns the mapping matrix")
615 
616  .def("__call__",
617  [](const AffineMapping& mapping, const py::tuple& point) { return Python::toTuple(mapping(Python::toPoint<float>(point))); },
618  py::arg("point"),
619  "Maps a point")
620 
621  .def("invert", &AffineMapping::invert,
622  "Inverts the mapping")
623 
624  .def("get_inverse", (AffineMapping (AffineMapping::*)() const)&AffineMapping::getInverse,
625  py::return_value_policy::take_ownership,
626  "Returns inverse mapping")
627 
628  .def("get_inverse",
629  [](const AffineMapping& mapping, const py::tuple& point) { return Python::toTuple(mapping.getInverse(Python::toPoint<float>(point))); },
630  py::arg("point"),
631  "Computes inverse mapping of a point")
632 
633  .def("set_center_position", [](AffineMapping& mapping, const py::tuple& point) {
634  mapping.setCenterPosition(Python::toPoint<float>(point));
635  },
636  py::arg("point"),
637  "Adjusts the mapping origin so that the center of the axes box matches a given point")
638 
639  .def("translate", [](AffineMapping& mapping, const py::tuple& shift) {
640  mapping.translate(Python::toPoint<float>(shift));
641  },
642  py::arg("shift"),
643  "Translates the mapping")
644 
645  .def("scale", [](AffineMapping& mapping, float factor, const py::tuple& fixedPoint) {
646  mapping.scale(factor, Python::toPoint<float>(fixedPoint));
647  },
648  py::arg("factor"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
649  "Scales the mapping around a given point in target domain")
650 
651  .def("rotate_degrees", [](AffineMapping& mapping, float angle, const py::tuple& fixedPoint) {
652  mapping.rotateDegrees(angle, Python::toPoint<float>(fixedPoint));
653  },
654  py::arg("angle"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
655  "Rotates the mapping around a given point in target domain")
656 
657  .def("is_point_inside", [](AffineMapping& mapping, const py::tuple& point) {
658  return mapping.isPointInside(Python::toPoint<float>(point));
659  },
660  py::arg("point"),
661  "Tests whether a point from the output domain is inside the input axes span");
662 
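AffineMapping can be manipulated directly from Python, for example:

    import beatmup

    mapping = beatmup.AffineMapping()
    mapping.rotate_degrees(45.0, (0.5, 0.5))     # rotate around the center of the unit square
    mapping.scale(2.0)                           # fixed point defaults to (0, 0)
    print(mapping.get_position(), mapping.get_matrix())
    print(mapping((0.25, 0.25)))                 # map a point
    print(mapping.is_point_inside((0.9, 0.9)))
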
663  /**
664  * GL::VariablesBundle
665  */
666  py::class_<GL::VariablesBundle>(gl, "VariablesBundle",
667  "Collection storing GLSL program parameters (scalars, matrices, vectors) to communicate them from user to GPU-managing thread")
668 
669  .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int))&GL::VariablesBundle::setInteger,
670  py::arg("name"), py::arg("value"),
671  "Sets a scalar integer uniform value")
672 
673  .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int))&GL::VariablesBundle::setInteger,
674  py::arg("name"), py::arg("x"), py::arg("y"),
675  "Sets a 2D integer uniform vector value")
676 
677  .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int, int))&GL::VariablesBundle::setInteger,
678  py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
679  "Sets a 3D integer uniform vector value")
680 
681  .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int, int, int))&GL::VariablesBundle::setInteger,
682  py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
683  "Sets a 4D integer uniform vector value")
684 
685  .def("set_float", (void (GL::VariablesBundle::*)(std::string, float))&GL::VariablesBundle::setFloat,
686  py::arg("name"), py::arg("value"),
687  "Sets a scalar float uniform value")
688 
689  .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float))&GL::VariablesBundle::setFloat,
690  py::arg("name"), py::arg("x"), py::arg("y"),
691  "Sets a 2D float uniform vector value")
692 
693  .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float, float))&GL::VariablesBundle::setFloat,
694  py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
695  "Sets a 3D float uniform vector value")
696 
697  .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float, float, float))&GL::VariablesBundle::setFloat,
698  py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
699  "Sets a 4D float uniform vector value")
700 
701  .def("set_float_matrix2", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
702  if (matrix.size() != 2*2)
703  throw std::invalid_argument("Expected a list-like input containing " + std::to_string(2*2) +
704  " values but got " + std::to_string(matrix.size()));
705  instance.setFloatMatrix2(name, matrix.data());
706  },
707  py::arg("name"), py::arg("matrix"),
708  "Sets a float 2*2 matrix variable value")
709 
710  .def("set_float_matrix3", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
711  if (matrix.size() != 3*3)
712  throw std::invalid_argument("Expected a list-like input containing " + std::to_string(3*3) +
713  " values but got " + std::to_string(matrix.size()));
714  instance.setFloatMatrix3(name, matrix.data());
715  },
716  py::arg("name"), py::arg("matrix"),
717  "Sets a float 3*3 matrix variable value")
718 
719  .def("set_float_matrix4", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
720  if (matrix.size() != 4*4)
721  throw std::invalid_argument("Expected a list-like input containing " + std::to_string(4*4) +
722  " values but got " + std::to_string(matrix.size()));
723  instance.setFloatMatrix4(name, matrix.data());
724  },
725  py::arg("name"), py::arg("matrix"),
726  "Sets a float 4*4 matrix variable value")
727 
728  .def("set_float_array", &GL::VariablesBundle::setFloatArray,
729  py::arg("name"), py::arg("values"),
730  "Sets a float array variable value");
731 
732  /**
733  * Metric
734  */
735  py::class_<Metric, AbstractTask> metric(module, "Metric", "Measures the difference between two bitmaps");
736 
737  py::enum_<Metric::Norm>(metric, "Norm", "Norm (distance) to measure between two images")
738  .value("L1", Metric::Norm::L1, "sum of absolute differences")
739  .value("L2", Metric::Norm::L2, "Euclidean distance: square root of the sum of squared differences")
740  .export_values();
741 
742  metric.def(py::init<>())
743 
744  .def("set_bitmaps", (void (Metric::*)(AbstractBitmap*, AbstractBitmap*))&Metric::setBitmaps,
745  py::arg("bitmap1"), py::arg("bitmap2"),
746  py::keep_alive<1, 2>(), py::keep_alive<1, 3>(), // metric alive => bitmaps alive
747  "Sets input images")
748 
749  .def("set_bitmaps", [](Metric& metric, AbstractBitmap* bitmap1, const py::tuple& roi1, AbstractBitmap* bitmap2, const py::tuple& roi2){
750  metric.setBitmaps(bitmap1, Python::toRectangle<int>(roi1),
751  bitmap2, Python::toRectangle<int>(roi2));
752  },
753  py::arg("bitmap1"), py::arg("roi1"), py::arg("bitmap2"), py::arg("roi2"),
754  py::keep_alive<1, 2>(), py::keep_alive<1, 4>(), // metric alive => bitmaps alive
755  "Sets input images and rectangular regions delimiting the measurement areas")
756 
757  .def("set_norm", &Metric::setNorm, "Specifies the norm to use in the measurement")
758 
759  .def("get_result", &Metric::getResult, "Returns the measurement result (after the task is executed)")
760 
761  .def_static("psnr", &Metric::psnr, py::arg("bitmap1"), py::arg("bitmap2"),
762  "Computes peak signal-to-noise ratio in dB for two given images");
763 
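A sketch of measuring the difference between two bitmaps from Python:

    import beatmup

    ctx = beatmup.Context()
    a = beatmup.bitmaptools.chessboard(ctx, 256, 256, 8, beatmup.PixelFormat.SINGLE_BYTE)
    b = beatmup.bitmaptools.chessboard(ctx, 256, 256, 16, beatmup.PixelFormat.SINGLE_BYTE)

    metric = beatmup.Metric()
    metric.set_bitmaps(a, b)
    metric.set_norm(beatmup.Metric.Norm.L2)
    ctx.perform_task(metric)
    print(metric.get_result())
    print(beatmup.Metric.psnr(a, b))
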
764  /**
765  * ImageShader
766  */
767  py::class_<ImageShader, GL::VariablesBundle>(module, "ImageShader", "A GLSL program to process images")
768 
769  .def(py::init<Context&>(), py::arg("context"), py::keep_alive<1, 2>())
770 
771  .def("set_source_code", &ImageShader::setSourceCode,
772  py::arg("glsl"),
773  R"doc(Passes new source code to the fragment shader.
774  The new source code will be compiled and linked when next rendering occurs.)doc")
775 
776  .def_property_readonly_static("INPUT_IMAGE_DECL_TYPE",
777  [](py::object) { return ImageShader::INPUT_IMAGE_DECL_TYPE; },
778  "A virtual input image type defined at shader compile time as an ordinary texture or an OES texture sampler, depending on the bound input")
779 
780  .def_property_readonly_static("INPUT_IMAGE_ID",
781  [](py::object) { return ImageShader::INPUT_IMAGE_ID; },
782  "Shader variable name referring to the input image")
783 
784  .def_property_readonly_static("CODE_HEADER",
785  [](py::object) { return ImageShader::CODE_HEADER; },
786  "Shader code header containing necessary declarations");
787 
788  /**
789  * ShaderApplicator
790  */
791  py::class_<ShaderApplicator, AbstractTask>(module, "ShaderApplicator", "A task applying an image shader to bitmaps")
792 
793  .def(py::init<>())
794 
795  .def("add_sampler", &ShaderApplicator::addSampler,
796  py::arg("bitmap"), py::arg("uniform_name") = ImageShader::INPUT_IMAGE_ID,
797  py::keep_alive<1, 2>(), // applicator alive => bitmap alive
798  R"doc(
799  Connects a bitmap to a shader uniform variable.
800  The bitmap connected to ImageShader::INPUT_IMAGE_ID is used to resolve the sampler type (ImageShader::INPUT_IMAGE_DECL_TYPE).
801  )doc")
802 
803  .def("remove_sampler", &ShaderApplicator::removeSampler,
804  py::arg("uniform_name"),
805  R"doc(
806  Removes a sampler with a uniform variable name.
807  Returns `True` if a sampler associated with the given variable existed and was removed, `False` otherwise.
808  )doc")
809 
810  .def("clear_samplers", &ShaderApplicator::clearSamplers, "Clears all connections of bitmaps to samplers")
811 
812  .def_property("shader",
813  &ShaderApplicator::getShader,
814  py::cpp_function(&ShaderApplicator::setShader, py::keep_alive<1, 2>()), // applicator alive => shader alive
815  "Shader to apply to the bitmap(s)")
816 
817  .def_property("output_bitmap",
818  &ShaderApplicator::getOutputBitmap,
819  py::cpp_function(&ShaderApplicator::setOutputBitmap, py::keep_alive<1, 2>()), // applicator alive => bitmap alive
820  "Output bitmap");
821 
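A sketch of running a custom fragment shader on a bitmap from Python; fragment_shader_code stands for an application-provided GLSL string and is not defined here:

    import beatmup

    ctx = beatmup.Context()
    source = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    result = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)

    shader = beatmup.ImageShader(ctx)
    shader.set_source_code(fragment_shader_code)   # GLSL string supplied by the application

    applicator = beatmup.ShaderApplicator()
    applicator.shader = shader
    applicator.add_sampler(source)                 # bound to ImageShader.INPUT_IMAGE_ID by default
    applicator.output_bitmap = result
    ctx.perform_task(applicator)
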
822  /**
823  * Scene and its layers
824  */
825  py::class_<Scene> scene(module, "Scene", "An ordered set of layers representing renderable content");
826 
827  py::class_<Scene::Layer> layer(scene, "Layer",
828  R"doc(
829  Abstract scene layer having name, type, geometry and some content to display.
830  The layer geometry is defined by an AffineMapping describing the position and the orientation of the layer content in the rendered image.
831  )doc");
832 
833  py::enum_<Scene::Layer::Type>(layer, "Type", "Layer type")
834  .value("SCENE", Scene::Layer::Type::SceneLayer, "layer containing a scene")
835  .value("BITMAP", Scene::Layer::Type::BitmapLayer, "layer displaying a bitmap")
836  .value("MASKED_BITMAP", Scene::Layer::Type::MaskedBitmapLayer, "layer displaying a bitmap with mask")
837  .value("SHAPED_BITMAP", Scene::Layer::Type::ShapedBitmapLayer, "layer displaying a bitmap within a shape")
838  .value("SHADED_BITMAP", Scene::Layer::Type::ShadedBitmapLayer, "layer displaying a bitmap through a custom fragment shader")
839  .export_values();
840 
841  layer.def("get_type", &Scene::Layer::getType, "Returns layer type")
842 
843  .def_property("name", &Scene::Layer::getName, &Scene::Layer::setName)
844 
845  .def_property("mapping", (AffineMapping& (Scene::Layer::*)())&Scene::Layer::getMapping, &Scene::Layer::setMapping,
846  "Layer mapping in parent coordinates")
847 
848  .def("test_point", &Scene::Layer::testPoint,
849  py::arg("x"), py::arg("y"),
850  "Tests if a given point falls in the layer")
851 
852  .def("test_point", [](const Scene::Layer& layer, const py::tuple& point) {
853  Point pt = Python::toPoint<float>(point);
854  return layer.testPoint(pt.x, pt.y);
855  },
856  py::arg("point"),
857  "Tests if a given point falls in the layer")
858 
859  .def("get_child", &Scene::Layer::getChild,
860  py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
861  "Picks a child layer at given point, if any")
862 
863  .def("get_child", [](const Scene::Layer& layer, const py::tuple& point, unsigned int recursionDepth) {
864  Point pt = Python::toPoint<float>(point);
865  return layer.getChild(pt.x, pt.y, recursionDepth);
866  },
867  py::arg("point"), py::arg("recursion_depth") = 0,
868  "Picks a child layer at given point, if any")
869 
870  .def_property("visible", &Scene::Layer::isVisible, &Scene::Layer::setVisible,
871  "Controls the layer visibility. If set to `False`, the layer and its sublayers are ignored when rendering.")
872 
873  .def_property("phantom", &Scene::Layer::isPhantom, &Scene::Layer::setPhantom,
874  "If set to `True`, the layer goes \"phantom\": it and its sublayers, if any, are ignored when searching a layer by point.");
875 
876  py::class_<Scene::SceneLayer, Scene::Layer>(scene, "SceneLayer",
877  "Layer containing an entire scene")
878  .def("get_scene", &Scene::SceneLayer::getScene, "Returns a Scene contained in the Layer");
879 
880  py::class_<Scene::BitmapLayer, Scene::Layer>(scene, "BitmapLayer",
881  R"doc(
882  Layer having an image to render.
883  The image has a position and orientation with respect to the layer. This is expressed with an affine mapping applied on top of the layer
884  mapping.
885  )doc")
886 
887  .def_property("bitmap",
888  &Scene::BitmapLayer::getBitmap,
889  py::cpp_function(&Scene::BitmapLayer::setBitmap, py::keep_alive<1, 2>()), // layer alive => bitmap alive
890  "Bitmap attached to the layer")
891 
892  .def_property("bitmap_mapping",
893  (AffineMapping& (Scene::BitmapLayer::*)())&Scene::BitmapLayer::getBitmapMapping, &Scene::BitmapLayer::setBitmapMapping,
894  "Bitmap geometry mapping applied on top of the layer mapping")
895 
896  .def_property("modulation_color",
897  [](Scene::BitmapLayer& layer) { return Python::toTuple(layer.getModulationColor()); },
898  [](Scene::BitmapLayer& layer, const py::tuple& color){ layer.setModulationColor(Python::toColor4i(color)); },
899  "Modulation color (R, G, B, A). Multiplies bitmap pixel colors when rendering");
900 
901  py::class_<Scene::CustomMaskedBitmapLayer, Scene::BitmapLayer>(scene, "CustomMaskedBitmapLayer",
902  R"doc(
903  Layer containing a bitmap and a mask applied to the bitmap when rendering.
904  Both bitmap and mask have their own positions and orientations relative to the layer's position and orientation.
905  )doc")
906 
907  .def_property("mask_mapping",
908  (AffineMapping& (Scene::CustomMaskedBitmapLayer::*)())&Scene::CustomMaskedBitmapLayer::getMaskMapping,
909  &Scene::CustomMaskedBitmapLayer::setMaskMapping,
910  "Mask geometry mapping applied on top of the layer mapping")
911 
912  .def_property("background_color",
913  [](Scene::CustomMaskedBitmapLayer& layer) { return Python::toTuple(layer.getBackgroundColor()); },
914  [](Scene::CustomMaskedBitmapLayer& layer, const py::tuple& color){ layer.setBackgroundColor(Python::toColor4i(color)); },
915  "Background color (R, G, B, A). Fills layer pixels falling out of the mask area");
916 
917  py::class_<Scene::MaskedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "MaskedBitmapLayer",
918  "Bitmap layer using another bitmap as a mask")
919 
920  .def_property("mask",
921  &Scene::MaskedBitmapLayer::getMask,
922  py::cpp_function(&Scene::MaskedBitmapLayer::setMask, py::keep_alive<1, 2>()), // layer alive => bitmap alive
923  "Mask bitmap");
924 
925  py::class_<Scene::ShapedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "ShapedBitmapLayer",
926  "Layer containing a bitmap and a parametric mask (shape)")
927 
928  .def_property("border_width", &Scene::ShapedBitmapLayer::getBorderWidth, &Scene::ShapedBitmapLayer::setBorderWidth,
929  "Mask border thickness in pixels or normalized coordinates. " \
930  "These pixels are cropped out from the image and replaced with the background color.")
931 
932  .def_property("slope_width", &Scene::ShapedBitmapLayer::getSlopeWidth, &Scene::ShapedBitmapLayer::setSlopeWidth,
933  "Mask border slope width in pixels or normalized coordinates. "\
934  "The border slope is a linear transition from background color to image pixels.")
935 
936  .def_property("corner_radius", &Scene::ShapedBitmapLayer::getCornerRadius, &Scene::ShapedBitmapLayer::setCornerRadius,
937  "Radius of mask corners in pixels or normalized coordinates")
938 
939  .def_property("in_pixels", &Scene::ShapedBitmapLayer::getInPixels, &Scene::ShapedBitmapLayer::setInPixels,
940  "If set to `True`, all the parameter values are interpreted as if given in pixels. Otherwise the normalized coordinates are used.");
941 
942  py::class_<Scene::ShadedBitmapLayer, Scene::BitmapLayer>(scene, "ShadedBitmapLayer", "Bitmap layer using a custom shader")
943 
944  .def_property("shader",
945  &Scene::ShadedBitmapLayer::getShader,
946  py::cpp_function(&Scene::ShadedBitmapLayer::setShader, py::keep_alive<1, 2>()), // layer alive => shader alive
947  "Fragment shader taking the layer bitmap as texture");
948 
949  scene.def(py::init<>())
950 
951  .def("new_bitmap_layer", (Scene::BitmapLayer& (Scene::*)(const char*))&Scene::newBitmapLayer,
952  py::arg("name"),
953  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
954  "Creates a new bitmap layer")
955 
956  .def("new_bitmap_layer", (Scene::BitmapLayer& (Scene::*)())&Scene::newBitmapLayer,
957  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
958  "Creates a new bitmap layer")
959 
960  .def("new_masked_bitmap_layer", (Scene::MaskedBitmapLayer& (Scene::*)(const char*))&Scene::newMaskedBitmapLayer,
961  py::arg("name"),
962  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
963  "Creates a new masked bitmap layer")
964 
965  .def("new_masked_bitmap_layer", (Scene::MaskedBitmapLayer& (Scene::*)())&Scene::newMaskedBitmapLayer,
966  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
967  "Creates a new masked bitmap layer")
968 
969  .def("new_shaped_bitmap_layer", (Scene::ShapedBitmapLayer& (Scene::*)(const char*))&Scene::newShapedBitmapLayer,
970  py::arg("name"),
971  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
972  "Creates a new shaped bitmap layer")
973 
974  .def("new_shaped_bitmap_layer", (Scene::ShapedBitmapLayer& (Scene::*)())&Scene::newShapedBitmapLayer,
975  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
976  "Creates a new shaped bitmap layer")
977 
978  .def("new_shaded_bitmap_layer", (Scene::ShadedBitmapLayer& (Scene::*)(const char*))&Scene::newShadedBitmapLayer,
979  py::arg("name"),
980  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
981  "Creates a new shaded bitmap layer")
982 
983  .def("new_shaded_bitmap_layer", (Scene::ShadedBitmapLayer& (Scene::*)())&Scene::newShadedBitmapLayer,
984  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
985  "Creates a new shaded bitmap layer")
986 
987  .def("add_scene", &Scene::addScene,
988  py::return_value_policy::reference, py::keep_alive<1, 0>(), // scene alive => layer alive
989  "Adds a subscene to the current scene.")
990 
991  .def("get_layer", (Scene::Layer* (Scene::*)(const char*) const)&Scene::getLayer,
992  py::arg("name"),
993  "Retrieves a layer by its name or None if not found")
994 
995  .def("get_layer", (Scene::Layer& (Scene::*)(int) const)&Scene::getLayer,
996  py::arg("index"),
997  "Retrieves a layer by its index")
998 
999  .def("get_layer", (Scene::Layer* (Scene::*)(float, float, unsigned int) const)&Scene::getLayer,
1000  py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
1001  "Retrieves a layer present at a specific point of the scene or None if not found")
1002 
1003  .def("get_layer_index", &Scene::getLayerIndex,
1004  py::arg("layer"),
1005  "Retrieves layer index in the scene or -1 if not found")
1006 
1007  .def("get_layer_count", &Scene::getLayerCount, "Returns total number of layers in the scene");
1008 
1009  /**
1010  * SceneRenderer
1011  */
1012  py::class_<SceneRenderer, AbstractTask> sceneRenderer(module, "SceneRenderer",
1013  R"doc(
1014  AbstractTask rendering a Scene.
1015  The rendering may be done to a given bitmap or on screen, if the platform supports on-screen rendering.
1016  )doc");
1017 
1018  py::enum_<SceneRenderer::OutputMapping>(sceneRenderer, "OutputMapping", "Scene coordinates to output (screen or bitmap) pixel coordinates mapping")
1019  .value("STRETCH", SceneRenderer::OutputMapping::STRETCH, "output viewport covers entirely the scene axis span, aspect ratio is not preserved in general")
1020  .value("FIT_WIDTH_TO_TOP", SceneRenderer::OutputMapping::FIT_WIDTH_TO_TOP, "width is covered entirely, height is resized to keep aspect ratio, the top borders are aligned")
1021  .value("FIT_WIDTH", SceneRenderer::OutputMapping::FIT_WIDTH, "width is covered entirely, height is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")
1022  .value("FIT_HEIGHT", SceneRenderer::OutputMapping::FIT_HEIGHT, "height is covered entirely, width is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")
1023  .export_values();
1024 
1025  sceneRenderer.def(py::init<>())
1026 
1027  .def_property("output",
1028  &SceneRenderer::getOutput,
1029  py::cpp_function(&SceneRenderer::setOutput, py::keep_alive<1, 2>()), // instance alive => bitmap alive
1030  "Output bitmap")
1031 
1032  .def_property("scene",
1033  &SceneRenderer::getScene,
1034  py::cpp_function(&SceneRenderer::setScene, py::keep_alive<1, 2>()), // instance alive => scene alive
1035  "Scene")
1036 
1037  .def_property("output_mapping", &SceneRenderer::getOutputMapping, &SceneRenderer::setOutputMapping,
1038  "Specifies how the scene coordinates [0,1]² are mapped to the output (screen or bitmap) pixel coordinates.")
1039 
1040  .def_property("output_reference_width", &SceneRenderer::getOutputReferenceWidth, &SceneRenderer::setOutputReferenceWidth,
1041  "Value overriding output width for elements that have their size in pixels, in order to render a resolution-independent picture")
1042 
1043  .def_property("output_pixels_fetching", &SceneRenderer::getOutputPixelsFetching, &SceneRenderer::setOutputPixelsFetching,
1044  R"doc(
1045  If set to `True`, the output image data is pulled from GPU to CPU memory every time the rendering is done.
1046  This is convenient if the rendered image is an application output result, and is further stored or sent through the network.
1047  Otherwise, if the image is to be further processed inside Beatmup, the pixel transfer likely introduces an unnecessary latency and may
1048  cause FPS drop in real-time rendering.
1049  Has no effect in on-screen rendering.
1050  )doc")
1051 
1052  .def_property("background_image",
1053  &SceneRenderer::getBackgroundImage,
1054  py::cpp_function(&SceneRenderer::setBackgroundImage, py::keep_alive<1, 2>()), // instance alive => bitmap alive
1055  "Image to pave the background.")
1056 
1057  .def("reset_output", &SceneRenderer::resetOutput,
1058  R"doc(
1059  Removes a bitmap from the renderer output, if any, and switches to on-screen rendering.
1060  The rendering is done on the display currently connected to the Context running the rendering task.
1061  )doc")
1062 
1063  .def("pick_layer", &SceneRenderer::pickLayer,
1064  py::arg("x"), py::arg("y"), py::arg("inPixels"), R"doc(
1065  Searches for a layer at a given position.
1066  In contrast to :func:`~beatmup.Scene.get_layer` it takes into account the output mapping.
1067 
1068  :param x: x coordinate.
1069  :param y: y coordinate.
1070  :param inPixels: If `True`, the coordinates are taken in pixels.
1071 
1072  Returns the topmost layer at the given position if any, None if no layer found.
1073  )doc");
1074 
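Putting Scene and SceneRenderer together from Python (a sketch with arbitrary sizes):

    import beatmup

    ctx = beatmup.Context()
    photo = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    output = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.QUAD_BYTE, 640, 480)

    scene = beatmup.Scene()
    layer = scene.new_bitmap_layer("photo")
    layer.bitmap = photo

    renderer = beatmup.SceneRenderer()
    renderer.scene = scene
    renderer.output = output
    renderer.output_mapping = beatmup.SceneRenderer.OutputMapping.FIT_WIDTH
    renderer.output_pixels_fetching = True
    ctx.perform_task(renderer)
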
1075  /**
1076  * CustomPipeline::TaskHolder
1077  */
1078  py::class_<CustomPipeline, AbstractTask> customPipeline(module, "CustomPipeline",
1079  R"doc(
1080  Custom pipeline: a sequence of tasks to be executed as a whole.
1081  Acts as an AbstractTask. Built by adding tasks one by one and calling measure() at the end.
1082  )doc");
1083 
1084  py::class_<CustomPipeline::TaskHolder>(customPipeline, "TaskHolder",
1085  "A task within a pipeline")
1086 
1087  .def("get_task", &CustomPipeline::TaskHolder::getTask,
1088  "Returns the task in the current holder")
1089 
1090  .def("get_run_time", &CustomPipeline::TaskHolder::getRunTime,
1091  "Returns last execution time in milliseconds");
1092 
1093  /**
1094  * CustomPipeline
1095  */
1096  customPipeline
1097  .def("get_task_count", &CustomPipeline::getTaskCount,
1098  "Returns number of tasks in the pipeline")
1099 
1100  .def("get_task", &CustomPipeline::getTask, py::arg("index"),
1101  py::return_value_policy::reference,
1102  "Retrieves a task by its index")
1103 
1104  .def("get_task_index", &CustomPipeline::getTaskIndex, py::arg("holder"),
1105  "Retrieves task index if it is in the pipeline; returns -1 otherwise")
1106 
1107  .def("add_task", &CustomPipeline::addTask, py::arg("task"),
1108  py::keep_alive<1, 2>(), // pipeline alive => task alive
1109  py::return_value_policy::reference,
1110  "Adds a new task to the end of the pipeline")
1111 
1112  .def("insert_task", &CustomPipeline::insertTask, py::arg("task"), py::arg("before"),
1113  py::keep_alive<1, 2>(), // pipeline alive => task alive
1114  py::return_value_policy::reference,
1115  "Inserts a task in a specified position of the pipeline before another task")
1116 
1117  .def("remove_task", &CustomPipeline::removeTask, py::arg("task"),
1118  "Removes a task from the pipeline, if any. Returns True on success")
1119 
1120  .def("measure", &CustomPipeline::measure,
1121  "Determines pipeline execution mode and required thread count");
1122 
1123  /**
1124  * Multitask
1125  */
1126  py::class_<Multitask, CustomPipeline> multitask(module, "Multitask",
1127  R"doc(
1128  Conditional multiple tasks execution.
1129 
1130  Beatmup offers a number of tools for pipelining several tasks into a single one. This technique is particularly useful for designing
1131  complex multi-stage image processing pipelines.
1132 
1133  Multitask is the simplest such tool. It allows concatenating different tasks into a linear conveyor and running them all or selectively.
1134  To handle this selection, each task is associated with a repetition policy specifying under which conditions the given task is executed
1135  or ignored when the pipeline is running.
1136 
1137  Specifically, there are two extreme modes that force the task execution every time (REPEAT_ALWAYS) or its unconditional skipping
1138  (IGNORE_ALWAYS) and two more sophisticated modes with the following behavior:
1139 
1140  - IGNORE_IF_UPTODATE skips the task if no tasks were executed among the ones coming before the current task in the pipeline;
1141  - REPEAT_UPDATE forces task repetition one time on next run and just after switches the repetition policy to IGNORE_IF_UPTODATE.
1142  )doc");
1143 
1144  py::enum_<Multitask::RepetitionPolicy>(multitask, "RepetitionPolicy",
1145  "Determines when a specific task in the sequence is run when the whole sequence is invoked")
1146  .value("REPEAT_ALWAYS", Multitask::RepetitionPolicy::REPEAT_ALWAYS, "execute the task unconditionally on each run")
1147  .value("REPEAT_UPDATE", Multitask::RepetitionPolicy::REPEAT_UPDATE, "execute the task one time then switch to IGNORE_IF_UPTODATE")
1148  .value("IGNORE_IF_UPTODATE", Multitask::RepetitionPolicy::IGNORE_IF_UPTODATE, "do not execute the task if no preceding tasks are run")
1149  .value("IGNORE_ALWAYS", Multitask::RepetitionPolicy::IGNORE_ALWAYS, "do not execute the task")
1150  .export_values();
1151 
1152  multitask
1153  .def(py::init<>())
1154 
1155  .def("get_repetition_policy", &Multitask::getRepetitionPolicy, py::arg("task"),
1156  "Returns repetition policy of a specific task in the pipeline.")
1157 
1158  .def("set_repetition_policy", &Multitask::setRepetitionPolicy, py::arg("task"), py::arg("policy"),
1159  R"doc(
1160  Sets repetition policy of a task. If the pipeline is processing at the moment of the call, it is the application responsibility to abort
1161  and restart it, if the policy change needs to be applied immediately.
1162  )doc");
1163 
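A Multitask sketch chaining two of the tasks bound above (a Sepia filter followed by a resampler); measure() is called once the pipeline is assembled:

    import beatmup

    ctx = beatmup.Context()
    image = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    toned = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 320, 240)
    big = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 640, 480)

    sepia = beatmup.filters.Sepia()
    sepia.input, sepia.output = image, toned

    resampler = beatmup.BitmapResampler(ctx)
    resampler.input, resampler.output = toned, big

    pipeline = beatmup.Multitask()
    sepia_holder = pipeline.add_task(sepia)
    pipeline.add_task(resampler)
    pipeline.measure()
    pipeline.set_repetition_policy(sepia_holder, beatmup.Multitask.RepetitionPolicy.IGNORE_IF_UPTODATE)
    ctx.perform_task(pipeline)
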
1164  /**
1165  * ChunkCollection
1166  */
1167  py::class_<ChunkCollection>(module, "ChunkCollection",
1168  R"doc(
1169  A key-value pair set storing pieces of arbitrary data (chunks) under string keys.
1170  A chunk is a header and a piece of data packed in memory like this: (idLength[4], id[idLength], size[sizeof(chunksize_t)], data[size])
1171  ChunkCollection defines an interface to retrieve chunks by their ids.
1172  )doc")
1173  .def("open", &ChunkCollection::open, "Opens the collection to read chunks from it.")
1174  .def("close", &ChunkCollection::close, "Closes the collection after a reading session.")
1175  .def("size", &ChunkCollection::size, "Returns the number of chunks available in the collection after it is opened.")
1176  .def("chunk_exists", &ChunkCollection::chunkExists, py::arg("id"),
1177  R"doc(
1178  Check if a specific chunk exists.
1179 
1180  :param id: the chunk id
1181 
1182  Returns `True` if the chunk exists in the collection.
1183  )doc")
1184  .def("chunk_size", &ChunkCollection::chunkSize, py::arg("id"),
1185  R"doc(
1186  Retrieves size of a specific chunk.
1187 
1188  :param id: the chunk id
1189 
1190  Returns the size of the chunk in bytes, or 0 if the chunk is not found.
1191  )doc")
1192  .def("__getitem__", [](ChunkCollection& collection, const std::string& id) -> py::object {
1193  if (collection.chunkExists(id)) {
1194  Chunk chunk(collection, id);
1195  return py::bytes(static_cast<const char*>(chunk()), chunk.size());
1196  }
1197  return py::none();
1198  }, py::arg("id"), "Returns the chunk data by its id");
1199 
1200  /**
1201  * ChunkFile
1202  */
1203  py::class_<ChunkFile, ChunkCollection>(module, "ChunkFile",
1204  R"doc(
1205  File containing chunks.
1206  The file is not loaded in memory, but is scanned when first opened to collect the information about available chunks.
1207  )doc")
1208  .def(py::init<const std::string&, bool>(), py::arg("filename"), py::arg("open_now") = true, R"doc(
1209  Creates a chunkfile accessor.
1210  The file content is not read until open() is called.
1211 
1212  :param filename: the file name / path
1213  :param open_now: if `true`, the file is read right away. Otherwise it is done on open() call.
1214  No information is available about chunks in the file until it is opened.
1215  )doc");
1216 
1217  /**
1218  * Python::WritableChunkCollection
1219  */
1220  py::class_<Python::WritableChunkCollection, ChunkCollection>(module, "WritableChunkCollection",
1221  R"doc(
1222  Writable ChunkCollection implementation for Python.
1223  Allows exchanging binary data without copying.
1224  )doc")
1225  .def(py::init<>())
1226 
1227  .def("__setitem__", [](Python::WritableChunkCollection& collection, const std::string& id, py::buffer& buffer) { collection[id] = buffer; },
1228  "Stores new chunk")
1229 
1230  .def("__getitem__", [](Python::WritableChunkCollection& collection, const std::string& id) -> py::object {
1231  if (collection.chunkExists(id))
1232  return collection[id];
1233  return py::none();
1234  }, py::arg("id"), "Returns the chunk data by its id")
1235 
1236  .def("save", &Python::WritableChunkCollection::save, py::arg("filename"), py::arg("append"), R"doc(
1237  Saves the collection to a file.
1238 
1239  :param filename: The name of the file to write chunks to
1240  :param append: If True, writing to the end of the file (keeping the existing content). Rewriting the file otherwise.
1241  )doc");
1242 
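A sketch of writing and reading chunk collections from Python (the file name is arbitrary; a numpy array serves as the binary buffer):

    import numpy
    import beatmup

    data = beatmup.WritableChunkCollection()
    data["weights"] = numpy.random.rand(16).astype(numpy.float32)
    data.save("model.chunks", False)            # rewrite the file

    stored = beatmup.ChunkFile("model.chunks")  # opened right away by default
    print(stored.size(), stored.chunk_exists("weights"))
    raw = stored["weights"]                     # chunk content as bytes, or None if missing
    stored.close()
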
1243  /**
1244  * NNets::ActivationFunction
1245  */
1246  py::enum_<NNets::ActivationFunction>(nnets, "ActivationFunction", "Activation function specification")
1247  .value("DEFAULT", NNets::ActivationFunction::DEFAULT, "default activation: 0..1 bounded ReLU (identity clipped to 0..1 range)")
1248  .value("BRELU6", NNets::ActivationFunction::BRELU6, "0.167 times identity clipped to 0..1 range")
1249  .value("SIGMOID_LIKE", NNets::ActivationFunction::SIGMOID_LIKE, "a piecewise-linear sigmoid function approximation")
1250  .export_values();
1251 
1252  /**
1253  * NNets::Size::Padding
1254  */
1255  py::enum_<NNets::Size::Padding>(nnets, "Padding", "Zero padding specification")
1256  .value("SAME", NNets::Size::Padding::SAME, "operation output size matches its input size for unit strides")
1257  .value("VALID", NNets::Size::Padding::VALID, "no zero padding")
1258  .export_values();
1259 
1260  /**
1261  * NNets::AbstractOperation
1262  */
1263  py::class_<NNets::AbstractOperation>(nnets, "AbstractOperation",
1264  R"doc(
1265  Abstract neural net operation (layer).
1266  Has a name used to refer to the operation in a Model. The operation data (such as convolution weights) is provided through a ChunkCollection
1267  in single precision floating point format, with the chunks looked up by the operation name.
1268  Operations may have several inputs and outputs, numbered starting from zero.
1269  )doc")
1270  .def_property_readonly("name", &NNets::AbstractOperation::getName,
1271  "Operation name")
1272 
1273  .def_property_readonly("input_count", &NNets::AbstractOperation::getInputCount,
1274  "Number of operation inputs")
1275 
1276  .def_property_readonly("output_count", &NNets::AbstractOperation::getOutputCount,
1277  "Number of operation outputs");
1278 
1279  /**
1280  * NNets::Conv2D
1281  */
1282  py::class_<NNets::Conv2D, NNets::AbstractOperation>(nnets, "Conv2D",
1283  R"doc(
1284  2D convolution operation computed on GPU.
1285  Has 2 inputs: main and residual (detailed below), and a single output.
1286  Constraints:
1287 
1288  * Input and output are 3D tensors with values in [0, 1] range sampled over 8 bits.
1289  * Number of input feature maps is 3 or a multiple of 4.
1290  * Number of output feature maps is a multiple of 4.
1291  * For group convolutions, each group contains a multiple of 4 input channels and a multiple of 4 output
1292  channels, or exactly 1 input and 1 output channel (i.e., depthwise).
1293  * Kernels are of square shape.
1294  * Strides are equal along X and Y.
1295  * Dilations are equal to 1.
1296  * If an image is given as input (3 input feature maps), only valid padding is supported.
1297  * An activation function is always applied on output.
1298 
1299  Raspberry Pi-related constraints:
1300 
1301  * Pi cannot sample more than 256 channels to compute a single output value. The actual practical limit is
1302  lower still: about 128 channels for pointwise convolutions and fewer than 100 channels for
1303  bigger kernels. When the limit is reached, the Pi OpenGL driver reports an out-of-memory error (0x505).
1304 
1305  Features:
1306 
1307  * Bias addition is integrated.
1308  * An optional residual input is available: a tensor of output shape added to the convolution result
1309  before applying the activation function.
1310  )doc")
1311 
1312  .def(py::init<const std::string&, const int, const int, const int, const int, const NNets::Size::Padding, const bool, const int, const NNets::ActivationFunction>(),
1313  py::arg("name"), py::arg("kernel_size"), py::arg("num_input_channels"), py::arg("num_output_channels"),
1314  py::arg("stride") = 1,
1315  py::arg("padding") = NNets::Size::Padding::VALID,
1316  py::arg("use_bias") = true,
1317  py::arg("num_groups") = 1,
1318  py::arg("activation") = NNets::ActivationFunction::DEFAULT,
1319  R"doc(
1320  Instantiates a 2D convolution operation.
1321 
1322  :param name: operation name.
1323  :param kernel_size: convolution kernel size.
1324  :param num_input_channels: number of input feature map channels (input depth).
1325  :param num_output_channels: number of output feature map channels (output depth).
1326  :param stride: convolution stride.
1327  :param padding: padding policy.
1328  :param use_bias: if `True`, bias addition is enabled. The bias vector is looked up in the model data.
1329  :param num_groups: number of convolution groups; a value greater than 1 gives a group or depthwise convolution.
1330  :param activation: activation function applied to the operation output.
1331  )doc")
1332 
1333  .def_property_readonly("use_bias", &NNets::Conv2D::isBiasUsed, "Returns `true` if bias addition is enabled")
1334 
1335  .def_property_readonly_static("filters_chunk_suffix", [](py::object){ return NNets::Conv2D::FILTERS_CHUNK_SUFFIX; },