Beatmup
bindings.cpp
Go to the documentation of this file.
1 /*
2  Beatmup image and signal processing library
3  Copyright (C) 2020, lnstadrum
4 
5  This program is free software: you can redistribute it and/or modify
6  it under the terms of the GNU General Public License as published by
7  the Free Software Foundation, either version 3 of the License, or
8  (at your option) any later version.
9 
10  This program is distributed in the hope that it will be useful,
11  but WITHOUT ANY WARRANTY; without even the implied warranty of
12  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13  GNU General Public License for more details.
14 
15  You should have received a copy of the GNU General Public License
16  along with this program. If not, see <http://www.gnu.org/licenses/>.
17 */
18 
#include <stdexcept>
#include <vector>
#include <memory>

#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
#include <pybind11/eval.h>
#include <pybind11/stl.h>

#include "context.h"
#include "bitmap/metric.h"
#include "bitmap/resampler.h"
#include "bitmap/tools.h"
#include "contours/contours.h"
#include "filters/color_matrix.h"
#include "filters/pixelwise_filter.h"
#include "filters/sepia.h"
#include "gpu/swapper.h"
#include "gpu/variables_bundle.h"
#include "masking/flood_fill.h"
#include "nnets/conv2d.h"
#include "nnets/classifier.h"
#include "nnets/deserialized_model.h"
#include "nnets/dense.h"
#include "nnets/image_sampler.h"
#include "nnets/pooling2d.h"
#include "nnets/softmax.h"
#include "pipelining/custom_pipeline.h"
#include "pipelining/multitask.h"
#include "scene/renderer.h"
#include "scene/scene.h"
#include "shading/image_shader.h"
#include "shading/shader_applicator.h"

#include "binding_tools.hpp"
#include "bitmap.h"
#include "chunk_collection.h"
56 
57 
58 namespace py = pybind11;
59 using namespace Beatmup;
60 
61 
62 PYBIND11_MODULE(beatmup, module) {
    // Module-level docstring. The autosummary directive is consumed by Sphinx
    // to generate the Python API reference: the names listed here must match
    // the classes registered on the module below.
    module.doc() = R"doc(
 beatmup module
 --------------

 .. autosummary::
 :toctree: python/_generate

 AbstractBitmap
 AbstractTask
 AffineMapping
 Bitmap
 BitmapResampler
 ChunkCollection
 ChunkFile
 Context
 CustomPipeline
 FloodFill
 ImageShader
 IntegerContour2D
 InternalBitmap
 Metric
 Multitask
 PixelFormat
 Scene
 SceneRenderer
 ShaderApplicator
 WritableChunkCollection
 )doc";
91 
    // Submodule declarations. Each docstring carries an autosummary list used
    // by the Sphinx documentation generator; keep the lists in sync with the
    // classes registered on the corresponding submodule further below.
    auto gl = module.def_submodule("gl", R"doc(
 beatmup.gl module
 -----------------

 .. autosummary::
 :toctree: python/_generate

 TextureHandler
 VariablesBundle
 )doc");

    auto filters = module.def_submodule("filters", R"doc(
 beatmup.filters module
 ----------------------

 .. autosummary::
 :toctree: python/_generate

 ColorMatrix
 PixelwiseFilter
 Sepia
 )doc");

    auto nnets = module.def_submodule("nnets", R"doc(
 beatmup.nnets module
 --------------------

 .. autosummary::
 :toctree: python/_generate

 ActivationFunction
 AbstractOperation
 Classifier
 Conv2D
 Dense
 DeserializedModel
 ImageSampler
 InferenceTask
 Model
 Padding
 Pooling2D
 Softmax
 )doc");
135 
136  module.def("say_hi", []() {
137  Context ctx;
138  py::print("Beatmup is up and running, yay!");
139  py::exec("import platform; print('Python version:', platform.python_version())");
140  },
141  "Prints some greetings");
142 
    /**
     * PixelFormat
     */
    // Enumerates the pixel layouts supported by bitmaps: 8-bit integer and
    // 32-bit float flavors with 1/3/4 channels, plus sub-byte binary masks.
    // export_values() re-exports every value at module scope as well
    // (beatmup.SINGLE_BYTE in addition to beatmup.PixelFormat.SINGLE_BYTE).
    py::enum_<PixelFormat>(module, "PixelFormat", "Specifies bitmap pixel format")
        .value("SINGLE_BYTE", PixelFormat::SingleByte, "single channel of 8 bits per pixel (like grayscale), unsigned integer values")
        .value("TRIPLE_BYTE", PixelFormat::TripleByte, "3 channels of 8 bits per pixel (like RGB), unsigned integer values")
        .value("QUAD_BYTE", PixelFormat::QuadByte, "4 channels of 8 bits per pixel (like RGBA), unsigned integer values")
        .value("SINGLE_FLOAT", PixelFormat::SingleFloat, "single channel of 32 bits per pixel (like grayscale), single precision floating point values")
        .value("TRIPLE_FLOAT", PixelFormat::TripleFloat, "3 channels of 32 bits per pixel, single precision floating point values")
        .value("QUAD_FLOAT", PixelFormat::QuadFloat, "4 channels of 32 bits per pixel, single precision floating point values")
        .value("BINARY_MASK", PixelFormat::BinaryMask, "1 bit per pixel")
        .value("QUATERNARY_MASK", PixelFormat::QuaternaryMask, "2 bits per pixel")
        .value("HEX_MASK", PixelFormat::HexMask, "4 bits per pixel")
        .export_values();
157 
    /**
     * GL::TextureHandler
     */
    // Read-only geometry accessors of a GPU texture; serves as the common base
    // of all bitmap classes exposed below.
    py::class_<GL::TextureHandler>(gl, "TextureHandler",
        "A texture stored in GPU memory")

        .def("get_width", &GL::TextureHandler::getWidth,
            "Returns width of the texture in pixels")

        .def("get_height", &GL::TextureHandler::getHeight,
            "Returns height of the texture in pixels")

        .def("get_depth", &GL::TextureHandler::getDepth,
            "Returns depth of the texture in pixels")

        .def("get_number_of_channels", &GL::TextureHandler::getNumberOfChannels,
            "Returns number of channels containing in the texture");

    /**
     * AbstractTask
     */
    // Registered without any methods: tasks are created through their concrete
    // subclasses; this binding only provides the common base type handle that
    // Context methods accept.
    py::class_<AbstractTask>(module, "AbstractTask", "Abstract task executable in a thread pool of a Context");
180 
181  /**
182  * Context
183  */
184  py::class_<Context>(module, "Context", "Beatmup engine context")
185 
186  .def(py::init<>())
187 
188  .def(py::init<const PoolIndex>())
189 
190  .def("perform_task", &Context::performTask,
191  py::arg("task"), py::arg("pool") = 0,
192  "Performs a given task. Returns its execution time in milliseconds")
193 
194  .def("repeat_task", &Context::repeatTask,
195  py::arg("task"), py::arg("abort_current"), py::arg("pool") = 0,
196  py::keep_alive<1, 2>(), // context alive => task alive
197  R"doc(
198  Ensures a given task executed at least once
199 
200  :param task: The task
201  :param abort_current: If True and the same task is currently running, the abort signal is sent.
202  :param pool: A thread pool to run the task in
203  )doc")
204 
205  .def("submit_task", &Context::submitTask,
206  py::arg("task"), py::arg("pool") = 0,
207  py::keep_alive<1, 2>(), // context alive => task alive
208  "Adds a new task to the jobs queue")
209 
210  .def("submit_persistent_task", &Context::submitPersistentTask,
211  py::arg("task"), py::arg("pool") = 0,
212  py::keep_alive<1, 2>(), // context alive => task alive
213  "Adds a new persistent task to the jobs queue")
214 
215  .def("wait_for_job", &Context::waitForJob,
216  py::arg("job"), py::arg("pool") = 0,
217  "Blocks until a given job finishes")
218 
219  .def("abort_job", &Context::abortJob,
220  py::arg("job"), py::arg("pool") = 0,
221  "Aborts a given submitted job.")
222 
223  .def("wait", &Context::wait,
224  "Blocks until all the submitted jobs are executed",
225  py::arg("pool") = 0)
226 
227  .def("busy", &Context::busy,
228  "Returns `True` if a specific thread pool in the context is executing a Task",
229  py::arg("pool") = 0)
230 
231  .def("check", &Context::check,
232  "Checks if a specific thread pool is doing great: rethrows exceptions occurred during tasks execution, if any.",
233  py::arg("pool") = 0)
234 
235  .def("max_allowed_worker_count", &Context::maxAllowedWorkerCount,
236  "Returns maximum number of working threads per task in a given thread pool",
237  py::arg("pool") = 0)
238 
239  .def("limit_worker_count", &Context::limitWorkerCount,
240  "Limits maximum number of threads (workers) when performing tasks in a given pool",
241  py::arg("max_value"), py::arg("pool") = 0)
242 
243  .def("is_gpu_queried", &Context::isGpuQueried,
244  "Returns `True` if GPU was queried and ready to use")
245 
246  .def("is_gpu_ready", &Context::isGpuReady,
247  "Returns `True` if GPU was queried and ready to use")
248 
249  .def("warm_up_gpu", &Context::warmUpGpu, R"doc(
250  Initializes GPU within a given Context if not yet (takes no effect if it already is).
251  GPU initialization may take some time and is done when a first task using GPU is being run. Warming up
252  the GPU is useful to avoid the app get stuck for some time when it launches its first task on GPU.
253  )doc")
254 
255  .def("query_gpu_info", [](Context &ctx) -> py::object {
256  std::string vendor, renderer;
257  if (ctx.queryGpuInfo(vendor, renderer))
258  return py::make_tuple<>(vendor, renderer);
259  return py::none();
260  },
261  "Queries information about GPU and returns a tuple of vendor and renderer strings, or None if no GPU available.")
262 
263  .def("empty_gpu_recycle_bin", [](Context& ctx) {
264  auto* bin = ctx.getGpuRecycleBin();
265  if (bin)
266  bin->emptyBin();
267  }, R"doc(
268  Empties GPU recycle bin.
269  When a bitmap is destroyed in the application code, its GPU storage is not destroyed immediately. This is due to the fact that destroying a
270  texture representing the bitmap content in the GPU memory needs to be done in a thread that has access to the GPU, which is one of the
271  threads in the thread pool. The textures of destroyed bitmaps are marked as unused anymore and put into a "GPU trash bin". The latter is
272  emptied by calling this function.
273  In applications doing repeated allocations and deallocations of images (e.g., processing video frames in a loop), it is recommended to empty
274  the GPU recycle bin periodically in the described way in order to prevent running out of memory.
275  )doc");
276 
    /**
     * AbstractBitmap
     */
    // Common bitmap interface; inherits texture geometry accessors from
    // GL::TextureHandler registered above.
    py::class_<AbstractBitmap, GL::TextureHandler>(module, "AbstractBitmap",
        "Abstract bitmap class")

        .def("get_pixel_format", &AbstractBitmap::getPixelFormat,
            "Returns pixel format of the bitmap")

        .def("get_memory_size", &AbstractBitmap::getMemorySize,
            "Returns bitmap size in bytes")

        .def("get_context", &AbstractBitmap::getContext,
            "Returns Context the current bitmap is attached to")

        .def("zero", &AbstractBitmap::zero,
            "Sets all the pixels to zero")

        .def("__str__", &AbstractBitmap::toString,
            "Returns a string describing the bitmap")

        .def("save_bmp", &AbstractBitmap::saveBmp, py::arg("filename"),
            "Saves a bitmap to a BMP file");
300 
    /**
     * InternalBitmap
     */
    py::class_<InternalBitmap, AbstractBitmap>(module, "InternalBitmap", R"doc(
 Bitmap whose memory is managed by the Beatmup engine.
 Main pixel data container used internally by Beatmup. Applications would typically use a different incarnation
 of AbstractBitmap implementing I/O operations, and InternalBitmap instances are used to exchange data between
 different processing entities (AbstractTask instances) within the application.
 )doc")

        // Creates a bitmap of a given size and format; pixel storage is
        // allocated immediately unless "allocate" is False.
        .def(py::init<Context&, PixelFormat, int, int, bool>(),
            py::arg("context"), py::arg("pixel_format"), py::arg("width"), py::arg("height"), py::arg("allocate") = true,
            py::keep_alive<1, 2>()) // bitmap alive => context alive

        // NOTE(review): presumably the const char* argument is a file path the
        // bitmap is loaded from — confirm against the InternalBitmap declaration.
        .def(py::init<Context&, const char*>(),
            py::keep_alive<1, 2>()); // bitmap alive => context alive
317 
    /**
     * Python::Bitmap
     */
    py::class_<Python::Bitmap, AbstractBitmap>(module, "Bitmap", py::buffer_protocol(),
        "A bitmap wrapping a numpy container without copying")

        .def(py::init<Beatmup::Context&, py::buffer&>(),
            py::keep_alive<1, 2>()) // bitmap alive => context alive

        // Exposes the pixel storage through the Python buffer protocol,
        // enabling zero-copy conversion to/from numpy arrays.
        .def_buffer([](Python::Bitmap& bitmap) {
            return bitmap.getPythonBuffer();
        });
331 
332  /**
333  * Tools
334  */
335  module.def_submodule("bitmaptools")
337  py::arg("bitmap"), py::arg("context"), py::arg("format"),
338  py::return_value_policy::take_ownership,
339  py::keep_alive<0, 1>(), // bitmap alive => context alive
340  R"doc(
341  Makes a copy of a bitmap for a given Context converting the data to a given pixel format.
342  Can be used to exchange image content between different instances of Context.
343  The copy is done in an AbstractTask run in the default thread pool of the source bitmap context.
344 
345  :param bitmap: the bitmap to copy
346  :param context: the Context instance the copy is associated with
347  :param format: pixel format of the copy
348  )doc")
349 
350  .def("chessboard", &BitmapTools::chessboard,
351  py::arg("context"), py::arg("width"), py::arg("height"), py::arg("cell_size"), py::arg("format") = PixelFormat::BinaryMask,
352  py::return_value_policy::take_ownership,
353  py::keep_alive<0, 1>(), // bitmap alive => context alive
354  R"doc(
355  Renders a chessboard image.
356 
357  :param context: a Context instance
358  :param width: width in pixels of the resulting bitmap
359  :param height: height in pixels of the resulting bitmap
360  :param cell_size: size of a single chessboard cell in pixels
361  :param pixel_format: pixel format of the resulting bitmap
362  )doc")
363 
364  .def("noise", [](AbstractBitmap& bitmap) { BitmapTools::noise(bitmap); },
365  py::arg("bitmap"),
366  "Fills a given bitmap with random noise.")
367 
368  .def("noise", [](AbstractBitmap& bitmap, const py::tuple& area) { BitmapTools::noise(bitmap, Python::toRectangle<int>(area)); },
369  py::arg("bitmap"), py::arg("area"),
370  "Replaces a rectangular area in a bitmap by random noise.")
371 
372  .def("make_opaque", [](AbstractBitmap& bitmap, const py::tuple& area) {
373  BitmapTools::makeOpaque(bitmap, Python::toRectangle<int>(area));
374  },
375  py::arg("bitmap"), py::arg("area"),
376  "Makes a bitmap area opaque")
377 
378  .def("invert", &BitmapTools::invert,
379  py::arg("input"), py::arg("output"),
380  "Inverses colors of an image in a pixelwise fashion")
381 
382  .def("scanline_search", [](AbstractBitmap& bitmap, const py::tuple& value, const py::tuple& startFrom) -> py::object {
383  auto pt = BitmapTools::scanlineSearch(bitmap, Python::toPixfloat4(value), Python::toPoint<int>(startFrom));
384  if (pt.x == -1 && pt.y == -1)
385  return py::none();
386  return Python::toTuple(pt);
387  },
388  py::arg("bitmap"), py::arg("value"), py::arg("start_from") = Python::toTuple(IntPoint::ZERO),
389  R"doc(
390  Goes through a bitmap in scanline order (left to right, top to bottom) until a pixel of a given color is met.
391 
392  :param source: the bitmap to scan
393  :param value: the color value to look for
394  :param start_from: starting pixel position
395 
396  Returns the next closest position of the searched value (in scanline order) or None if not found.
397  )doc");
398 
399  /**
400  * BitmapResampler
401  */
402  py::class_<BitmapResampler, AbstractTask> bitmapResampler(module, "BitmapResampler", R"doc(
403  Resamples an image to a given resolution.
404  Implements different resampling approaches, including standard ones (bilinear, bicubic, etc.) and a neural network-based 2x upsampling approach dubbed as "x2".
405  )doc");
406 
407  bitmapResampler.def(py::init<Context&>(), py::arg("context"),
408  py::keep_alive<1, 2>()) // resampler alive => context alive
409 
410  .def_property("input", &BitmapResampler::getInput,
411  py::cpp_function(&BitmapResampler::setInput, py::keep_alive<1, 2, 1>()), // instance alive => bitmap alive
412  "Input bitmap")
413 
414  .def_property("output", &BitmapResampler::getOutput,
415  py::cpp_function(&BitmapResampler::setOutput, py::keep_alive<1, 2, 2>()), // instance alive => bitmap alive
416  "Output bitmap")
417 
418  .def_property("mode", &BitmapResampler::getMode, &BitmapResampler::setMode, "Resmpling algorithm (mode)")
419 
421  "Cubic resampling parameter (`alpha`)")
422 
423  .def_property("input_rectangle", [](BitmapResampler& resampler, const py::tuple& area) {
424  resampler.setInputRect(Python::toRectangle<int>(area));
425  },
426  [](BitmapResampler& resampler) {
427  return Python::toTuple(resampler.getInputRect());
428  },
429  "Specifies a rectangular working area in the input bitmap. Pixels outside of this area are not used.")
430 
431  .def_property("output_rectangle", [](BitmapResampler& resampler, const py::tuple& area) {
432  resampler.setOutputRect(Python::toRectangle<int>(area));
433  },
434  [](BitmapResampler& resampler) {
435  return Python::toTuple(resampler.getOutputRect());
436  },
437  "Specifies a rectangular working area in the output bitmap. Pixels outside of this area are not affected.");
438 
439  py::enum_<BitmapResampler::Mode>(bitmapResampler, "Mode", "Resampling mode (algorithm) specification")
440  .value("NEAREST_NEIGHBOR", BitmapResampler::Mode::NEAREST_NEIGHBOR, "zero-order: usual nearest neighbor")
441  .value("BOX", BitmapResampler::Mode::BOX, "'0.5-order': anti-aliasing box filter; identical to nearest neighbor when upsampling")
442  .value("LINEAR", BitmapResampler::Mode::LINEAR, "first order: bilinear interpolation")
443  .value("CUBIC", BitmapResampler::Mode::CUBIC, "third order: bicubic interpolation")
444  .value("CONVNET", BitmapResampler::Mode::CONVNET, "upsampling x2 using a convolutional neural network")
445  .export_values();
446 
447  /**
448  * Filters::PixelwiseFilter
449  */
450  py::class_<Filters::PixelwiseFilter, AbstractTask>(filters, "PixelwiseFilter",
451  "Base class for image filters processing a given bitmap in a pixelwise fashion.")
452 
453  .def_property("input",
455  py::cpp_function(&Filters::PixelwiseFilter::setInput, py::keep_alive<1, 2, 1>()), // instance alive => bitmap alive
456  "Input bitmap")
457 
458  .def_property("output",
460  py::cpp_function(&Filters::PixelwiseFilter::setOutput, py::keep_alive<1, 2, 2>()), // instance alive => bitmap alive
461  "Output bitmap");
462 
463  /**
464  * Filters::ColorMatrix
465  */
466  py::class_<Filters::ColorMatrix, Filters::PixelwiseFilter>(filters, "ColorMatrix",
467  "Color matrix filter: applies an affine mapping Ax + B at each pixel of a given image in RGBA space")
468 
469  .def(py::init<>())
470 
471  .def("set_coefficients", [](Filters::ColorMatrix& colorMatrix, int ouch, float bias, const py::tuple& rgba) {
472  const color4f c = Python::toColor4f(rgba);
473  colorMatrix.setCoefficients(ouch, bias, c.r, c.g, c.b, c.a);
474  },
475  "Sets color matrix coefficients for a specific output color channel",
476  py::arg("out_channel"), py::arg("add"), py::arg("rgba"))
477 
478  .def("set_hsv_correction", &Filters::ColorMatrix::setHSVCorrection,
479  "Resets the current transformation to a matrix performing standard HSV correction",
480  py::arg("hue_shift_degrees"), py::arg("saturation_factor"), py::arg("value_factor"))
481 
482  .def("set_color_inversion", [](Filters::ColorMatrix& colorMatrix, const py::tuple& hue, float saturation, float value){
483  colorMatrix.setColorInversion(Python::toColor3f(hue), saturation, value);
484  },
485  "Resets the current transformation to a fancy color inversion mode with a fixed hue point",
486  py::arg("preserved_hue"), py::arg("saturation_factor"), py::arg("value_factor"))
487 
488  .def("apply_contrast", &Filters::ColorMatrix::applyContrast,
489  "Applies a contrast adjustment by a given factor on top of the current transformation",
490  py::arg("factor"))
491 
492  .def("set_brightness", &Filters::ColorMatrix::setBrightness,
493  "Sets a brightness adjustment by a given factor (non-cumulative with respect to the current transformation)",
494  py::arg("brightness"));
495 
    /**
     * Filters::Sepia
     */
    // Minimal binding: default construction only; the input/output properties
    // are inherited from PixelwiseFilter.
    py::class_<Filters::Sepia, Filters::PixelwiseFilter>(filters, "Sepia", "Sepia filter: an example of :class:`~beatmup.filters.PixelwiseFilter` implementation.")
        .def(py::init<>());
501 
    /**
     * IntegerCountour2D
     */
    py::class_<IntegerContour2D>(module, "IntegerContour2D",
        "A sequence of integer-valued 2D points")

        .def(py::init<>())

        .def("add_point", &IntegerContour2D::addPoint,
            "Adds a new point to the end of the contour. Some points may be skipped to optimize the storage.",
            py::arg("x"), py::arg("y"))

        .def("clear", &IntegerContour2D::clear,
            "Removes contour content")

        .def("get_point_count", &IntegerContour2D::getPointCount,
            "Returns number of points in the contour")

        .def("get_length", &IntegerContour2D::getLength,
            "Returns contour length")

        // Points are exposed as plain (x, y) tuples on the Python side.
        .def("get_point",
            [](IntegerContour2D& contour, int index) { return Python::toTuple(contour.getPoint(index)); },
            py::arg("index"),
            "Returns a point by its index");
527 
528  /**
529  * FloodFill
530  */
531  py::class_<FloodFill, AbstractTask> floodFill(module, "FloodFill",
532  R"doc(
533  Discovers areas of similar colors up to a tolerance threshold around given positions (seeds) in the input image.
534  These areas are filled with white color in another image (output). If the output bitmap is a binary mask,
535  corresponding pixels are set to `1`. The rest of the output image remains unchanged.
536  Optionally, computes contours around the discovered areas and stores the contour positions.
537  Also optionally, applies post-processing by dilating or eroding the discovered regions in the output image.
538  )doc");
539 
540  py::enum_<FloodFill::BorderMorphology>(floodFill, "BorderMorphology",
541  "Morphological postprocessing operation applied to the discovered connected components")
542  .value("NONE", FloodFill::BorderMorphology::NONE, "no postprocessing")
543  .value("DILATE", FloodFill::BorderMorphology::DILATE, "apply a dilatation")
544  .value("ERODE", FloodFill::BorderMorphology::ERODE, "apply an erosion")
545  .export_values();
546 
547  floodFill.def(py::init<>())
548 
549  .def_property("input",
551  py::cpp_function(&FloodFill::setInput, py::keep_alive<1, 2, 1>()), // instance alive => bitmap alive
552  "Input bitmap")
553 
554  .def_property("output",
556  py::cpp_function(&FloodFill::setOutput, py::keep_alive<1, 2, 2>()), // instance alive => bitmap alive
557  "Output bitmap")
558 
559  .def_property("tolerance", &FloodFill::getTolerance, &FloodFill::setTolerance,
560  "Intensity tolerance")
561 
562  .def("set_mask_pos",
563  [](FloodFill& ff, const py::tuple& pos) { ff.setMaskPos(Python::toPoint<int>(pos)); },
564  py::arg("pos"),
565  "Specifies left-top corner position of the mask inside the input bitmap")
566 
567  .def("set_seeds", [](FloodFill& ff, const py::list& seeds) {
568  IntPoint* pts = new IntPoint[seeds.size()];
569  for (py::ssize_t i = 0; i < seeds.size(); ++i)
570  pts[i] = Python::toPoint<int>(seeds[i]);
571  ff.setSeeds(pts, seeds.size());
572  delete[] pts;
573  },
574  py::arg("seeds"),
575  "Specifies a set of seeds (starting points)")
576 
577  .def("set_compute_contours", &FloodFill::setComputeContours, py::arg("compute"),
578  "Enables or disables contours computation")
579 
580  .def("set_border_postprocessing", &FloodFill::setBorderPostprocessing,
581  py::arg("operation"), py::arg("hold_radius"), py::arg("release_radius"),
582  R"doc(
583  Specifies a morphological operation to apply to the mask border.
584 
585  :param operation: a postprocessing operation
586  :param hold_radius: erosion/dilation hold radius (output values set to 1)
587  :param release_radius: erosion/dilation radius of transition from 1 to 0
588  )doc")
589 
590  .def("get_bounds",
591  [](FloodFill& ff, const py::tuple& pos) { return Python::toTuple(ff.getBounds()); },
592  "Returns bounding box of the computed mask")
593 
594  .def("get_contour_count", &FloodFill::getContourCount,
595  "Returns number of discovered contours")
596 
597  .def("get_contour", &FloodFill::getContour,
598  py::arg("index"),
599  "Returns a contour by index if compute_contours was set to True, throws an exception otherwise");
600 
    /**
     * AffineMapping
     */
    // Points and matrices cross the Python boundary as plain tuples via the
    // Python::toTuple / Python::toPoint converters.
    py::class_<AffineMapping>(module, "AffineMapping", "2x3 affine mapping containing a 2x2 matrix and a 2D point")

        .def(py::init<>())

        .def("get_position",
            [](const AffineMapping& mapping) { return Python::toTuple(mapping.getPosition()); },
            "Returns the mapping origin")

        .def("get_matrix",
            [](const AffineMapping& mapping) { return Python::toTuple(mapping.getMatrix()); },
            "Returns the mapping matrix")

        .def("__call__",
            [](const AffineMapping& mapping, const py::tuple& point) { return Python::toTuple(mapping(Python::toPoint<float>(point))); },
            py::arg("point"),
            "Maps a point")

        // In-place inversion; see get_inverse for the non-mutating variants.
        .def("invert", &AffineMapping::invert,
            "Inverts the mapping")

        // Overload 1: returns a new inverted mapping (explicit cast selects
        // the parameterless const overload).
        .def("get_inverse", (AffineMapping (AffineMapping::*)() const)&AffineMapping::getInverse,
            py::return_value_policy::take_ownership,
            "Returns inverse mapping")

        // Overload 2: applies the inverse mapping to a single point.
        .def("get_inverse",
            [](const AffineMapping& mapping, const py::tuple& point) { return Python::toTuple(mapping.getInverse(Python::toPoint<float>(point))); },
            py::arg("point"),
            "Computes inverse mapping of a point")

        .def("set_center_position", [](AffineMapping& mapping, const py::tuple& point) {
                mapping.setCenterPosition(Python::toPoint<float>(point));
            },
            py::arg("point"),
            "Adjusts the mapping origin so that the center of the axes box matches a given point")

        .def("translate", [](AffineMapping& mapping, const py::tuple& shift) {
                mapping.translate(Python::toPoint<float>(shift));
            },
            py::arg("shift"),
            "Translates the mapping")

        .def("scale", [](AffineMapping& mapping, float factor, const py::tuple& fixedPoint) {
                mapping.scale(factor, Python::toPoint<float>(fixedPoint));
            },
            py::arg("factor"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
            "Scales the mapping around a given point in target domain")

        .def("rotate_degrees", [](AffineMapping& mapping, float angle, const py::tuple& fixedPoint) {
                mapping.rotateDegrees(angle, Python::toPoint<float>(fixedPoint));
            },
            py::arg("angle"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
            "Rotates the mapping around a given point in target domain")

        .def("is_point_inside", [](AffineMapping& mapping, const py::tuple& point) {
                return mapping.isPointInside(Python::toPoint<float>(point));
            },
            py::arg("point"),
            "Tests whether a point from the output domain is inside the input axes span");
662 
    /**
     * GL::VariablesBundle
     */
    // The set_integer/set_float overloads map to GLSL scalar and vec2/3/4
    // uniforms; matrix setters validate the flattened list length up front so
    // the error surfaces in Python instead of inside the GPU thread.
    py::class_<GL::VariablesBundle>(gl, "VariablesBundle",
        "Collection storing GLSL program parameters (scalars, matrices, vectors) to communicate them from user to GPU-managing thread")

        .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int))&GL::VariablesBundle::setInteger,
            py::arg("name"), py::arg("value"),
            "Sets a scalar integer uniform value")

        .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int))&GL::VariablesBundle::setInteger,
            py::arg("name"), py::arg("x"), py::arg("y"),
            "Sets a 2D integer uniform vector value")

        .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int, int))&GL::VariablesBundle::setInteger,
            py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
            "Sets a 3D integer uniform vector value")

        .def("set_integer", (void (GL::VariablesBundle::*)(std::string, int, int, int, int))&GL::VariablesBundle::setInteger,
            py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
            "Sets a 4D integer uniform vector value")

        .def("set_float", (void (GL::VariablesBundle::*)(std::string, float))&GL::VariablesBundle::setFloat,
            py::arg("name"), py::arg("value"),
            "Sets a scalar float uniform value")

        .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float))&GL::VariablesBundle::setFloat,
            py::arg("name"), py::arg("x"), py::arg("y"),
            "Sets a 2D float uniform vector value")

        .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float, float))&GL::VariablesBundle::setFloat,
            py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
            "Sets a 3D float uniform vector value")

        .def("set_float", (void (GL::VariablesBundle::*)(std::string, float, float, float, float))&GL::VariablesBundle::setFloat,
            py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
            "Sets a 4D float uniform vector value")

        .def("set_float_matrix2", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
                if (matrix.size() != 2*2)
                    throw std::invalid_argument("Expected a list-like input containing " + std::to_string(2*2) +
                        " values but got " + std::to_string(matrix.size()));
                instance.setFloatMatrix2(name, matrix.data());
            },
            py::arg("name"), py::arg("matrix"),
            "Sets a float 2*2 matrix variable value")

        .def("set_float_matrix3", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
                if (matrix.size() != 3*3)
                    throw std::invalid_argument("Expected a list-like input containing " + std::to_string(3*3) +
                        " values but got " + std::to_string(matrix.size()));
                instance.setFloatMatrix3(name, matrix.data());
            },
            py::arg("name"), py::arg("matrix"),
            "Sets a float 3*3 matrix variable value")

        .def("set_float_matrix4", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
                if (matrix.size() != 4*4)
                    throw std::invalid_argument("Expected a list-like input containing " + std::to_string(4*4) +
                        " values but got " + std::to_string(matrix.size()));
                instance.setFloatMatrix4(name, matrix.data());
            },
            py::arg("name"), py::arg("matrix"),
            "Sets a float 4*4 matrix variable value")

        .def("set_float_array", &GL::VariablesBundle::setFloatArray,
            py::arg("name"), py::arg("values"),
            "Sets a float array variable value");
731 
732  /**
733  * Metric
734  */
735  py::class_<Metric, AbstractTask> metric(module, "Metric", "Measures the difference between two bitmaps");
736 
737  py::enum_<Metric::Norm>(metric, "Norm", "Norm (distance) to measure between two images")
738  .value("L1", Metric::Norm::L1, "sum of absolute differences")
739  .value("L2", Metric::Norm::L2, "Euclidean distance: square root of squared differences")
740  .export_values();
741 
742  metric.def(py::init<>())
743 
744  .def("set_bitmaps", (void (Metric::*)(AbstractBitmap*, AbstractBitmap*))&Metric::setBitmaps,
745  py::arg("bitmap1"), py::arg("bitmap2"),
746  py::keep_alive<1, 2>(), py::keep_alive<1, 3>(), // metric alive => bitmaps alive
747  "Sets input images")
748 
749  .def("set_bitmaps", [](Metric& metric, AbstractBitmap* bitmap1, const py::tuple& roi1, AbstractBitmap* bitmap2, const py::tuple& roi2){
750  metric.setBitmaps(bitmap1, Python::toRectangle<int>(roi1),
751  bitmap2, Python::toRectangle<int>(roi2));
752  },
753  py::arg("bitmap1"), py::arg("roi1"), py::arg("bitmap2"), py::arg("roi2"),
754  py::keep_alive<1, 2>(), py::keep_alive<1, 4>(), // metric alive => bitmaps alive
755  "Sets input images and rectangular regions delimiting the measurement areas")
756 
757  .def("set_norm", &Metric::setNorm, "Specifies the norm to use in the measurement")
758 
759  .def("get_result", &Metric::getResult, "Returns the measurement result (after the task is executed")
760 
761  .def_static("psnr", &Metric::psnr, py::arg("bitmap1"), py::arg("bitmap2"),
762  "Computes peak signal-to-noise ratio in dB for two given images");
763 
    /**
     * ImageShader
     */
    // Inherits uniform setters from gl.VariablesBundle registered above.
    py::class_<ImageShader, GL::VariablesBundle>(module, "ImageShader", "A GLSL program to process images")

        .def(py::init<Context&>(), py::arg("context"), py::keep_alive<1, 2>())

        .def("set_source_code", &ImageShader::setSourceCode,
            py::arg("glsl"),
            R"doc(Passes new source code to the fragment shader.
 The new source code will be compiled and linked when next rendering occurs.)doc")

        // The three class-level string constants below are exposed as
        // read-only static properties (lambdas ignore the class object).
        .def_property_readonly_static("INPUT_IMAGE_DECL_TYPE",
            [](py::object) { return ImageShader::INPUT_IMAGE_DECL_TYPE; },
            "A virtual input image type defined at shader compile time by ordinary texture or OES texture sampler depending on the input bound")

        .def_property_readonly_static("INPUT_IMAGE_ID",
            [](py::object) { return ImageShader::INPUT_IMAGE_ID; },
            "Shader variable name referring to the input image")

        .def_property_readonly_static("CODE_HEADER",
            [](py::object) { return ImageShader::CODE_HEADER; },
            "Shader code header containing necessary declarations");
787 
788  /**
789  * ShaderApplicator
790  */
791  py::class_<ShaderApplicator, AbstractTask>(module, "ShaderApplicator", "A task applying an image shader to bitmaps")
792 
793  .def(py::init<>())
794 
795  .def("add_sampler", &ShaderApplicator::addSampler,
796  py::arg("bitmap"), py::arg("uniform_name") = ImageShader::INPUT_IMAGE_ID,
797  py::keep_alive<1, 2>(), // applicator alive => bitmap alive
798  R"doc(
799  Connects a bitmap to a shader uniform variable.
800  The bitmap connected to ImageShader::INPUT_IMAGE_ID is used to resolve the sampler type (ImageShader::INPUT_IMAGE_DECL_TYPE).
801  )doc")
802 
803  .def("remove_sampler", &ShaderApplicator::removeSampler,
804  py::arg("uniform_name"),
805  R"doc(
806  Removes a sampler with a uniform variable name.
807  Returns True if a sampler associated to the given variable existed and was removed, false otherwise.
808  )doc")
809 
810  .def("clear_samplers", &ShaderApplicator::clearSamplers, "Clears all connections of bitmaps to samplers")
811 
812  .def_property("shader",
814  py::cpp_function(&ShaderApplicator::setShader, py::keep_alive<1, 2, 1>()), // applicator alive => shader alive
815  "Shader to apply to the bitmap(s)")
816 
817  .def_property("output_bitmap",
819  py::cpp_function(&ShaderApplicator::setOutputBitmap, py::keep_alive<1, 2, 2>()), // applicator alive => bitmap alive
820  "Output bitmap");
821 
822  /**
823  * Scene and its layers
824  */
    // Scene is declared first so that the nested Layer classes below can be
    // registered in its scope.
    py::class_<Scene> scene(module, "Scene", "An ordered set of layers representing renderable content");

    py::class_<Scene::Layer> layer(scene, "Layer",
        R"doc(
        Abstract scene layer having name, type, geometry and some content to display.
        The layer geometry is defined by an AffineMapping describing the position and the orientation of the layer content in the rendered image.
        )doc");

    // Layer type tags; export_values() makes them available in the Layer scope.
    py::enum_<Scene::Layer::Type>(layer, "Type", "Layer type")
        .value("SCENE", Scene::Layer::Type::SceneLayer, "layer containing a scene")
        .value("BITMAP", Scene::Layer::Type::BitmapLayer, "layer displaying a bitmap")
        .value("MASKED_BITMAP", Scene::Layer::Type::MaskedBitmapLayer, "layer displaying a bitmap with mask")
        .value("SHAPED_BITMAP", Scene::Layer::Type::ShapedBitmapLayer, "layer displaying a bitmap within a shape")
        .value("SHADED_BITMAP", Scene::Layer::Type::ShadedBitmapLayer, "layer displaying a bitmap through a custom fragment shader")
        .export_values();
840 
841  layer.def("get_type", &Scene::Layer::getType, "Returns layer type")
842 
843  .def_property("name", &Scene::Layer::getName, &Scene::Layer::setName)
844 
846  "Layer mapping in parent coordinates")
847 
848  .def("test_point", &Scene::Layer::testPoint,
849  py::arg("x"), py::arg("y"),
850  "Tests if a given point falls in the layer")
851 
852  .def("test_point", [](const Scene::Layer& layer, const py::tuple& point) {
853  Point pt = Python::toPoint<float>(point);
854  return layer.testPoint(pt.x, pt.y);
855  },
856  py::arg("point"),
857  "Tests if a given point falls in the layer")
858 
859  .def("get_child", &Scene::Layer::getChild,
860  py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
861  "Picks a child layer at given point, if any")
862 
863  .def("get_child", [](const Scene::Layer& layer, const py::tuple& point, unsigned int recursionDepth) {
864  Point pt = Python::toPoint<float>(point);
865  return layer.getChild(pt.x, pt.y, recursionDepth);
866  },
867  py::arg("point"), py::arg("recursion_depth") = 0,
868  "Picks a child layer at given point, if any")
869 
870  .def_property("visible", &Scene::Layer::isVisible, &Scene::Layer::setVisible,
871  "Controls the layer visibility. If set to `False`, the layer and its sublayers are ignored when rendering.")
872 
873  .def_property("phantom", &Scene::Layer::isPhantom, &Scene::Layer::setPhantom,
874  "If set to `True`, the layer goes \"phantom\": it and its sublayers, if any, are ignored when searching a layer by point.");
875 
    // SceneLayer: a layer that embeds a whole sub-scene.
    py::class_<Scene::SceneLayer, Scene::Layer>(scene, "SceneLayer",
        "Layer containing an entire scene")
        .def("get_scene", &Scene::SceneLayer::getScene, "Returns a Scene contained in the Layer");
879 
880  py::class_<Scene::BitmapLayer, Scene::Layer>(scene, "BitmapLayer",
881  R"doc(
882  Layer having an image to render.
883  The image has a position and orientation with respect to the layer. This is expressed with an affine mapping applied on top of the layer
884  mapping.
885  )doc")
886 
887  .def_property("bitmap",
889  py::cpp_function(&Scene::BitmapLayer::setBitmap, py::keep_alive<1, 2, 1>()), // layer alive => bitmap alive
890  "Bitmap attached to the layer")
891 
892  .def_property("bitmap_mapping",
894  "Bitmap geometry mapping applied on top of the layer mapping")
895 
896  .def_property("modulation_color",
897  [](Scene::BitmapLayer& layer) { return Python::toTuple(layer.getModulationColor()); },
898  [](Scene::BitmapLayer& layer, const py::tuple& color){ layer.setModulationColor(Python::toColor4i(color)); },
899  "Modulation color (R, G, B, A). Multiplies bitmap pixel colors when rendering");
900 
901  py::class_<Scene::CustomMaskedBitmapLayer, Scene::BitmapLayer>(scene, "CustomMaskedBitmapLayer",
902  R"doc(
903  Layer containing a bitmap and a mask applied to the bitmap when rendering.
904  Both bitmap and mask have their own positions and orientations relative to the layer's position and orientation.
905  )doc")
906 
907  .def_property("mask_mapping",
910  "Mask geometry mapping applied on top of the layer mapping")
911 
912  .def_property("background_color",
913  [](Scene::CustomMaskedBitmapLayer& layer) { return Python::toTuple(layer.getBackgroundColor()); },
914  [](Scene::CustomMaskedBitmapLayer& layer, const py::tuple& color){ layer.setBackgroundColor(Python::toColor4i(color)); },
915  "Background color (R, G, B, A). Fills layer pixels falling out of the mask area");
916 
917  py::class_<Scene::MaskedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "MaskedBitmapLayer",
918  "Bitmap layer using another bitmap as a mask")
919 
920  .def_property("mask",
922  py::cpp_function(&Scene::MaskedBitmapLayer::setMask, py::keep_alive<1, 2, 2>()), // layer alive => bitmap alive
923  "Mask bitmap");
924 
925  py::class_<Scene::ShapedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "ShapedBitmapLayer",
926  "Layer containing a bitmap and a parametric mask (shape)")
927 
929  "Mask border thickness in pixels or normalized coordinates. " \
930  "These pixels are cropped out from the image and replaced with the background color.")
931 
933  "Mask border slope width in pixels or normalized coordinates. "\
934  "The border slope is a linear transition from background color to image pixels.")
935 
937  "Radius of mask corners in pixels or normalized coordinates")
938 
940  "If set to `True`, all the parameter values are interpreted as if given in pixels. Otherwise the normalized coordinates are used.");
941 
942  py::class_<Scene::ShadedBitmapLayer, Scene::BitmapLayer>(scene, "ShadedBitmapLayer", "Bitmap layer using a custom shader")
943 
944  .def_property("shader",
946  py::cpp_function(&Scene::ShadedBitmapLayer::setShader, py::keep_alive<1, 2, 3>()), // layer alive => bitmap alive
947  "Fragment shader taking the layer bitmap as texture");
948 
    // Scene factory and lookup methods. Every new_* factory returns a
    // reference to a layer owned by the scene; return_value_policy::reference
    // avoids Python taking ownership, and keep_alive<1, 0> links the lifetimes
    // (see the inline comments carried over from the original).
    scene.def(py::init<>())

        .def("new_bitmap_layer", (Scene::BitmapLayer& (Scene::*)(const char*))&Scene::newBitmapLayer,
            py::arg("name"),
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new bitmap layer")

        .def("new_bitmap_layer", (Scene::BitmapLayer& (Scene::*)())&Scene::newBitmapLayer,
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new bitmap layer")

        .def("new_masked_bitmap_layer", (Scene::MaskedBitmapLayer& (Scene::*)(const char*))&Scene::newMaskedBitmapLayer,
            py::arg("name"),
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new masked bitmap layer")

        .def("new_masked_bitmap_layer", (Scene::MaskedBitmapLayer& (Scene::*)())&Scene::newMaskedBitmapLayer,
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new masked bitmap layer")

        .def("new_shaped_bitmap_layer", (Scene::ShapedBitmapLayer& (Scene::*)(const char*))&Scene::newShapedBitmapLayer,
            py::arg("name"),
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new shaped bitmap layer")

        .def("new_shaped_bitmap_layer", (Scene::ShapedBitmapLayer& (Scene::*)())&Scene::newShapedBitmapLayer,
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new shaped bitmap layer")

        .def("new_shaded_bitmap_layer", (Scene::ShadedBitmapLayer& (Scene::*)(const char*))&Scene::newShadedBitmapLayer,
            py::arg("name"),
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new shaded bitmap layer")

        .def("new_shaded_bitmap_layer", (Scene::ShadedBitmapLayer& (Scene::*)())&Scene::newShadedBitmapLayer,
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Creates a new shaded bitmap layer")

        .def("add_scene", &Scene::addScene,
            py::return_value_policy::reference, py::keep_alive<1, 0>(),     // scene alive => layer alive
            "Adds a subscene to the current scene.")

        // Lookup overloads: by name (nullable), by index, or by point hit-test.
        .def("get_layer", (Scene::Layer* (Scene::*)(const char*) const)&Scene::getLayer,
            py::arg("name"),
            "Retrieves a layer by its name or None if not found")

        .def("get_layer", (Scene::Layer& (Scene::*)(int) const)&Scene::getLayer,
            py::arg("index"),
            "Retrieves a layer by its index")

        .def("get_layer", (Scene::Layer* (Scene::*)(float, float, unsigned int) const)&Scene::getLayer,
            py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
            "Retrieves a layer present at a specific point of the scene or None if not found")

        .def("get_layer_index", &Scene::getLayerIndex,
            py::arg("layer"),
            "Retrieves layer index in the scene or -1 if not found")

        .def("get_layer_count", &Scene::getLayerCount, "Returns total number of layers in the scene");
1008 
1009  /**
1010  * SceneRenderer
1011  */
    // SceneRenderer: the task turning a Scene into pixels; its OutputMapping
    // enum controls how scene coordinates map onto the output.
    py::class_<SceneRenderer, AbstractTask> sceneRenderer(module, "SceneRenderer",
        R"doc(
        AbstractTask rendering a Scene.
        The rendering may be done to a given bitmap or on screen, if the platform supports on-screen rendering.
        )doc");

    py::enum_<SceneRenderer::OutputMapping>(sceneRenderer, "OutputMapping", "Scene coordinates to output (screen or bitmap) pixel coordinates mapping")
        .value("STRETCH", SceneRenderer::OutputMapping::STRETCH, "output viewport covers entirely the scene axis span, aspect ratio is not preserved in general")
        .value("FIT_WIDTH_TO_TOP", SceneRenderer::OutputMapping::FIT_WIDTH_TO_TOP, "width is covered entirely, height is resized to keep aspect ratio, the top borders are aligned")
        .value("FIT_WIDTH", SceneRenderer::OutputMapping::FIT_WIDTH, "width is covered entirely, height is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")
        .value("FIT_HEIGHT", SceneRenderer::OutputMapping::FIT_HEIGHT, "height is covered entirely, width is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")
        .export_values();
1024 
1025  sceneRenderer.def(py::init<>())
1026 
1027  .def_property("output",
1029  py::cpp_function(&SceneRenderer::setOutput, py::keep_alive<1, 2, 1>()), // instance alive => bitmap alive
1030  "Output bitmap")
1031 
1032  .def_property("scene",
1034  py::cpp_function(&SceneRenderer::setScene, py::keep_alive<1, 2, 2>()), // instance alive => scene alive
1035  "Scene")
1036 
1037  .def_property("output_mapping", &SceneRenderer::getOutputMapping, &SceneRenderer::setOutputMapping,
1038  "Specifies how the scene coordinates [0,1]² are mapped to the output (screen or bitmap) pixel coordinates.")
1039 
1040  .def_property("output_reference_width", &SceneRenderer::getOutputReferenceWidth, &SceneRenderer::setOutputReferenceWidth,
1041  "Value overriding output width for elements that have their size in pixels, in order to render a resolution-independent picture")
1042 
1043  .def_property("output_pixels_fetching", &SceneRenderer::getOutputPixelsFetching, &SceneRenderer::setOutputPixelsFetching,
1044  R"doc(
1045  If set to `True`, the output image data is pulled from GPU to CPU memory every time the rendering is done.
1046  This is convenient if the rendered image is an application output result, and is further stored or sent through the network.
1047  Otherwise, if the image is to be further processed inside Beatmup, the pixel transfer likely introduces an unnecessary latency and may
1048  cause FPS drop in real-time rendering.
1049  Has no effect in on-screen rendering.
1050  )doc")
1051 
1052  .def_property("background_image",
1054  py::cpp_function(&SceneRenderer::setBackgroundImage, py::keep_alive<1, 2, 3>()), // instance alive => bitmap alive
1055  "Image to pave the background.")
1056 
1057  .def("reset_output", &SceneRenderer::resetOutput,
1058  R"doc(
1059  Removes a bitmap from the renderer output, if any, and switches to on-screen rendering.
1060  The rendering is done on the display currently connected to the Context running the rendering task.
1061  )doc")
1062 
1063  .def("pick_layer", &SceneRenderer::pickLayer,
1064  py::arg("x"), py::arg("y"), py::arg("inPixels"), R"doc(
1065  Searches for a layer at a given position.
1066  In contrast to :func:`~beatmup.Scene.get_layer` it takes into account the output mapping.
1067 
1068  :param x: x coordinate.
1069  :param y: y coordinate.
1070  :param pixels: If `True`, the coordinates are taken in pixels.
1071 
1072  Returns the topmost layer at the given position if any, None if no layer found.
1073  )doc");
1074 
1075  /**
1076  * CustomPipeline::TaskHolder
1077  */
    // CustomPipeline is declared first so that its nested TaskHolder class can
    // be registered in its scope; the pipeline's own methods are bound below.
    py::class_<CustomPipeline, AbstractTask> customPipeline(module, "CustomPipeline",
        R"doc(
        Custom pipeline: a sequence of tasks to be executed as a whole.
        Acts as an AbstractTask. Built by adding tasks one by one and calling measure() at the end.
        )doc");

    py::class_<CustomPipeline::TaskHolder>(customPipeline, "TaskHolder",
        "A task within a pipeline")

        .def("get_task", &CustomPipeline::TaskHolder::getTask,
            "Returns the task in the current holder")

        .def("get_run_time", &CustomPipeline::TaskHolder::getRunTime,
            "Returns last execution time in milliseconds");
1092 
1093  /**
1094  * CustomPipeline
1095  */
    // CustomPipeline members. keep_alive<1, 2> on add/insert prevents Python
    // from collecting a task still referenced by the pipeline.
    customPipeline
        .def("get_task_count", &CustomPipeline::getTaskCount,
            "Returns number of tasks in the pipeline")

        .def("get_task", &CustomPipeline::getTask, py::arg("index"),
            py::return_value_policy::reference,
            "Retrieves a task by its index")

        .def("get_task_index", &CustomPipeline::getTaskIndex, py::arg("holder"),
            "Retrieves task index if it is in the pipeline; returns -1 otherwise")

        .def("add_task", &CustomPipeline::addTask, py::arg("task"),
            py::keep_alive<1, 2>(),     // pipeline alive => task alive
            py::return_value_policy::reference,
            "Adds a new task to the end of the pipeline")

        .def("insert_task", &CustomPipeline::insertTask, py::arg("task"), py::arg("before"),
            py::keep_alive<1, 2>(),     // pipeline alive => task alive
            py::return_value_policy::reference,
            "Inserts a task in a specified position of the pipeline before another task")

        .def("remove_task", &CustomPipeline::removeTask, py::arg("task"),
            "Removes a task from the pipeline, if any. Returns True on success")

        .def("measure", &CustomPipeline::measure,
            "Determines pipeline execution mode and required thread count");
1122 
1123  /**
1124  * Multitask
1125  */
    // Multitask: a CustomPipeline whose steps run conditionally according to a
    // per-task RepetitionPolicy (bound just below).
    py::class_<Multitask, CustomPipeline> multitask(module, "Multitask",
        R"doc(
        Conditional multiple tasks execution.

        Beatmup offers a number of tools allowing to pipeline several tasks into a single one. This technique is particularly useful for designing
        complex multi-stage image processing pipelines.

        Multitask is the simplest such tool. It allows to concatenate different tasks into a linear conveyor and run them all or selectively.
        To handle this selection, each task is associated with a repetition policy specifying the conditions whether this given task is executed
        or ignored when the pipeline is running.

        Specifically, there are two extreme modes that force the task execution every time (REPEAT_ALWAYS) or its unconditional skipping
        (IGNORE_ALWAYS) and two more sophisticated modes with the following behavior:

        - IGNORE_IF_UPTODATE skips the task if no tasks were executed among the ones coming before the current task in the pipeline;
        - REPEAT_UPDATE forces task repetition one time on next run and just after switches the repetition policy to IGNORE_IF_UPTODATE.
        )doc");

    py::enum_<Multitask::RepetitionPolicy>(multitask, "RepetitionPolicy",
        "Determines when a specific task in the sequence is run when the whole sequence is invoked")
        .value("REPEAT_ALWAYS", Multitask::RepetitionPolicy::REPEAT_ALWAYS, "execute the task unconditionally on each run")
        .value("REPEAT_UPDATE", Multitask::RepetitionPolicy::REPEAT_UPDATE, "execute the task one time then switch to IGNORE_IF_UPTODATE")
        .value("IGNORE_IF_UPTODATE", Multitask::RepetitionPolicy::IGNORE_IF_UPTODATE, "do not execute the task if no preceding tasks are run")
        .value("IGNORE_ALWAYS", Multitask::RepetitionPolicy::IGNORE_ALWAYS, "do not execute the task")
        .export_values();
1151 
    // Multitask members: construction plus per-task policy access.
    multitask
        .def(py::init<>())

        .def("get_repetition_policy", &Multitask::getRepetitionPolicy, py::arg("task"),
            "Returns repetition policy of a specific task in the pipeline.")

        .def("set_repetition_policy", &Multitask::setRepetitionPolicy, py::arg("task"), py::arg("policy"),
            R"doc(
            Sets repetition policy of a task. If the pipeline is processing at the moment of the call, it is the application responsibility to abort
            and restart it, if the policy change needs to be applied immediately.
            )doc");
1163 
1164  /**
1165  * ChunkCollection
1166  */
1167  py::class_<ChunkCollection>(module, "ChunkCollection",
1168  R"doc(
1169  A key-value pair set storing pieces of arbitrary data (chunks) under string keys.
1170  A chunk is a header and a piece of data packed in memory like this: (idLength[4], id[idLength], size[sizeof(chunksize_t)], data[size])
1171  ChunkCollection defines an interface to retrieve chunks by their ids.
1172  )doc")
1173  .def("open", &ChunkCollection::open, "Opens the collection to read chunks from it.")
1174  .def("close", &ChunkCollection::close, "Closes the collection after a reading session.")
1175  .def("size", &ChunkCollection::size, "Returns the number of chunks available in the collection after it is opened.")
1176  .def("chunk_exists", &ChunkCollection::chunkExists, py::arg("id"),
1177  R"doc(
1178  Check if a specific chunk exists.
1179 
1180  :param id: the chunk id
1181 
1182  Returns `True` if only the chunk exists in the collection.
1183  )doc")
1184  .def("chunk_size", &ChunkCollection::chunkSize, py::arg("id"),
1185  R"doc(
1186  Retrieves size of a specific chunk.
1187 
1188  :param id: the chunk id
1189 
1190  Return size of the chunk in bytes, 0 if not found.
1191  )doc")
1192  .def("__getitem__", [](ChunkCollection& collection, const std::string& id) -> py::object {
1193  if (collection.chunkExists(id)) {
1194  Chunk chunk(collection, id);
1195  return py::bytes(static_cast<const char*>(chunk()), chunk.size());
1196  }
1197  return py::none();
1198  }, py::arg("id"), "Returns the chunk data by its id");
1199 
1200  /**
1201  * ChunkFile
1202  */
    // ChunkFile: on-disk ChunkCollection; the file is scanned, not loaded.
    py::class_<ChunkFile, ChunkCollection>(module, "ChunkFile",
        R"doc(
        File containing chunks.
        The file is not loaded in memory, but is scanned when first opened to collect the information about available chunks.
        )doc")
        .def(py::init<const std::string&, bool>(), py::arg("filename"), py::arg("open_now") = true, R"doc(
            Creates a chunkfile accessor.
            The file content is not read until open() is called.

            :param filename: the file name / path
            :param open_now: if `true`, the file is read right away. Otherwise it is done on open() call.
                             No information is available about chunks in the file until it is opened.
            )doc");
1216 
1217  /**
1218  * Python::WritableChunkCollection
1219  */
    // WritableChunkCollection: in-memory, Python-writable chunk store backed by
    // Python buffers (no copy on __setitem__).
    py::class_<Python::WritableChunkCollection, ChunkCollection>(module, "WritableChunkCollection",
        R"doc(
        Writable ChunkCollection implementation for Python.
        Allows to exchange binary data without copying.
        )doc")
        .def(py::init<>())

        .def("__setitem__", [](Python::WritableChunkCollection& collection, const std::string& id, py::buffer& buffer) { collection[id] = buffer; },
            "Stores new chunk")

        // Missing ids yield None rather than raising KeyError.
        .def("__getitem__", [](Python::WritableChunkCollection& collection, const std::string& id) -> py::object {
            if (collection.chunkExists(id))
                return collection[id];
            return py::none();
        }, py::arg("id"), "Returns the chunk data by its id")

        .def("save", &Python::WritableChunkCollection::save, py::arg("filename"), py::arg("append"), R"doc(
            Saves the collection to a file.

            :param filename: The name of the file to write chunks to
            :param append: If True, writing to the end of the file (keeping the existing content). Rewriting the file otherwise.
            )doc");
1242 
1243  /**
1244  * NNets::ActivationFunction
1245  */
    // NNets enums, registered on the `nnets` submodule (created earlier in
    // this module body, outside this excerpt).
    py::enum_<NNets::ActivationFunction>(nnets, "ActivationFunction", "Activation function specification")
        .value("DEFAULT", NNets::ActivationFunction::DEFAULT, "default activation: 0..1 bounded ReLU (identity clipped to 0..1 range)")
        .value("BRELU6", NNets::ActivationFunction::BRELU6, "0.167 times identity clipped to 0..1 range")
        .value("SIGMOID_LIKE", NNets::ActivationFunction::SIGMOID_LIKE, "a piecewise-linear sigmoid function approximation")
        .export_values();

    /**
     * NNets::Size::Padding
     */
    py::enum_<NNets::Size::Padding>(nnets, "Padding", "Zero padding specification")
        .value("SAME", NNets::Size::Padding::SAME, "operation output size matches its input size for unit strides")
        .value("VALID", NNets::Size::Padding::VALID, "no zero padding")
        .export_values();
1259 
1260  /**
1261  * NNets::AbstractOperation
1262  */
    // AbstractOperation: read-only introspection of a neural net layer.
    py::class_<NNets::AbstractOperation>(nnets, "AbstractOperation",
        R"doc(
        Abstract neural net operation (layer).
        Has a name used to refer the operation in a Model. The operation data (such as convolution weights) is provided through a ChunkCollection
        in single precision floating point format, where the chunks are searched by operation name.
        Operations have several inputs and outputs numbered starting from zero.
        )doc")
        .def_property_readonly("name", &NNets::AbstractOperation::getName,
            "Operation name")

        .def_property_readonly("input_count", &NNets::AbstractOperation::getInputCount,
            "Number of operation inputs")

        .def_property_readonly("output_count", &NNets::AbstractOperation::getOutputCount,
            "Number of operation outputs");
1278 
1279  /**
1280  * NNets::Conv2D
1281  */
1282  py::class_<NNets::Conv2D, NNets::AbstractOperation>(nnets, "Conv2D",
1283  R"doc(
1284  2D convolution operation computed on GPU.
1285  Has 2 inputs: main and residual (detailed below), and a single output.
1286  Constraints:
1287 
1288  * Input and output are 3D tensors with values in [0, 1] range sampled over 8 bits.
1289  * Number of input feature maps is 3 or a multiple of 4.
1290  * Number of output feature maps is a multiple of 4.
1291  * For group convolutions, each group contains a multiple of 4 input channels and a multiple of 4 output
1292  channels, or exactly 1 input and 1 output channel (i.e., depthwise).
1293  * Kernels are of square shape.
1294  * Strides are equal along X and Y.
1295  * Dilations are equal to 1.
1296  * If an image is given on input (3 input feature maps), only valid padding is supported.
1297  * An activation function is always applied on output.
1298 
1299  Raspberry Pi-related constraints:
1300 
1301  * Pi cannot sample more than 256 channels to compute a single output value. Actual practical limit is
1302  yet lower: something about 128 channels for pointwise convolutions and less than 100 channels for
1303  bigger kernels. When the limit is reached, Pi OpenGL driver reports an out of memory error (0x505).
1304 
1305  Features:
1306 
1307  * Bias addition integrated.
1308  * An optional residual input is available: a tensor of output shape added to the convolution result
1309  before applying the activation function.
1310  )doc")
1311 
1312  .def(py::init<const std::string&, const int, const int, const int, const int, const NNets::Size::Padding, const bool, const int, const NNets::ActivationFunction>(),
1313  py::arg("name"), py::arg("kernel_size"), py::arg("num_input_channels"), py::arg("num_output_channels"),
1314  py::arg("stride") = 1,
1315  py::arg("padding") = NNets::Size::Padding::VALID,
1316  py::arg("use_bias") = true,
1317  py::arg("num_groups") = 1,
1318  py::arg("activation") = NNets::ActivationFunction::DEFAULT,
1319  R"doc(
1320  Instantiates a 2D convolution operation.
1321 
1322  :param name: operation name.
1323  :param kernel_size: convolution kernel size.
1324  :param num_input_channels: number of input feature map channels (input depth).
1325  :param num_output_channels: number of output feature map channels (output depth).
1326  :param stride: convolution stride.
1327  :param padding: padding policy.
1328  :param use_bias: if `true`, the bias addition is enabled. The bias vector is searched in the model data.
1329  :param num_groups: number of convolution groups to get a group/depthwise convolution.
1330  :param activation: activation function applied to the operation output.
1331  )doc")
1332 
1333  .def_property_readonly("use_bias", &NNets::Conv2D::isBiasUsed, "Returns `true` if bias addition is enabled")
1334 
1335  .def_property_readonly_static("filters_chunk_suffix", [](py::object){ return NNets::Conv2D::FILTERS_CHUNK_SUFFIX; },
1336  "Suffix added to the op name to get the filters chunk id in the model data")
1337 
1338  .def_property_readonly_static("bias_chunk_suffix", [](py::object){ return NNets::Conv2D::BIAS_CHUNK_SUFFIX; },
1339  "Suffix added to the op name to get the filters chunk id in the model data");
1340 
1341  /**
1342  * NNets::Pooling2D
1343  */
    // Pooling2D is declared first so that its Operator enum can be registered
    // in its scope; the constructor is bound below.
    py::class_<NNets::Pooling2D, NNets::AbstractOperation> pooling2d(nnets, "Pooling2D",
        R"doc(
        2D pooling operation computed on GPU.
        Has a single input and a single output.
        Constraints:

        * Input and output are 3D tensors with values in [0, 1] range sampled over 8 bits.
        * Number of feature maps is a multiple of 4.
        * Pooling area is of square shape.
        * Strides are equal along X and Y.
        * Average pooling only accepts valid zero padding,

        Raspberry Pi-related constraints:

        * Pi cannot sample more than 256 channels to compute a single output value. Actual practical limit is
          yet lower: pooling size may be limited by 10. When the limit is reached, Pi OpenGL driver reports an
          out of memory error (0x505).
        )doc");

    /**
     * NNets::Pooling2D::Operator
     */
    py::enum_<NNets::Pooling2D::Operator>(pooling2d, "Operator", "Pooling operator specification")
        .value("MAX", NNets::Pooling2D::Operator::MAX, "max pooling")
        .value("AVERAGE", NNets::Pooling2D::Operator::AVERAGE, "average pooling")
        .export_values();
1370 
1371  pooling2d.def(py::init<const std::string&, const NNets::Pooling2D::Operator, const int, const int, const NNets::Size::Padding>(),
1372  py::arg("name"), py::arg("operator"), py::arg("size"),
1373  py::arg("stride") = 0,
1374  py::arg("padding") = NNets::Size::Padding::VALID,
1375  R"doc(
1376  Instantiates a 2D pooling operation.
1377 
1378  :param name: layer name.
1379  :param op: pooling operator.
1380  :param size: spatial pooling operational size.
1381  :param stride: pooling stride; if 0, the size is used.
1382  :param padding: zero padding applied to the input.
1383  )doc");
1384 
1385  /**
1386  * NNets::Dense
1387  */
1388  py::class_<NNets::Dense, NNets::AbstractOperation>(nnets, "Dense",
1389  R"doc(
1390  Dense (linear) layer.
1391  Computes `A*x + b` for input feature vector `x`, a matrix `A` and an optional bias vector `b`.
1392  Accepts a GL::Vector or a flat Storage view on input, amd only a GL::Vector on output.
1393  )doc")
1394  .def(py::init<Context&, const std::string&, int, bool>(),
1395  py::arg("context"), py::arg("name"), py::arg("num_output_dims"), py::arg("use_bias"),
1396  py::keep_alive<1, 2>(), // operation alive => context alive
1397  R"doc(
1398  Creates a Dense operation.
1399 
1400  :param context: a context instance
1401  :param name: operation name
1402  :param num_output_dims: number of output dimensions
1403  :param use_bias: if True, the bias vector addition is enabled
1404  )doc")
1405 
1406  .def_property_readonly_static("matrix_chunk_suffix", [](py::object){ return NNets::Dense::MATRIX_CHUNK_SUFFIX; },
1407  "Suffix added to the op name to get the matrix chunk id in the model data")
1408 
1409  .def_property_readonly_static("bias_chunk_suffix", [](py::object){ return NNets::Dense::BIAS_CHUNK_SUFFIX; },
1410  "Suffix added to the op name to get the bias chunk id in the model data");
1411 
1412  /**
1413  * NNets::ImageSampler
1414  */
    // ImageSampler: input preprocessing stage. The init lambda converts the
    // Python (width, height) tuple into a Point; pybind11 takes ownership of
    // the returned raw pointer.
    py::class_<NNets::ImageSampler, NNets::AbstractOperation>(nnets, "ImageSampler",
        R"doc(
        Image preprocessing operation.
        Samples an image of a fixed size from an arbitrary size texture. Has three key missions.
        * If enabled, performs a center crop keeping the output aspect ratio (otherwise the input is stretched to fit the output).
        * If enabled, uses linear interpolation when possible to reduce aliasing (otherwise nearest neighbor sampling is used).
        * Brings support of OES textures. This allows for example to read data directly from camera in Android.
        )doc")
        .def(py::init([](const std::string& name, const py::tuple& size, bool centerCrop, bool linearInterpolation) {
                return new NNets::ImageSampler(name, Python::toPoint<int>(size), centerCrop, linearInterpolation);
            }),
            py::arg("name"), py::arg("size"), py::arg("center_crop") = true, py::arg("linear_interp") = true,
            R"doc(
            Creates an instance of image preprocessing operation.

            :param name: operation name
            :param size: a tuple containing output image size in pixels
            :param center_crop: if True, the center crop is enabled
            :param linear_interp: if True, the linear interpolation is enabled
            )doc")

        .def_property("rotation", &NNets::ImageSampler::getRotation, &NNets::ImageSampler::setRotation, "Number of times a clockwise rotation by 90 degree is applied to the input image");
1437 
1438  /**
1439  * NNets::Softmax
1440  */
1441  py::class_<NNets::Softmax, NNets::AbstractOperation>(nnets, "Softmax",
1442  R"doc(
1443  Softmax layer.
1444  It does not have output, but acts as a sink. The resulting probabilities are returned by getProbabilities().
1445  This operation is executed on CPU.
1446  )doc")
1447  .def(py::init<const std::string&>(), py::arg("name"), R"doc(
1448  Creates a softmax layer.
1449 
1450  :param name: operation name
1451  )doc")
1452 
1453  .def("get_probabilities", &NNets::Softmax::getProbabilities, "Returns the list of probabilities");
1454 
1455  /**
1456  * NNets::Model
1457  */
1458  py::class_<NNets::Model>(nnets, "Model",
1459  R"doc(
1460  Neural net model.
1461  Contains a list of operations and programmatically defined interconnections between them using addConnection().
1462  Enables access to the model memory at any point in the model through addOutput() and getModelData().
1463  The memory needed to store internal data during the inference is allocated automatically; storages are reused when possible.
1464  The inference of a Model is performed by InferenceTask.
1465  )doc")
1466 
1467  .def(py::init<Context&>(), py::arg("context"), py::keep_alive<1, 2>()) // model alive => context alive
1468 
1469  .def("append", (void (NNets::Model::*)(NNets::AbstractOperation*, bool))&NNets::Model::append,
1470  py::arg("new_op"), py::arg("connect") = false,
1471  py::keep_alive<1, 2>(), // model alive => op alive
1472  R"doc(
1473  Adds a new operation to the model.
1474  The operation is added to the end of the operations list. The execution order corresponds to the addition order.
1475 
1476  :param new_op: the new operation
1477  :param connect: if `True`, the main operation input is connected to the operation output
1478  )doc")
1479 
1480  .def("add_operation", (void (NNets::Model::*)(const std::string&, NNets::AbstractOperation*))&NNets::Model::addOperation,
1481  py::arg("op_name"), py::arg("new_op"),
1482  py::keep_alive<1, 3>(), // model alive => op alive
1483  R"doc(
1484  Adds a new operation to the model before another operation in the execution order.
1485  The Model does not takes ownership of the passed pointer. The new operation is not automatically connected to other operations.
1486 
1487  :param op_name: name of the operation the new operation is inserted before
1488  :param new_op: the new operation
1489  )doc")
1490 
1491  .def("add_connection", (void (NNets::Model::*)(const std::string&, const std::string&, int, int, int))&NNets::Model::addConnection,
1492  py::arg("source_op"), py::arg("dest_op"), py::arg("output") = 0, py::arg("input") = 0, py::arg("shuffle") = 0,
1493  R"doc(
1494  Adds a connection between two given ops.
1495 
1496  :param source_op: name of the operation emitting the data
1497  :param dest_op: name of the operation receiving the data
1498  :param output: output number of the source operation
1499  :param input: input number of the destination operation
1500  :param shuffle: if greater than zero, the storage is shuffled.
1501  For shuffle = `n`, the output channels are sent to the destination operation in the following order:
1502  `0, 1, 2, 3, 4n, 4n+1, 4n+2, 4n+3, 8n, 8n+1, 8n+2, 8n+3, ..., 4, 5, 6, 7, 4n+4, 4n+5, 4n+6, 4n+7, 8n+4, ...`
1503  )doc")
1504 
1505  .def("add_output", (void (NNets::Model::*)(const std::string&, int))&NNets::Model::addOutput, py::arg("operation"), py::arg("output") = 0,
1506  R"doc(
1507  Enables reading output data from the model memory through get_output_data().
1508  A given operation output is connected to a storage that might be accessed by the application after the run.
1509 
1510  :operation: name of the operation to get data from
1511  :output: the operation output index
1512  )doc")
1513 
1514  .def("add_output", (void (NNets::Model::*)(const NNets::AbstractOperation&, int))&NNets::Model::addOutput, py::arg("operation"), py::arg("output") = 0,
1515  R"doc(
1516  Enables reading output data from the model memory through get_output_data().
1517  A given operation output is connected to a storage that might be accessed by the application after the run.
1518 
1519  :operation: operation to get data from. If not in the model, an exception is thrown.
1520  :output: the operation output index
1521  )doc")
1522 
1523  .def("get_output_data", &Python::getModelOutputDataByName, py::arg("op_name"), py::arg("output") = 0,
1524  R"doc(
1525  Reads data from the model memory.
1526  add_output() is needed to be called first in order to enable reading the data. Otherwise None is returned.
1527 
1528  :op_name: name of the operation to get data from
1529  :output: the operation output index
1530 
1531  Returns data array or None.
1532  )doc")
1533 
1534  .def("get_output_data", &Python::getModelOutputDataByOp, py::arg("operation"), py::arg("output") = 0,
1535  R"doc(
1536  Reads data from the model memory.
1537  add_output() is needed to be called first in order to enable reading the data. Otherwise None is returned.
1538 
1539  :operation: the operation to get data from
1540  :output: the operation output index
1541 
1542  Returns data array or None.
1543  )doc")
1544 
1545  .def("get_first_operation", (NNets::AbstractOperation& (NNets::Model::*)())&NNets::Model::getFirstOperation,
1546  py::return_value_policy::reference,
1547  "Returns the first operation in the model")
1548 
1549  .def("get_last_operation", (NNets::AbstractOperation& (NNets::Model::*)())&NNets::Model::getLastOperation,
1550  py::return_value_policy::reference,
1551  "Returns the last operation in the model")
1552 
1553  .def("serialize", &NNets::Model::serializeToString, "Returns serialized representation of the model as a string.")
1554 
1555  .def("count_multiply_adds", &NNets::Model::countMultiplyAdds, "Provides an estimation of the number of multiply-adds characterizing the model complexity.")
1556 
1557  .def("count_texel_fetches", &NNets::Model::countTexelFetches, "Provides an estimation of the total number of texels fetched by all the operations in the model per image.");
1558 
1559  /**
1560  * NNets::DeserializedModel
1561  */
1562  py::class_<NNets::DeserializedModel, NNets::Model>(nnets, "DeserializedModel",
1563  R"doc(
1564  Model reconstructed from a serialized representation.
1565  The representation format is the one rendered with Model::serialize(): a YAML-like listing containing "ops" and "connections" sections
1566  describing the model operations in execution order and connections between them respectively (see NNetsModelSerialization).
1567  )doc")
1568 
1569  .def(py::init<Context&, const std::string&>(), py::arg("context"), py::arg("str"),
1570  py::keep_alive<1, 2>() // model alive => context alive
1571  );
1572 
1573  /**
1574  * NNets::InferenceTask
1575  */
1576  py::class_<NNets::InferenceTask, AbstractTask>(nnets, "InferenceTask", "Task running inference of a Model")
1577  .def(py::init<NNets::Model&, ChunkCollection&>(), py::arg("model"), py::arg("data"),
1578  py::keep_alive<1, 2>(), py::keep_alive<1, 3>()) // task alive => model and data alive
1579 
1580  .def("connect", (void (NNets::InferenceTask::*)(AbstractBitmap&, const std::string&, int))&NNets::InferenceTask::connect,
1581  py::arg("image"), py::arg("op_name"), py::arg("input_index") = 0,
1582  py::keep_alive<1, 2, 1>(), // task alive => image alive
1583  R"doc(
1584  Connects an image to a specific operation input.
1585  Ensures the image content is up-to-date in GPU memory by the time the inference is run.
1586 
1587  :image: the image
1588  :op_name: the operation name
1589  :input_index: the input index of the operation
1590  )doc")
1591 
1593  py::arg("image"), py::arg("operation"), py::arg("input_index") = 0,
1594  py::keep_alive<1, 2, 1>(), // task alive => image alive
1595  R"doc(
1596  Connects an image to a specific operation input.
1597  Ensures the image content is up-to-date in GPU memory by the time the inference is run.
1598 
1599  :image: The image
1600  :operation: The operation
1601  :input_index: The input index of the operation
1602  )doc");
1603 
1604  /**
1605  * NNets::Classifier
1606  */
1607  py::class_<NNets::Classifier, NNets::Model, NNets::InferenceTask>(nnets, "Classifier",
1608  R"doc(
1609  Image classifier base class.
1610  Makes a runnable AbstractTask from a Model. Adds an image input and a vector of probabilities for output.
1611  )doc")
1612 
1613  .def("__call__", &NNets::Classifier::operator(),
1614  R"doc(
1615  Classifies an image (blocking).
1616  The very first call includes the model preparation and might be slow as hell. Subsequent calls only run the inference and are likely
1617  much faster.
1618 
1619  :param input: The input image
1620 
1621  Returns a vector of probabilities per class.
1622  )doc")
1623 
1624  .def("start", &NNets::Classifier::start,
1625  R"doc(
1626  Initiates the classification of a given image.
1627  The call is non-blocking.
1628 
1629  :param input: The input image
1630 
1631  Returns a job corresponding to the submitted task.
1632  )doc")
1633 
1634  .def("get_probabilities", &NNets::Classifier::getProbabilities,
1635  "Returns the last classification results (vector of probabilities per class).");
1636 
1637 }
PYBIND11_MODULE(beatmup, module)
Definition: bindings.cpp:62
A very basic class for any image.
Context & getContext() const
virtual const PixelFormat getPixelFormat() const =0
Pixel format of the bitmap.
std::string toString() const
Returns a string describing the bitmap.
void saveBmp(const char *filename)
Saves the bitmap to a BMP file.
virtual const msize getMemorySize() const =0
Bitmap size in bytes.
void zero()
Sets all the pixels to zero.
2x3 affine mapping containing a 2x2 matrix and a 2D point
Definition: geometry.h:639
void translate(const Point &shift)
Translates the mapping.
Definition: geometry.cpp:71
void invert()
Inverts the mapping.
Definition: geometry.cpp:47
void setCenterPosition(const Point &newPos)
Adjusts the mapping origin so that the center of the axes box matches a given point.
Definition: geometry.cpp:67
void scale(float factor, const Point &fixedPoint=Point::ZERO)
Scales the mapping around a given point in target domain.
Definition: geometry.cpp:75
void rotateDegrees(float angle, const Point &fixedPoint=Point::ZERO)
Rotates the mapping around a given point in target domain.
Definition: geometry.cpp:81
AffineMapping getInverse() const
Returns inverse mapping.
Definition: geometry.cpp:52
bool isPointInside(const Point &point) const
Tests whether a point from the output domain is inside the input axes span.
Definition: geometry.cpp:87
Point getPosition() const
Definition: geometry.h:653
Matrix2 getMatrix() const
Definition: geometry.h:657
Resamples an image to a given resolution.
Definition: resampler.h:33
void setOutput(AbstractBitmap *output)
Sets the output image.
Definition: resampler.cpp:51
void setInput(AbstractBitmap *input)
Sets the image to process.
Definition: resampler.cpp:45
float getCubicParameter() const
Returns cubic interpolation parameter ("alpha").
Definition: resampler.h:110
void setCubicParameter(float alpha)
Sets cubic interpolation parameter ("alpha").
Definition: resampler.cpp:64
@ CONVNET
upsampling x2 using a convolutional neural network
@ NEAREST_NEIGHBOR
zero-order: usual nearest neighbor
@ LINEAR
first order: bilinear interpolation
@ CUBIC
third order: bicubic interpolation
@ BOX
"0.5-order": anti-aliasing box filter; identical to nearest neighbor when upsampling
IntRectangle getInputRect() const
Definition: resampler.h:130
void setInputRect(const IntRectangle &rect)
Specifies a rectangular working area in the input bitmap.
Definition: resampler.cpp:69
Mode getMode() const
Returns currently selected resampling algorithm.
Definition: resampler.h:98
void setOutputRect(const IntRectangle &rect)
Specifies a rectangular working area in the output bitmap.
Definition: resampler.cpp:74
IntRectangle getOutputRect() const
Definition: resampler.h:131
AbstractBitmap * getInput()
Definition: resampler.h:86
void setMode(Mode mode)
Sets the resampling algorithm to use.
Definition: resampler.cpp:59
AbstractBitmap * getOutput()
Definition: resampler.h:87
A key-value pair set storing pieces of arbitrary data (chunks) under string keys.
Definition: chunkfile.h:36
virtual size_t size() const =0
Returns the number of chunks available in the collection after it is opened.
virtual void close()=0
Closes the collection after a reading session.
virtual bool chunkExists(const std::string &id) const =0
Check if a specific chunk exists.
virtual chunksize_t chunkSize(const std::string &id) const =0
Retrieves size of a specific chunk.
virtual void open()=0
Opens the collection to read chunks from it.
Basic class: task and memory management, any kind of static data.
Definition: context.h:59
const ThreadIndex maxAllowedWorkerCount(const PoolIndex pool=DEFAULT_POOL) const
Definition: context.cpp:276
bool isGpuReady() const
Definition: context.cpp:296
float performTask(AbstractTask &task, const PoolIndex pool=DEFAULT_POOL)
Performs a given task.
Definition: context.cpp:240
void wait(const PoolIndex pool=DEFAULT_POOL)
Blocks until all the submitted jobs are executed.
Definition: context.cpp:264
Job submitTask(AbstractTask &task, const PoolIndex pool=DEFAULT_POOL)
Adds a new task to the jobs queue.
Definition: context.cpp:248
bool abortJob(Job job, const PoolIndex pool=DEFAULT_POOL)
Aborts a given submitted job.
Definition: context.cpp:260
GL::RecycleBin * getGpuRecycleBin() const
Definition: context.cpp:340
Job submitPersistentTask(AbstractTask &task, const PoolIndex pool=DEFAULT_POOL)
Adds a new persistent task to the jobs queue.
Definition: context.cpp:252
bool queryGpuInfo(std::string &vendor, std::string &renderer)
Initializes the GPU if not yet and queries information about it.
Definition: context.cpp:307
void check(const PoolIndex pool=DEFAULT_POOL)
Checks if a specific thread pool is doing great: rethrows exceptions occurred during tasks execution,...
Definition: context.cpp:272
bool busy(const PoolIndex pool=DEFAULT_POOL)
Queries whether a given thread pool is busy with a task.
Definition: context.cpp:268
void repeatTask(AbstractTask &task, bool abortCurrent, const PoolIndex pool=DEFAULT_POOL)
Ensures a given task executed at least once.
Definition: context.cpp:244
void warmUpGpu()
Initializes GPU within a given Context if not yet (takes no effect if it already is).
Definition: context.cpp:300
bool isGpuQueried() const
Definition: context.cpp:292
void limitWorkerCount(ThreadIndex maxValue, const PoolIndex pool=DEFAULT_POOL)
Limits maximum number of threads (workers) when performing tasks in a given pool.
Definition: context.cpp:280
void waitForJob(Job job, const PoolIndex pool=DEFAULT_POOL)
Waits until a given job finishes.
Definition: context.cpp:256
bool removeTask(const TaskHolder &task)
Removes a task from the pipeline.
TaskHolder & getTask(int) const
Retrieves a task by its index.
int getTaskIndex(const TaskHolder &)
Retrieves task index if it is in the pipeline; returns -1 otherwise.
TaskHolder & insertTask(AbstractTask &task, const TaskHolder &before)
Inserts a task in a specified position of the pipeline before another task.
TaskHolder & addTask(AbstractTask &)
Adds a new task to the end of the pipeline.
void measure()
Determines pipeline execution mode and required thread count.
static const CustomPoint ZERO
Definition: geometry.h:122
Color matrix filter: applies mapping Ax + B at each pixel of a given image in RGBA space.
Definition: color_matrix.h:30
void setBrightness(float brightness)
Sets a brightness adjustment by a given factor (non-cumulative with respect to the current transforma...
void setColorInversion(color3f preservedHue, float saturationFactor=1.0f, float valueFactor=1.0f)
Resets the current transformation to a fancy color inversion mode with a fixed hue point.
void setHSVCorrection(float hueShiftDegrees, float saturationFactor=1.0f, float valueFactor=1.0f)
Resets the current transformation to a matrix performing standard HSV correction.
void setCoefficients(int outChannel, float bias, float r=.0f, float g=.0f, float b=.0f, float a=.0f)
Sets color matrix coefficients for a specific output color channel.
void applyContrast(float factor)
Applies a contrast adjustment by a given factor on top of the current transformation.
virtual void setInput(AbstractBitmap *input)
virtual void setOutput(AbstractBitmap *output)
Flood fill algorithm implementation.
Definition: flood_fill.h:36
void setSeeds(const IntPoint seeds[], int seedCount)
Specifies a set of seeds (starting points)
Definition: flood_fill.cpp:56
IntRectangle getBounds() const
Returns bounding box of the computed mask.
Definition: flood_fill.h:83
const AbstractBitmap * getInput() const
Returns input bitmap (null if not set yet)
Definition: flood_fill.h:73
void setComputeContours(bool)
Enables or disables contours computation.
Definition: flood_fill.cpp:70
void setInput(AbstractBitmap *)
Sets the input bitmap.
Definition: flood_fill.cpp:41
void setBorderPostprocessing(BorderMorphology operation, float holdRadius, float releaseRadius)
Specifies a morphological operation to apply to the mask border.
Definition: flood_fill.cpp:166
void setMaskPos(const IntPoint &)
Specifies left-top corner position of the mask to compute inside the input bitmap.
Definition: flood_fill.cpp:51
const IntegerContour2D & getContour(int contourIndex) const
Returns a contour by index if computeContours was true, throws an exception otherwise.
Definition: flood_fill.cpp:173
float getTolerance() const
Returns the intensity tolerance threshold.
Definition: flood_fill.h:124
void setTolerance(float)
Sets the intensity tolerance threshold used to decide on similarity of neighboring pixels.
Definition: flood_fill.cpp:161
const AbstractBitmap * getOutput() const
Returns output bitmap (null if not set yet)
Definition: flood_fill.h:78
int getContourCount() const
Returns number of detected contours.
Definition: flood_fill.h:88
void setOutput(AbstractBitmap *)
Specifies the bitmap to put the resulting mask to.
Definition: flood_fill.cpp:46
void emptyBin()
Empty the bin destroying all the items in a GPU-aware thread.
Definition: recycle_bin.cpp:91
virtual const int getDepth() const =0
Depth of the texture in pixels.
const int getNumberOfChannels() const
Returns number of channels containing in the texture.
virtual const int getHeight() const =0
Height of the texture in pixels.
virtual const int getWidth() const =0
Width of the texture in pixels.
void setFloatMatrix4(std::string name, const float matrix[16])
Sets a float 4*4 matrix variable value.
void setInteger(std::string name, int value)
Sets a scalar integer uniform value.
void setFloat(std::string name, float value)
Sets a scalar float uniform value.
void setFloatMatrix3(std::string name, const float matrix[9])
Sets a float 3*3 matrix variable value.
void setFloatMatrix2(std::string name, const float matrix[4])
Sets a float 2*2 matrix variable value.
void setFloatArray(std::string name, const std::vector< float > &values)
Sets a float array variable value.
static const std::string CODE_HEADER
Shader code header containing necessary declarations.
Definition: image_shader.h:114
void setSourceCode(const std::string &sourceCode)
Passes new source code to the fragment shader.
static const std::string INPUT_IMAGE_DECL_TYPE
A virtual input image type defined at shader compile time by ordinary texture or OES texture sampler ...
Definition: image_shader.h:104
static const std::string INPUT_IMAGE_ID
Shader variable name referring to the input image.
Definition: image_shader.h:109
A sequence of integer-valued 2D points.
Definition: contours.h:33
float getLength() const
Definition: contours.h:62
IntPoint getPoint(int index) const
Definition: contours.h:68
void clear()
Removes contour content.
Definition: contours.cpp:171
int getPointCount() const
Definition: contours.h:59
void addPoint(int x, int y)
Adds a new point to the end of the contour.
Definition: contours.cpp:137
Bitmap whose memory is managed by the Beatmup engine.
Measures the difference between two bitmaps.
Definition: metric.h:29
static float psnr(AbstractBitmap &bitmap1, AbstractBitmap &bitmap2)
Definition: metric.cpp:139
void setNorm(Norm norm)
Specifies the norm to use in the measurement.
Definition: metric.h:67
void setBitmaps(AbstractBitmap *bitmap1, AbstractBitmap *bitmap2)
Sets input images.
Definition: metric.cpp:72
double getResult() const
Definition: metric.h:72
RepetitionPolicy getRepetitionPolicy(const TaskHolder &)
Definition: multitask.cpp:78
void setRepetitionPolicy(TaskHolder &taskHolder, RepetitionPolicy policy)
Sets repetition policy of a task.
Definition: multitask.cpp:85
@ IGNORE_IF_UPTODATE
do not execute the task if no preceding tasks are run
@ REPEAT_ALWAYS
execute the task unconditionally on each run
@ IGNORE_ALWAYS
do not execute the task
@ REPEAT_UPDATE
execute the task one time then switch to IGNORE_IF_UPTODATE
Abstract neural net operation (layer).
Definition: operation.h:46
virtual int getOutputCount() const
Returns number of operation outputs.
Definition: operation.h:135
virtual int getInputCount() const
Returns number of operation inputs.
Definition: operation.h:129
std::string getName() const
Definition: operation.h:242
const std::vector< float > & getProbabilities() const
Returns the last classification results.
Definition: classifier.h:69
Job start(AbstractBitmap &input)
Initiates the classification of a given image.
Definition: classifier.cpp:50
static const char * BIAS_CHUNK_SUFFIX
suffix added to the op name to get the bias chunk id in the model data
Definition: conv2d.h:98
static const char * FILTERS_CHUNK_SUFFIX
suffix added to the op name to get the filters chunk id in the model data
Definition: conv2d.h:97
bool isBiasUsed() const
Definition: conv2d.h:124
static const char * BIAS_CHUNK_SUFFIX
suffix added to the op name to get the bias chunk id in the model data
Definition: dense.h:56
static const char * MATRIX_CHUNK_SUFFIX
suffix added to the op name to get the matrix chunk id in the model data
Definition: dense.h:55
Image preprocessing operation.
Definition: image_sampler.h:32
int getRotation() const
Returns rotation applied to the input image.
Definition: image_sampler.h:85
void setRotation(int quarterTurns)
Specifies a rotation to apply to the input image.
Definition: image_sampler.h:79
Task running inference of a Model.
void connect(AbstractBitmap &image, AbstractOperation &operation, int inputIndex=0)
Connects an image to a specific operation input.
Neural net model.
Definition: model.h:92
unsigned long countTexelFetches() const
Provides an estimation of the total number of texels fetched by all the operations in the model per i...
Definition: model.cpp:516
void addConnection(AbstractOperation &source, AbstractOperation &dest, int output=0, int input=0, int shuffle=0)
Definition: model.cpp:91
void addOutput(const std::string &operation, int output=0)
Enables reading output data from the model memory through getOutputData().
Definition: model.cpp:101
unsigned long countMultiplyAdds() const
Provides an estimation of the number of multiply-adds characterizing the model complexity.
Definition: model.cpp:508
std::string serializeToString() const
Returns serialized representation of the model as a string.
Definition: model.cpp:579
AbstractOperation & getLastOperation()
Definition: model.h:294
AbstractOperation & getFirstOperation()
Definition: model.h:293
void addOperation(const std::string &opName, AbstractOperation *newOp)
Adds a new operation to the model before another operation in the execution order.
Definition: model.cpp:68
void append(AbstractOperation *newOp, bool connect=false)
Adds a new operation to the model.
Definition: model.cpp:47
@ SAME
operation output size matches its input size for unit strides
const std::vector< float > & getProbabilities() const
Definition: softmax.h:64
Wrapper of Android.Graphics.Bitmap object.
Definition: bitmap.h:35
Writable ChunkCollection implementation for Python.
void save(const std::string &filename, bool append=false)
Saves the collection to a file.
AbstractBitmap * getBackgroundImage() const
Definition: renderer.h:132
bool getOutputPixelsFetching() const
Reports whether the output bitmap pixels are automatically offloaded from GPU to CPU memory every tim...
Definition: renderer.cpp:116
void setScene(Scene *scene)
Definition: renderer.cpp:71
void resetOutput()
Removes a bitmap from the renderer output, if any, and switches to on-screen rendering.
Definition: renderer.cpp:81
void setOutputPixelsFetching(bool fetch)
Specifies whether the output image data is pulled from GPU to CPU memory every time the rendering is ...
Definition: renderer.cpp:111
void setOutputMapping(const OutputMapping mapping)
Specifies the output mapping specifying how the scene coordinates [0,1]² are mapped to the output (sc...
Definition: renderer.cpp:86
void setOutput(AbstractBitmap *bitmap)
Attaches a bitmap to the renderer output.
Definition: renderer.cpp:76
const Scene * getScene() const
Definition: renderer.cpp:61
void setBackgroundImage(AbstractBitmap *)
Sets an image to pave the background.
Definition: renderer.cpp:106
Scene::Layer * pickLayer(float x, float y, bool inPixels) const
Retrieves a scene layer visible at a given point, if any.
Definition: renderer.cpp:121
OutputMapping getOutputMapping() const
Retrieves the output mapping specifying how the scene coordinates [0,1]² are mapped to the output (sc...
Definition: renderer.cpp:91
int getOutputReferenceWidth() const
Definition: renderer.cpp:101
AbstractBitmap * getOutput() const
Definition: renderer.cpp:66
void setOutputReferenceWidth(int newWidth)
Sets a value overriding output width for elements that have their size in pixels, in order to render ...
Definition: renderer.cpp:96
Layer having an image to render.
Definition: scene.h:158
void setBitmapMapping(const AffineMapping &mapping)
Definition: scene.h:186
void setBitmap(AbstractBitmap *bitmap)
Definition: scene.h:182
const AbstractBitmap * getBitmap() const
Definition: scene.h:181
AffineMapping & getBitmapMapping()
Definition: scene.h:184
Layer containing a bitmap and a mask applied to the bitmap when rendering.
Definition: scene.h:197
AffineMapping & getMaskMapping()
Definition: scene.h:205
void setMaskMapping(const AffineMapping &mapping)
Definition: scene.h:207
Abstract scene layer having name, type, geometry and some content to display.
Definition: scene.h:64
virtual bool testPoint(float x, float y) const
Tests if a given point falls in the layer.
Definition: scene.cpp:240
@ SceneLayer
layer containing a scene
@ MaskedBitmapLayer
layer displaying a bitmap with mask
@ ShadedBitmapLayer
layer displaying a bitmap through a custom fragment shader
@ BitmapLayer
layer displaying a bitmap
@ ShapedBitmapLayer
layer displaying a bitmap within a shape
void setVisible(bool visible)
Sets layer visibility.
Definition: scene.h:124
void setName(const char *name)
Definition: scene.h:95
void setMapping(const AffineMapping &mapping)
Definition: scene.h:99
bool isVisible() const
Returns layer visibility flag.
Definition: scene.h:114
virtual Layer * getChild(float x, float y, unsigned int recursionDepth=0) const
Picks a child layer at given point, if any.
Definition: scene.cpp:244
Type getType() const
Definition: scene.h:92
const std::string & getName() const
Definition: scene.h:94
bool isPhantom() const
Returns true if the layer is ignored when searching a layer by point.
Definition: scene.h:119
AffineMapping & getMapping()
Definition: scene.h:97
void setPhantom(bool phantom)
Makes/unmakes the layer "phantom".
Definition: scene.h:130
Bitmap layer using another bitmap as a mask.
Definition: scene.h:217
void setMask(AbstractBitmap *mask)
Definition: scene.h:227
const AbstractBitmap * getMask() const
Definition: scene.h:226
const Beatmup::Scene & getScene() const
Definition: scene.h:147
Bitmap layer using a custom shader.
Definition: scene.h:268
ImageShader * getShader() const
Definition: scene.h:277
void setShader(ImageShader *shader)
Definition: scene.h:278
Layer containing a bitmap and a parametric mask (shape)
Definition: scene.h:235
void setInPixels(bool inPixels)
Definition: scene.h:259
float getCornerRadius() const
Definition: scene.h:255
void setBorderWidth(float borderWidth)
Definition: scene.h:250
float getBorderWidth() const
Definition: scene.h:249
void setCornerRadius(float cornerRadius)
Definition: scene.h:256
void setSlopeWidth(float slopeWidth)
Definition: scene.h:253
An ordered set of layers representing a renderable content.
Definition: scene.h:37
ShadedBitmapLayer & newShadedBitmapLayer()
Definition: scene.cpp:150
MaskedBitmapLayer & newMaskedBitmapLayer()
Definition: scene.cpp:130
Layer * getLayer(const char *name) const
Retrieves a layer by its name or null if not found.
Definition: scene.cpp:163
BitmapLayer & newBitmapLayer()
Definition: scene.cpp:120
int getLayerCount() const
Returns total number of layers in the scene.
Definition: scene.cpp:199
ShapedBitmapLayer & newShapedBitmapLayer()
Definition: scene.cpp:140
int getLayerIndex(const Layer &layer) const
Returns layer index in the scene or -1 if not found.
Definition: scene.cpp:191
SceneLayer & addScene(const Scene &scene)
Adds a subscene to the current scene.
Definition: scene.cpp:155
AbstractBitmap * getOutputBitmap() const
void clearSamplers()
Clears all connections of bitmaps to samplers.
void setShader(ImageShader *shader)
void addSampler(AbstractBitmap *bitmap, const std::string uniformName=ImageShader::INPUT_IMAGE_ID)
Connects a bitmap to a shader uniform variable.
void setOutputBitmap(AbstractBitmap *bitmap)
ImageShader * getShader() const
bool removeSampler(const std::string uniformName)
Removes a sampler with a uniform variable name.
static void pullPixels(AbstractBitmap &bitmap)
Copies bitmap from GPU memory to RAM.
Definition: swapper.cpp:48
IntPoint scanlineSearch(AbstractBitmap &source, pixint4 val, const IntPoint &startFrom)
Goes through a bitmap in scanline order (left to right, top to bottom) until a pixel of a given color...
Definition: tools.cpp:195
void invert(AbstractBitmap &input, AbstractBitmap &output)
Inverses colors of an image in a pixelwise fashion.
Definition: tools.cpp:155
InternalBitmap * makeCopy(AbstractBitmap &bitmap)
Makes a copy of a bitmap.
Definition: tools.cpp:72
void noise(AbstractBitmap &bitmap, IntRectangle area)
Replaces a rectangular area in a bitmap by random noise.
Definition: tools.cpp:100
InternalBitmap * chessboard(Context &context, int width, int height, int cellSize, PixelFormat pixelFormat=BinaryMask)
Renders a chessboard image.
Definition: tools.cpp:91
void makeOpaque(AbstractBitmap &bitmap, IntRectangle area)
Makes a bitmap area opaque.
Definition: tools.cpp:131
@ NONE
no extension
Definition: program.h:64
@ SIGMOID_LIKE
piecewise-linear sigmoid approximation
@ DEFAULT
default activation: 0..1 bounded ReLU (identity clipped to 0..1 range)
@ BRELU6
0.167 times identity clipped to 0..1 range
py::tuple toTuple(const CustomPoint< T > &point)
color4f toColor4f(const py::tuple &tuple)
color4i toColor4i(const py::tuple &tuple)
pixfloat4 toPixfloat4(const py::tuple &tuple)
color3f toColor3f(const py::tuple &tuple)
py::object getModelOutputDataByOp(NNets::Model &model, const NNets::AbstractOperation &operation, int output)
py::object getModelOutputDataByName(NNets::Model &model, const std::string &opName, int output)
@ SingleByte
single channel of 8 bits per pixel (like grayscale), unsigned integer values
@ SingleFloat
single channel of 32 bits per pixel (like grayscale), single precision floating point values
@ QuaternaryMask
2 bits per pixel
@ QuadFloat
4 channels of 32 bits per pixel, single precision floating point values,
@ TripleFloat
3 channels of 32 bits per pixel, single precision floating point values
@ QuadByte
4 channels of 8 bits per pixel (like RGBA), unsigned integer values
@ TripleByte
3 channels of 8 bits per pixel (like RGB), unsigned integer values
@ BinaryMask
1 bit per pixel
@ HexMask
4 bits per pixel
std::string to_string(Beatmup::NNets::ActivationFunction function)
return(jlong) new Beatmup jlong jstring name
JNIEnv jlong jstring id
jlong jint index
Beatmup::Context * ctx
Beatmup::InternalBitmap * bitmap
Beatmup::Scene::Layer * layer
jlong jobject size
return $pool getJavaReference & scene(index)
JNIEnv jlong jint jfloat bias
Beatmup::AffineMapping & mapping
Beatmup::SceneRenderer * renderer
layer getMapping().setCenterPosition(Beatmup jlong jfloat factor