        :toctree: python/_generate

        WritableChunkCollection
auto gl = module.def_submodule("gl", R"doc(
        :toctree: python/_generate
auto filters = module.def_submodule("filters", R"doc(
        beatmup.filters module
        ----------------------

        :toctree: python/_generate
auto nnets = module.def_submodule("nnets", R"doc(
        :toctree: python/_generate
module.def("say_hi", []() {
        py::print("Beatmup is up and running, yay!");
        py::exec("import platform; print('Python version:', platform.python_version())");
    },
    "Prints some greetings");
py::enum_<PixelFormat>(module, "PixelFormat", "Specifies bitmap pixel format")
    .value("SINGLE_BYTE", PixelFormat::SingleByte, "single channel of 8 bits per pixel (like grayscale), unsigned integer values")
    .value("TRIPLE_BYTE", PixelFormat::TripleByte, "3 channels of 8 bits per pixel (like RGB), unsigned integer values")
    .value("QUAD_BYTE", PixelFormat::QuadByte, "4 channels of 8 bits per pixel (like RGBA), unsigned integer values")
    .value("SINGLE_FLOAT", PixelFormat::SingleFloat, "single channel of 32 bits per pixel (like grayscale), single precision floating point values")
    .value("TRIPLE_FLOAT", PixelFormat::TripleFloat, "3 channels of 32 bits per pixel, single precision floating point values")
    .value("QUAD_FLOAT", PixelFormat::QuadFloat, "4 channels of 32 bits per pixel, single precision floating point values")
py::class_<GL::TextureHandler>(gl, "TextureHandler",
    "A texture stored in GPU memory")

    .def("get_width", &GL::TextureHandler::getWidth,
        "Returns width of the texture in pixels")

    .def("get_height", &GL::TextureHandler::getHeight,
        "Returns height of the texture in pixels")

    .def("get_depth", &GL::TextureHandler::getDepth,
        "Returns depth of the texture in pixels")

    .def("get_number_of_channels", &GL::TextureHandler::getNumberOfChannels,
        "Returns the number of channels contained in the texture");
py::class_<AbstractTask>(module, "AbstractTask",
    "Abstract task executable in a thread pool of a Context");
py::class_<Context>(module, "Context", "Beatmup engine context")
    .def(py::init<const PoolIndex>())

    .def("perform_task", &Context::performTask,
        py::arg("task"), py::arg("pool") = 0,
        "Performs a given task. Returns its execution time in milliseconds")

    .def("repeat_task", &Context::repeatTask,
        py::arg("task"), py::arg("abort_current"), py::arg("pool") = 0,
        py::keep_alive<1, 2>(),
        R"doc(
            Ensures a given task is executed at least once.

            :param task: the task
            :param abort_current: if True and the same task is currently running, the abort signal is sent
            :param pool: a thread pool to run the task in
        )doc")

    .def("submit_task", &Context::submitTask,
        py::arg("task"), py::arg("pool") = 0,
        py::keep_alive<1, 2>(),
        "Adds a new task to the jobs queue")

    .def("submit_persistent_task", &Context::submitPersistentTask,
        py::arg("task"), py::arg("pool") = 0,
        py::keep_alive<1, 2>(),
        "Adds a new persistent task to the jobs queue")

    .def("wait_for_job", &Context::waitForJob,
        py::arg("job"), py::arg("pool") = 0,
        "Blocks until a given job finishes")

    .def("abort_job", &Context::abortJob,
        py::arg("job"), py::arg("pool") = 0,
        "Aborts a given submitted job.")

    .def("wait", &Context::wait,
        "Blocks until all the submitted jobs are executed",
        py::arg("pool") = 0)

    .def("busy", &Context::busy,
        "Returns `True` if a specific thread pool in the context is executing a Task",
        py::arg("pool") = 0)

    .def("check", &Context::check,
        "Checks if a specific thread pool is doing great: rethrows exceptions that occurred during task execution, if any.",
        py::arg("pool") = 0)

    .def("max_allowed_worker_count", &Context::maxAllowedWorkerCount,
        "Returns maximum number of working threads per task in a given thread pool",
        py::arg("pool") = 0)

    .def("limit_worker_count", &Context::limitWorkerCount,
        "Limits maximum number of threads (workers) when performing tasks in a given pool",
        py::arg("max_value"), py::arg("pool") = 0)

    .def("is_gpu_queried", &Context::isGpuQueried,
        "Returns `True` if GPU was queried")

    .def("is_gpu_ready", &Context::isGpuReady,
        "Returns `True` if GPU was queried and ready to use")

    .def("warm_up_gpu", &Context::warmUpGpu, R"doc(
            Initializes GPU within a given Context if not yet done (has no effect if it already is).
            GPU initialization may take some time and is done when the first task using the GPU is run. Warming up
            the GPU is useful to avoid the app getting stuck for some time when it launches its first task on GPU.
        )doc")

    .def("query_gpu_info", [](Context& ctx) -> py::object {
            std::string vendor, renderer;
            if (ctx.queryGpuInfo(vendor, renderer))
                return py::make_tuple(vendor, renderer);
            return py::none();
        },
        "Queries information about GPU and returns a tuple of vendor and renderer strings, or None if no GPU available.")

    .def("empty_gpu_recycle_bin", [](Context& ctx) {
            auto* bin = ctx.getGpuRecycleBin();
            if (bin)
                bin->emptyBin();
        },
        R"doc(
            Empties GPU recycle bin.
            When a bitmap is destroyed in the application code, its GPU storage is not destroyed immediately. This is due to the fact that destroying a
            texture representing the bitmap content in the GPU memory needs to be done in a thread that has access to the GPU, which is one of the
            threads in the thread pool. The textures of destroyed bitmaps are marked as no longer used and put into a "GPU trash bin". The latter is
            emptied by calling this function.
            In applications doing repeated allocations and deallocations of images (e.g., processing video frames in a loop), it is recommended to empty
            the GPU recycle bin periodically in the described way in order to prevent running out of memory.
        )doc");
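// Example (illustrative sketch): typical Python-side use of the Context bindings above, assuming the
// module is importable as `beatmup` and a no-argument Context constructor is exposed.
//
//   import beatmup
//   ctx = beatmup.Context()            # assumed no-argument constructor
//   ctx.warm_up_gpu()                  # initialize the GPU before running the first real task
//   if ctx.is_gpu_ready():
//       print(ctx.query_gpu_info())    # (vendor, renderer) tuple, or None if no GPU
//   ctx.wait()                         # block until all submitted jobs are done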
py::class_<AbstractBitmap, GL::TextureHandler>(module, "AbstractBitmap",
    "Abstract bitmap class")

    .def("get_pixel_format", &AbstractBitmap::getPixelFormat,
        "Returns pixel format of the bitmap")

    .def("get_memory_size", &AbstractBitmap::getMemorySize,
        "Returns bitmap size in bytes")

    .def("get_context", &AbstractBitmap::getContext,
        "Returns Context the current bitmap is attached to")

    .def("zero", &AbstractBitmap::zero,
        "Sets all the pixels to zero")

    .def("__str__", &AbstractBitmap::toString,
        "Returns a string describing the bitmap")

    .def("save_bmp", &AbstractBitmap::saveBmp, py::arg("filename"),
        "Saves a bitmap to a BMP file");
py::class_<InternalBitmap, AbstractBitmap>(module, "InternalBitmap", R"doc(
        Bitmap whose memory is managed by the Beatmup engine.
        Main pixel data container used internally by Beatmup. Applications would typically use a different incarnation
        of AbstractBitmap implementing I/O operations, and InternalBitmap instances are used to exchange data between
        different processing entities (AbstractTask instances) within the application.
    )doc")

    .def(py::init<Context&, PixelFormat, int, int, bool>(),
        py::arg("context"), py::arg("pixel_format"), py::arg("width"), py::arg("height"), py::arg("allocate") = true,
        py::keep_alive<1, 2>())

    .def(py::init<Context&, const char*>(),
        py::keep_alive<1, 2>());
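// Example (illustrative sketch): allocating an engine-managed bitmap from Python, using only the
// constructor and methods bound above.
//
//   import beatmup
//   ctx = beatmup.Context()
//   image = beatmup.InternalBitmap(ctx, beatmup.PixelFormat.TRIPLE_BYTE, 640, 480)
//   print(image.get_memory_size(), "bytes")
//   image.save_bmp("output.bmp")       # inherited from AbstractBitmap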
py::class_<Python::Bitmap, AbstractBitmap>(module, "Bitmap", py::buffer_protocol(),
    "A bitmap wrapping a numpy container without copying")

    .def(py::init<Beatmup::Context&, py::buffer&>(),
        py::keep_alive<1, 2>())

    .def_buffer([](Python::Bitmap& bitmap) {
        Swapper::pullPixels(bitmap);
        return bitmap.getPythonBuffer();
    });
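// Example (illustrative sketch): wrapping a numpy array without copying, as enabled by the buffer
// protocol binding above.
//
//   import numpy, beatmup
//   ctx = beatmup.Context()
//   array = numpy.zeros((480, 640, 3), dtype=numpy.uint8)
//   image = beatmup.Bitmap(ctx, array)   # shares the numpy buffer, no copy is made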
module.def_submodule("bitmaptools")
        py::arg("bitmap"), py::arg("context"), py::arg("format"),
        py::return_value_policy::take_ownership,
        py::keep_alive<0, 1>(),
        R"doc(
            Makes a copy of a bitmap for a given Context converting the data to a given pixel format.
            Can be used to exchange image content between different instances of Context.
            The copy is done in an AbstractTask run in the default thread pool of the source bitmap context.

            :param bitmap: the bitmap to copy
            :param context: the Context instance the copy is associated with
            :param format: pixel format of the copy
        )doc")
        py::arg("context"), py::arg("width"), py::arg("height"), py::arg("cell_size"), py::arg("format") = PixelFormat::BinaryMask,
        py::return_value_policy::take_ownership,
        py::keep_alive<0, 1>(),
        R"doc(
            Renders a chessboard image.

            :param context: a Context instance
            :param width: width in pixels of the resulting bitmap
            :param height: height in pixels of the resulting bitmap
            :param cell_size: size of a single chessboard cell in pixels
            :param format: pixel format of the resulting bitmap
        )doc")
        "Fills a given bitmap with random noise.")

        py::arg("bitmap"), py::arg("area"),
        "Replaces a rectangular area in a bitmap by random noise.")

        py::arg("bitmap"), py::arg("area"),
        "Makes a bitmap area opaque")

        py::arg("input"), py::arg("output"),
        "Inverses colors of an image in a pixelwise fashion")
    .def("scanline_search", [](AbstractBitmap& bitmap, const py::tuple& value, const py::tuple& startFrom) -> py::object {
            const IntPoint pt = BitmapTools::scanlineSearch(bitmap, Python::toPixfloat4(value), Python::toPoint<int>(startFrom));
            if (pt.x == -1 && pt.y == -1)
                return py::none();
            return Python::toTuple(pt);
        },
        R"doc(
            Goes through a bitmap in scanline order (left to right, top to bottom) until a pixel of a given color is met.

            :param source: the bitmap to scan
            :param value: the color value to look for
            :param start_from: starting pixel position

            Returns the next closest position of the searched value (in scanline order) or None if not found.
        )doc");
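// Example (illustrative sketch): using beatmup.bitmaptools from Python. The `chessboard` and
// `make_copy` names are assumptions inferred from the docstrings above; scanline_search is bound as shown.
//
//   import beatmup
//   ctx = beatmup.Context()
//   board = beatmup.bitmaptools.chessboard(ctx, 320, 240, 16)                          # assumed name
//   copy = beatmup.bitmaptools.make_copy(board, ctx, beatmup.PixelFormat.TRIPLE_BYTE)  # assumed name
//   hit = beatmup.bitmaptools.scanline_search(copy, (1, 1, 1, 1), (0, 0))
//   print(hit)    # next position of the searched color, or None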
py::class_<BitmapResampler, AbstractTask> bitmapResampler(module, "BitmapResampler", R"doc(
        Resamples an image to a given resolution.
        Implements different resampling approaches, including standard ones (bilinear, bicubic, etc.) and a neural network-based 2x upsampling approach dubbed as "x2".
    )doc");

bitmapResampler.def(py::init<Context&>(), py::arg("context"),
        py::keep_alive<1, 2>())

    .def_property("input", &BitmapResampler::getInput,

    .def_property("output", &BitmapResampler::getOutput,

        "Cubic resampling parameter (`alpha`)")

    .def_property("input_rectangle", [](BitmapResampler& resampler, const py::tuple& area) {
        "Specifies a rectangular working area in the input bitmap. Pixels outside of this area are not used.")

    .def_property("output_rectangle", [](BitmapResampler& resampler, const py::tuple& area) {
        "Specifies a rectangular working area in the output bitmap. Pixels outside of this area are not affected.");
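// Example (illustrative sketch): resampling a bitmap from Python using the properties bound above.
//
//   import beatmup
//   ctx = beatmup.Context()
//   resampler = beatmup.BitmapResampler(ctx)
//   resampler.input = source_bitmap          # bitmaps prepared elsewhere
//   resampler.output = destination_bitmap
//   ctx.perform_task(resampler)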
py::enum_<BitmapResampler::Mode>(bitmapResampler, "Mode",
    "Resampling mode (algorithm) specification")
    .value("NEAREST_NEIGHBOR", BitmapResampler::Mode::NEAREST_NEIGHBOR, "zero-order: usual nearest neighbor")
    .value("BOX", BitmapResampler::Mode::BOX, "'0.5-order': anti-aliasing box filter; identical to nearest neighbor when upsampling")
    .value("LINEAR", BitmapResampler::Mode::LINEAR, "first order: bilinear interpolation")
    .value("CUBIC", BitmapResampler::Mode::CUBIC, "third order: bicubic interpolation")
    .value("CONVNET", BitmapResampler::Mode::CONVNET, "upsampling x2 using a convolutional neural network")
py::class_<Filters::PixelwiseFilter, AbstractTask>(filters, "PixelwiseFilter",
    "Base class for image filters processing a given bitmap in a pixelwise fashion.")

    .def_property("input",
        &Filters::PixelwiseFilter::getInput,

    .def_property("output",
        &Filters::PixelwiseFilter::getOutput,
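// Example (illustrative sketch): running a pixelwise filter from Python. It uses the ColorMatrix
// filter bound just below; its no-argument constructor is an assumption.
//
//   import beatmup
//   ctx = beatmup.Context()
//   correction = beatmup.filters.ColorMatrix()   # assumed constructor
//   correction.apply_contrast(1.2)
//   correction.set_brightness(0.05)
//   correction.input = source_bitmap              # bitmaps prepared elsewhere
//   correction.output = destination_bitmap
//   ctx.perform_task(correction)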
py::class_<Filters::ColorMatrix, Filters::PixelwiseFilter>(filters, "ColorMatrix",
    "Color matrix filter: applies an affine mapping Ax + B at each pixel of a given image in RGBA space")

        "Sets color matrix coefficients for a specific output color channel",
        py::arg("out_channel"), py::arg("add"), py::arg("rgba"))

        "Resets the current transformation to a matrix performing standard HSV correction",
        py::arg("hue_shift_degrees"), py::arg("saturation_factor"), py::arg("value_factor"))

    .def("set_color_inversion", [](Filters::ColorMatrix& colorMatrix, const py::tuple& hue, float saturation, float value) {
            colorMatrix.setColorInversion(Python::toColor3f(hue), saturation, value);
        },
        "Resets the current transformation to a fancy color inversion mode with a fixed hue point",
        py::arg("preserved_hue"), py::arg("saturation_factor"), py::arg("value_factor"))

    .def("apply_contrast", &Filters::ColorMatrix::applyContrast,
        "Applies a contrast adjustment by a given factor on top of the current transformation",

    .def("set_brightness", &Filters::ColorMatrix::setBrightness,
        "Sets a brightness adjustment by a given factor (non-cumulative with respect to the current transformation)",
        py::arg("brightness"));
py::class_<Filters::Sepia, Filters::PixelwiseFilter>(filters, "Sepia",
    "Sepia filter: an example of :class:`~beatmup.filters.PixelwiseFilter` implementation.")
py::class_<IntegerContour2D>(module, "IntegerContour2D",
    "A sequence of integer-valued 2D points")

    .def("add_point", &IntegerContour2D::addPoint,
        "Adds a new point to the end of the contour. Some points may be skipped to optimize the storage.",
        py::arg("x"), py::arg("y"))

    .def("clear", &IntegerContour2D::clear,
        "Removes contour content")

    .def("get_point_count", &IntegerContour2D::getPointCount,
        "Returns number of points in the contour")

    .def("get_length", &IntegerContour2D::getLength,
        "Returns contour length")

        "Returns a point by its index");
py::class_<FloodFill, AbstractTask> floodFill(module, "FloodFill", R"doc(
        Discovers areas of similar colors up to a tolerance threshold around given positions (seeds) in the input image.
        These areas are filled with white color in another image (output). If the output bitmap is a binary mask,
        corresponding pixels are set to `1`. The rest of the output image remains unchanged.
        Optionally, computes contours around the discovered areas and stores the contour positions.
        Also optionally, applies post-processing by dilating or eroding the discovered regions in the output image.
    )doc");

py::enum_<FloodFill::BorderMorphology>(floodFill, "BorderMorphology",
    "Morphological postprocessing operation applied to the discovered connected components")

    .value("DILATE", FloodFill::BorderMorphology::DILATE, "apply a dilation")
    .value("ERODE", FloodFill::BorderMorphology::ERODE, "apply an erosion")

floodFill.def(py::init<>())

    .def_property("input",
        &FloodFill::getInput,

    .def_property("output",
        &FloodFill::getOutput,

        "Intensity tolerance")

        "Specifies left-top corner position of the mask inside the input bitmap")

    .def("set_seeds", [](FloodFill& ff, const py::list& seeds) {
            std::vector<IntPoint> pts(seeds.size());
            for (py::ssize_t i = 0; i < seeds.size(); ++i)
                pts[i] = Python::toPoint<int>(seeds[i]);
            ff.setSeeds(pts.data(), (int)pts.size());
        },
        "Specifies a set of seeds (starting points)")

        "Enables or disables contours computation")

        py::arg("operation"), py::arg("hold_radius"), py::arg("release_radius"),
        R"doc(
            Specifies a morphological operation to apply to the mask border.

            :param operation: a postprocessing operation
            :param hold_radius: erosion/dilation hold radius (output values set to 1)
            :param release_radius: erosion/dilation radius of transition from 1 to 0
        )doc")

        "Returns bounding box of the computed mask")

    .def("get_contour_count", &FloodFill::getContourCount,
        "Returns number of discovered contours")

    .def("get_contour", &FloodFill::getContour,
        "Returns a contour by index if compute_contours was set to True, throws an exception otherwise");
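// Example (illustrative sketch): flood-filling areas of similar color from Python. The `tolerance`
// attribute name is an assumption inferred from the docstring above.
//
//   import beatmup
//   ctx = beatmup.Context()
//   ff = beatmup.FloodFill()
//   ff.input = picture                   # bitmaps prepared elsewhere
//   ff.output = mask
//   ff.tolerance = 0.1                   # assumed attribute name
//   ff.set_seeds([(10, 10), (200, 150)])
//   ctx.perform_task(ff)
//   print(ff.get_contour_count())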
py::class_<AffineMapping>(module, "AffineMapping",
    "2x3 affine mapping containing a 2x2 matrix and a 2D point")

        "Returns the mapping origin")

        "Returns the mapping matrix")

        "Inverts the mapping")

        py::return_value_policy::take_ownership,
        "Returns inverse mapping")

        "Computes inverse mapping of a point")

        "Adjusts the mapping origin so that the center of the axes box matches a given point")

        "Translates the mapping")

        py::arg("factor"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
        "Scales the mapping around a given point in target domain")

        py::arg("angle"), py::arg("fixed_point") = py::make_tuple(0.0f, 0.0f),
        "Rotates the mapping around a given point in target domain")

        "Tests whether a point from the output domain is inside the input axes span");
py::class_<GL::VariablesBundle>(gl, "VariablesBundle",
    "Collection storing GLSL program parameters (scalars, matrices, vectors) to communicate them from user to GPU-managing thread")

        py::arg("name"), py::arg("value"),
        "Sets a scalar integer uniform value")

        py::arg("name"), py::arg("x"), py::arg("y"),
        "Sets a 2D integer uniform vector value")

        py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
        "Sets a 3D integer uniform vector value")

        py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
        "Sets a 4D integer uniform vector value")

        py::arg("name"), py::arg("value"),
        "Sets a scalar float uniform value")

        py::arg("name"), py::arg("x"), py::arg("y"),
        "Sets a 2D float uniform vector value")

        py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"),
        "Sets a 3D float uniform vector value")

        py::arg("name"), py::arg("x"), py::arg("y"), py::arg("z"), py::arg("w"),
        "Sets a 4D float uniform vector value")

    .def("set_float_matrix2", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
            if (matrix.size() != 2*2)
                throw std::invalid_argument("Expected a list-like input containing " + std::to_string(2*2) +
                    " values, but got " + std::to_string(matrix.size()));
            instance.setFloatMatrix2(name, matrix.data());
        },
        py::arg("name"), py::arg("matrix"),
        "Sets a float 2*2 matrix variable value")

    .def("set_float_matrix3", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
            if (matrix.size() != 3*3)
                throw std::invalid_argument("Expected a list-like input containing " + std::to_string(3*3) +
                    " values, but got " + std::to_string(matrix.size()));
            instance.setFloatMatrix3(name, matrix.data());
        },
        py::arg("name"), py::arg("matrix"),
        "Sets a float 3*3 matrix variable value")

    .def("set_float_matrix4", [](GL::VariablesBundle& instance, const char* name, const std::vector<float>& matrix) {
            if (matrix.size() != 4*4)
                throw std::invalid_argument("Expected a list-like input containing " + std::to_string(4*4) +
                    " values, but got " + std::to_string(matrix.size()));
            instance.setFloatMatrix4(name, matrix.data());
        },
        py::arg("name"), py::arg("matrix"),
        "Sets a float 4*4 matrix variable value")

    .def("set_float_array", &GL::VariablesBundle::setFloatArray,
        py::arg("name"), py::arg("values"),
        "Sets a float array variable value");
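// Example (illustrative sketch): uniform variables are set from Python through list-like containers;
// a wrong number of entries raises an error, as checked in the lambdas above. Here `shader` stands for
// any object deriving from VariablesBundle, e.g. the ImageShader bound further below.
//
//   shader.set_float_matrix3("transform", [1.0, 0.0, 0.0,
//                                          0.0, 1.0, 0.0,
//                                          0.0, 0.0, 1.0])   # 9 entries for a 3*3 matrix
//   shader.set_float_array("weights", [0.25, 0.5, 0.25])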
py::class_<Metric, AbstractTask> metric(module, "Metric",
    "Measures the difference between two bitmaps");

py::enum_<Metric::Norm>(metric, "Norm",
    "Norm (distance) to measure between two images")
    .value("L1", Metric::Norm::L1, "sum of absolute differences")
    .value("L2", Metric::Norm::L2, "Euclidean distance: square root of the sum of squared differences")

metric.def(py::init<>())

        py::arg("bitmap1"), py::arg("bitmap2"),
        py::keep_alive<1, 2>(), py::keep_alive<1, 3>(),

    .def("set_bitmaps", [](Metric& metric, AbstractBitmap* bitmap1, const py::tuple& roi1, AbstractBitmap* bitmap2, const py::tuple& roi2) {
            metric.setBitmaps(bitmap1, Python::toRectangle<int>(roi1),
                bitmap2, Python::toRectangle<int>(roi2));
        },
        py::arg("bitmap1"), py::arg("roi1"), py::arg("bitmap2"), py::arg("roi2"),
        py::keep_alive<1, 2>(), py::keep_alive<1, 4>(),
        "Sets input images and rectangular regions delimiting the measurement areas")

    .def("set_norm", &Metric::setNorm,
        "Specifies the norm to use in the measurement")

    .def("get_result", &Metric::getResult,
        "Returns the measurement result (after the task is executed)")

    .def_static("psnr", &Metric::psnr, py::arg("bitmap1"), py::arg("bitmap2"),
        "Computes peak signal-to-noise ratio in dB for two given images");
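// Example (illustrative sketch): measuring the difference between two bitmaps from Python. The plain
// two-bitmap set_bitmaps overload name is an assumption; the remaining calls are bound above.
//
//   import beatmup
//   ctx = beatmup.Context()
//   metric = beatmup.Metric()
//   metric.set_bitmaps(bitmap1, bitmap2)          # assumed name; bitmaps prepared elsewhere
//   metric.set_norm(beatmup.Metric.Norm.L2)
//   ctx.perform_task(metric)
//   print(metric.get_result())
//   print(beatmup.Metric.psnr(bitmap1, bitmap2))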
py::class_<ImageShader, GL::VariablesBundle>(module, "ImageShader",
    "A GLSL program to process images")

    .def(py::init<Context&>(), py::arg("context"), py::keep_alive<1, 2>())

        R"doc(Passes new source code to the fragment shader.
        The new source code will be compiled and linked when next rendering occurs.)doc")

    .def_property_readonly_static("INPUT_IMAGE_DECL_TYPE",
        [](py::object) { return ImageShader::INPUT_IMAGE_DECL_TYPE; },
        "A virtual input image type defined at shader compile time by ordinary texture or OES texture sampler depending on the input bound")

    .def_property_readonly_static("INPUT_IMAGE_ID",
        [](py::object) { return ImageShader::INPUT_IMAGE_ID; },
        "Shader variable name referring to the input image")

    .def_property_readonly_static("CODE_HEADER",
        [](py::object) { return ImageShader::CODE_HEADER; },
        "Shader code header containing necessary declarations");
py::class_<ShaderApplicator, AbstractTask>(module, "ShaderApplicator",
    "A task applying an image shader to bitmaps")

    .def("add_sampler", &ShaderApplicator::addSampler,
        py::arg("bitmap"), py::arg("uniform_name") = ImageShader::INPUT_IMAGE_ID,
        py::keep_alive<1, 2>(),
        R"doc(
            Connects a bitmap to a shader uniform variable.
            The bitmap connected to ImageShader::INPUT_IMAGE_ID is used to resolve the sampler type (ImageShader::INPUT_IMAGE_DECL_TYPE).
        )doc")

    .def("remove_sampler", &ShaderApplicator::removeSampler,
        py::arg("uniform_name"),
        R"doc(
            Removes a sampler with a uniform variable name.
            Returns True if a sampler associated with the given variable existed and was removed, False otherwise.
        )doc")

    .def("clear_samplers", &ShaderApplicator::clearSamplers,
        "Clears all connections of bitmaps to samplers")

    .def_property("shader",
        &ShaderApplicator::getShader,
        "Shader to apply to the bitmap(s)")

    .def_property("output_bitmap",
        &ShaderApplicator::getOutputBitmap,
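// Example (illustrative sketch): applying a custom fragment shader to a bitmap from Python. The
// `set_source_code` method and the no-argument ShaderApplicator constructor are assumed names.
//
//   import beatmup
//   ctx = beatmup.Context()
//   shader = beatmup.ImageShader(ctx)
//   shader.set_source_code(glsl_source)          # assumed name; GLSL code prepared elsewhere
//   applicator = beatmup.ShaderApplicator()      # assumed constructor
//   applicator.add_sampler(input_bitmap)         # bound to the default input image uniform
//   applicator.output_bitmap = output_bitmap
//   applicator.shader = shader
//   ctx.perform_task(applicator)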
py::class_<Scene> scene(module, "Scene",
    "An ordered set of layers representing renderable content");

py::class_<Scene::Layer> layer(scene, "Layer", R"doc(
        Abstract scene layer having name, type, geometry and some content to display.
        The layer geometry is defined by an AffineMapping describing the position and the orientation of the layer content in the rendered image.
    )doc");

py::enum_<Scene::Layer::Type>(layer, "Type", "Layer type")
    .value("SCENE", Scene::Layer::Type::SceneLayer, "layer containing a scene")
    .value("BITMAP", Scene::Layer::Type::BitmapLayer, "layer displaying a bitmap")
    .value("MASKED_BITMAP", Scene::Layer::Type::MaskedBitmapLayer, "layer displaying a bitmap with mask")
    .value("SHAPED_BITMAP", Scene::Layer::Type::ShapedBitmapLayer, "layer displaying a bitmap within a shape")
    .value("SHADED_BITMAP", Scene::Layer::Type::ShadedBitmapLayer, "layer displaying a bitmap through a custom fragment shader")

layer.def("get_type", &Scene::Layer::getType,
        "Returns layer type")

        "Layer mapping in parent coordinates")

    .def("test_point", &Scene::Layer::testPoint,
        py::arg("x"), py::arg("y"),
        "Tests if a given point falls in the layer")

    .def("test_point", [](const Scene::Layer& layer, const py::tuple& point) {
            Point pt = Python::toPoint<float>(point);
            return layer.testPoint(pt.x, pt.y);
        },
        py::arg("point"),
        "Tests if a given point falls in the layer")

    .def("get_child", &Scene::Layer::getChild,
        py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
        "Picks a child layer at given point, if any")

    .def("get_child", [](const Scene::Layer& layer, const py::tuple& point, unsigned int recursionDepth) {
            Point pt = Python::toPoint<float>(point);
            return layer.getChild(pt.x, pt.y, recursionDepth);
        },
        py::arg("point"), py::arg("recursion_depth") = 0,
        "Picks a child layer at given point, if any")

        "Controls the layer visibility. If set to `False`, the layer and its sublayers are ignored when rendering.")

        "If set to `True`, the layer goes \"phantom\": it and its sublayers, if any, are ignored when searching a layer by point.");
py::class_<Scene::SceneLayer, Scene::Layer>(scene, "SceneLayer",
    "Layer containing an entire scene")
    .def("get_scene", &Scene::SceneLayer::getScene,
        "Returns a Scene contained in the Layer");
py::class_<Scene::BitmapLayer, Scene::Layer>(scene, "BitmapLayer", R"doc(
        Layer having an image to render.
        The image has a position and orientation with respect to the layer. This is expressed with an affine mapping applied on top of the layer
        mapping.
    )doc")

    .def_property("bitmap",
        &Scene::BitmapLayer::getBitmap,
        "Bitmap attached to the layer")

    .def_property("bitmap_mapping",
        "Bitmap geometry mapping applied on top of the layer mapping")

    .def_property("modulation_color",
        "Modulation color (R, G, B, A). Multiplies bitmap pixel colors when rendering");
py::class_<Scene::CustomMaskedBitmapLayer, Scene::BitmapLayer>(scene, "CustomMaskedBitmapLayer", R"doc(
        Layer containing a bitmap and a mask applied to the bitmap when rendering.
        Both bitmap and mask have their own positions and orientations relative to the layer's position and orientation.
    )doc")

    .def_property("mask_mapping",
        &Scene::CustomMaskedBitmapLayer::getMaskMapping,
        &Scene::CustomMaskedBitmapLayer::setMaskMapping,
        "Mask geometry mapping applied on top of the layer mapping")

    .def_property("background_color",
        "Background color (R, G, B, A). Fills layer pixels falling out of the mask area");
py::class_<Scene::MaskedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "MaskedBitmapLayer",
    "Bitmap layer using another bitmap as a mask")

    .def_property("mask",
        &Scene::MaskedBitmapLayer::getMask,
py::class_<Scene::ShapedBitmapLayer, Scene::CustomMaskedBitmapLayer>(scene, "ShapedBitmapLayer",
    "Layer containing a bitmap and a parametric mask (shape)")

        "Mask border thickness in pixels or normalized coordinates. "
        "These pixels are cropped out from the image and replaced with the background color.")

        "Mask border slope width in pixels or normalized coordinates. "
        "The border slope is a linear transition from background color to image pixels.")

        "Radius of mask corners in pixels or normalized coordinates")

        "If set to `True`, all the parameter values are interpreted as if given in pixels. Otherwise the normalized coordinates are used.");
py::class_<Scene::ShadedBitmapLayer, Scene::BitmapLayer>(scene, "ShadedBitmapLayer",
    "Bitmap layer using a custom shader")

    .def_property("shader",
        &Scene::ShadedBitmapLayer::getShader,
        "Fragment shader taking the layer bitmap as texture");
scene.def(py::init<>())

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new masked bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new masked bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new shaped bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new shaped bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new shaded bitmap layer")

        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Creates a new shaded bitmap layer")

    .def("add_scene", &Scene::addScene,
        py::return_value_policy::reference, py::keep_alive<1, 0>(),
        "Adds a subscene to the current scene.")

        "Retrieves a layer by its name or None if not found")

        "Retrieves a layer by its index")

    .def("get_layer", (Scene::Layer* (Scene::*)(float, float, unsigned int) const)&Scene::getLayer,
        py::arg("x"), py::arg("y"), py::arg("recursion_depth") = 0,
        "Retrieves a layer present at a specific point of the scene or None if not found")

    .def("get_layer_index", &Scene::getLayerIndex,
        "Retrieves layer index in the scene or -1 if not found")
py::class_<SceneRenderer, AbstractTask> sceneRenderer(module, "SceneRenderer", R"doc(
        AbstractTask rendering a Scene.
        The rendering may be done to a given bitmap or on screen, if the platform supports on-screen rendering.
    )doc");

py::enum_<SceneRenderer::OutputMapping>(sceneRenderer, "OutputMapping",
    "Scene coordinates to output (screen or bitmap) pixel coordinates mapping")
    .value("STRETCH", SceneRenderer::OutputMapping::STRETCH, "output viewport covers entirely the scene axis span, aspect ratio is not preserved in general")
    .value("FIT_WIDTH_TO_TOP", SceneRenderer::OutputMapping::FIT_WIDTH_TO_TOP, "width is covered entirely, height is resized to keep aspect ratio, the top borders are aligned")
    .value("FIT_WIDTH", SceneRenderer::OutputMapping::FIT_WIDTH, "width is covered entirely, height is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")
    .value("FIT_HEIGHT", SceneRenderer::OutputMapping::FIT_HEIGHT, "height is covered entirely, width is resized to keep aspect ratio, point (0.5, 0.5) is mapped to the output center")

sceneRenderer.def(py::init<>())

    .def_property("output",
        &SceneRenderer::getOutput,

    .def_property("scene",
        &SceneRenderer::getScene,

        "Specifies how the scene coordinates [0,1]² are mapped to the output (screen or bitmap) pixel coordinates.")

        "Value overriding output width for elements that have their size in pixels, in order to render a resolution-independent picture")

        R"doc(
            If set to `True`, the output image data is pulled from GPU to CPU memory every time the rendering is done.
            This is convenient if the rendered image is an application output result, and is further stored or sent through the network.
            Otherwise, if the image is to be further processed inside Beatmup, the pixel transfer likely introduces an unnecessary latency and may
            cause FPS drop in real-time rendering.
            Has no effect in on-screen rendering.
        )doc")

    .def_property("background_image",
        &SceneRenderer::getBackgroundImage,
        "Image to pave the background.")

        R"doc(
            Removes a bitmap from the renderer output, if any, and switches to on-screen rendering.
            The rendering is done on the display currently connected to the Context running the rendering task.
        )doc")

    .def("pick_layer", &SceneRenderer::pickLayer,
        py::arg("x"), py::arg("y"), py::arg("inPixels"), R"doc(
            Searches for a layer at a given position.
            In contrast to :func:`~beatmup.Scene.get_layer` it takes into account the output mapping.

            :param x: x coordinate.
            :param y: y coordinate.
            :param inPixels: if `True`, the coordinates are taken in pixels.

            Returns the topmost layer at the given position if any, None if no layer found.
        )doc");
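// Example (illustrative sketch): rendering a one-layer scene to a bitmap from Python. The
// `new_bitmap_layer` method name is an assumption inferred from the docstring above.
//
//   import beatmup
//   ctx = beatmup.Context()
//   scene = beatmup.Scene()
//   layer = scene.new_bitmap_layer()     # assumed name
//   layer.bitmap = picture               # a bitmap prepared elsewhere
//   renderer = beatmup.SceneRenderer()
//   renderer.scene = scene
//   renderer.output = output_bitmap
//   ctx.perform_task(renderer)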
py::class_<CustomPipeline, AbstractTask> customPipeline(module, "CustomPipeline", R"doc(
        Custom pipeline: a sequence of tasks to be executed as a whole.
        Acts as an AbstractTask. Built by adding tasks one by one and calling measure() at the end.
    )doc");

py::class_<CustomPipeline::TaskHolder>(customPipeline, "TaskHolder",
    "A task within a pipeline")

    .def("get_task", &CustomPipeline::TaskHolder::getTask,
        "Returns the task in the current holder")

    .def("get_run_time", &CustomPipeline::TaskHolder::getRunTime,
        "Returns last execution time in milliseconds");

customPipeline
    .def("get_task_count", &CustomPipeline::getTaskCount,
        "Returns number of tasks in the pipeline")

    .def("get_task", &CustomPipeline::getTask, py::arg("index"),
        py::return_value_policy::reference,
        "Retrieves a task by its index")

    .def("get_task_index", &CustomPipeline::getTaskIndex, py::arg("holder"),
        "Retrieves task index if it is in the pipeline; returns -1 otherwise")

    .def("add_task", &CustomPipeline::addTask, py::arg("task"),
        py::keep_alive<1, 2>(),
        py::return_value_policy::reference,
        "Adds a new task to the end of the pipeline")

    .def("insert_task", &CustomPipeline::insertTask, py::arg("task"), py::arg("before"),
        py::keep_alive<1, 2>(),
        py::return_value_policy::reference,
        "Inserts a task in a specified position of the pipeline before another task")

    .def("remove_task", &CustomPipeline::removeTask, py::arg("task"),
        "Removes a task from the pipeline, if any. Returns True on success")

        "Determines pipeline execution mode and required thread count");
py::class_<Multitask, CustomPipeline> multitask(module, "Multitask", R"doc(
        Conditional execution of multiple tasks.

        Beatmup offers a number of tools for pipelining several tasks into a single one. This technique is particularly useful for designing
        complex multi-stage image processing pipelines.

        Multitask is the simplest such tool. It allows concatenating different tasks into a linear conveyor and running them all or selectively.
        To handle this selection, each task is associated with a repetition policy specifying the conditions under which the task is executed
        or ignored when the pipeline is run.

        Specifically, there are two extreme modes that force the task execution every time (REPEAT_ALWAYS) or its unconditional skipping
        (IGNORE_ALWAYS), and two more sophisticated modes with the following behavior:

        - IGNORE_IF_UPTODATE skips the task if no tasks were executed among the ones coming before the current task in the pipeline;
        - REPEAT_UPDATE forces task repetition one time on the next run and right after switches the repetition policy to IGNORE_IF_UPTODATE.
    )doc");

py::enum_<Multitask::RepetitionPolicy>(multitask, "RepetitionPolicy",
    "Determines when a specific task in the sequence is run when the whole sequence is invoked")
    .value("REPEAT_ALWAYS", Multitask::RepetitionPolicy::REPEAT_ALWAYS, "execute the task unconditionally on each run")
    .value("REPEAT_UPDATE", Multitask::RepetitionPolicy::REPEAT_UPDATE, "execute the task one time then switch to IGNORE_IF_UPTODATE")
    .value("IGNORE_IF_UPTODATE", Multitask::RepetitionPolicy::IGNORE_IF_UPTODATE, "do not execute the task if no preceding tasks are run")
    .value("IGNORE_ALWAYS", Multitask::RepetitionPolicy::IGNORE_ALWAYS, "do not execute the task")

    .def("get_repetition_policy", &Multitask::getRepetitionPolicy, py::arg("task"),
        "Returns repetition policy of a specific task in the pipeline.")

    .def("set_repetition_policy", &Multitask::setRepetitionPolicy, py::arg("task"), py::arg("policy"),
        R"doc(
            Sets repetition policy of a task. If the pipeline is processing at the moment of the call, it is the application's responsibility to abort
            and restart it if the policy change needs to be applied immediately.
        )doc");
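// Example (illustrative sketch): selectively re-running tasks in a Multitask pipeline from Python.
// The no-argument Multitask constructor and measure() are assumed names (measure() is mentioned in the
// CustomPipeline docstring above).
//
//   import beatmup
//   ctx = beatmup.Context()
//   pipeline = beatmup.Multitask()                    # assumed constructor
//   first = pipeline.add_task(preprocessing_task)     # tasks prepared elsewhere
//   second = pipeline.add_task(rendering_task)
//   pipeline.set_repetition_policy(first, beatmup.Multitask.RepetitionPolicy.IGNORE_IF_UPTODATE)
//   pipeline.measure()                                # assumed name
//   ctx.perform_task(pipeline)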
py::class_<ChunkCollection>(module, "ChunkCollection", R"doc(
        A key-value pair set storing pieces of arbitrary data (chunks) under string keys.
        A chunk is a header and a piece of data packed in memory like this: (idLength[4], id[idLength], size[sizeof(chunksize_t)], data[size])
        ChunkCollection defines an interface to retrieve chunks by their ids.
    )doc")

    .def("open", &ChunkCollection::open,
        "Opens the collection to read chunks from it.")

    .def("size", &ChunkCollection::size,
        "Returns the number of chunks available in the collection after it is opened.")

    .def("chunk_exists", &ChunkCollection::chunkExists, py::arg("id"),
        R"doc(
            Checks if a specific chunk exists.

            :param id: the chunk id

            Returns `True` if the chunk exists in the collection.
        )doc")

    .def("chunk_size", &ChunkCollection::chunkSize, py::arg("id"),
        R"doc(
            Retrieves the size of a specific chunk.

            :param id: the chunk id

            Returns the size of the chunk in bytes, or 0 if not found.
        )doc")

    .def("__getitem__", [](ChunkCollection& collection, const std::string& id) -> py::object {
            if (!collection.chunkExists(id))
                return py::none();
            Chunk chunk(collection, id);
            return py::bytes(static_cast<const char*>(chunk()), chunk.size());
        }, py::arg("id"),
        "Returns the chunk data by its id");
py::class_<ChunkFile, ChunkCollection>(module, "ChunkFile", R"doc(
        File containing chunks.
        The file is not loaded in memory, but is scanned when first opened to collect the information about available chunks.
    )doc")

    .def(py::init<const std::string&, bool>(), py::arg("filename"), py::arg("open_now") = true, R"doc(
            Creates a chunkfile accessor.
            The file content is not read until open() is called.

            :param filename: the file name / path
            :param open_now: if `True`, the file is read right away. Otherwise it is done on an open() call.
                No information is available about chunks in the file until it is opened.
        )doc");
py::class_<Python::WritableChunkCollection, ChunkCollection>(module, "WritableChunkCollection", R"doc(
        Writable ChunkCollection implementation for Python.
        Allows exchanging binary data without copying.
    )doc")

    .def("__getitem__", [](Python::WritableChunkCollection& collection, const std::string& id) {
            return collection[id];
        }, py::arg("id"),
        "Returns the chunk data by its id")

        R"doc(
            Saves the collection to a file.

            :param filename: The name of the file to write chunks to
            :param append: If True, writing to the end of the file (keeping the existing content). Rewriting the file otherwise.
        )doc");
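// Example (illustrative sketch): exchanging binary data through chunks from Python. The
// WritableChunkCollection constructor, its item assignment and the save() name are assumptions
// inferred from the class description and docstrings above.
//
//   import numpy, beatmup
//   data = beatmup.WritableChunkCollection()                           # assumed constructor
//   data["weights"] = numpy.random.rand(64).astype(numpy.float32)      # assumed item assignment
//   data.save("model.chunks", False)                                   # assumed name
//
//   stored = beatmup.ChunkFile("model.chunks")
//   stored.open()
//   print(stored.size(), stored.chunk_size("weights"))
//   raw = stored["weights"]    # bytes, via ChunkCollection.__getitem__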
py::enum_<NNets::ActivationFunction>(nnets, "ActivationFunction",
    "Activation function specification")
    .value("DEFAULT", NNets::ActivationFunction::DEFAULT, "default activation: 0..1 bounded ReLU (identity clipped to 0..1 range)")
    .value("BRELU6", NNets::ActivationFunction::BRELU6, "0.167 times identity clipped to 0..1 range")
    .value("SIGMOID_LIKE", NNets::ActivationFunction::SIGMOID_LIKE, "a piecewise-linear sigmoid function approximation")

py::enum_<NNets::Size::Padding>(nnets, "Padding",
    "Zero padding specification")
    .value("SAME", NNets::Size::Padding::SAME, "operation output size matches its input size for unit strides")
    .value("VALID", NNets::Size::Padding::VALID, "no zero padding")
py::class_<NNets::AbstractOperation>(nnets, "AbstractOperation", R"doc(
        Abstract neural net operation (layer).
        Has a name used to refer to the operation in a Model. The operation data (such as convolution weights) is provided through a ChunkCollection
        in single precision floating point format, where the chunks are searched by operation name.
        Operations have several inputs and outputs numbered starting from zero.
    )doc")

    .def_property_readonly("name", &NNets::AbstractOperation::getName,

    .def_property_readonly("input_count", &NNets::AbstractOperation::getInputCount,
        "Number of operation inputs")

    .def_property_readonly("output_count", &NNets::AbstractOperation::getOutputCount,
        "Number of operation outputs");
py::class_<NNets::Conv2D, NNets::AbstractOperation>(nnets, "Conv2D", R"doc(
        2D convolution operation computed on GPU.
        Has 2 inputs: main and residual (detailed below), and a single output.

        * Input and output are 3D tensors with values in [0, 1] range sampled over 8 bits.
        * Number of input feature maps is 3 or a multiple of 4.
        * Number of output feature maps is a multiple of 4.
        * For group convolutions, each group contains a multiple of 4 input channels and a multiple of 4 output
          channels, or exactly 1 input and 1 output channel (i.e., depthwise).
        * Kernels are of square shape.
        * Strides are equal along X and Y.
        * Dilations are equal to 1.
        * If an image is given on input (3 input feature maps), only valid padding is supported.
        * An activation function is always applied on output.

        Raspberry Pi-related constraints:

        * Pi cannot sample more than 256 channels to compute a single output value. The actual practical limit is
          lower: around 128 channels for pointwise convolutions and fewer than 100 channels for
          bigger kernels. When the limit is reached, the Pi OpenGL driver reports an out of memory error (0x505).

        * Bias addition integrated.
        * An optional residual input is available: a tensor of output shape added to the convolution result
          before applying the activation function.
    )doc")

    .def(py::init<const std::string&, const int, const int, const int, const int, const NNets::Size::Padding, const bool, const int, const NNets::ActivationFunction>(),
        py::arg("name"), py::arg("kernel_size"), py::arg("num_input_channels"), py::arg("num_output_channels"),
        py::arg("stride") = 1,
        py::arg("padding") = NNets::Size::Padding::VALID,
        py::arg("use_bias") = true,
        py::arg("num_groups") = 1,
        py::arg("activation") = NNets::ActivationFunction::DEFAULT,
        R"doc(
            Instantiates a 2D convolution operation.

            :param name: operation name.
            :param kernel_size: convolution kernel size.
            :param num_input_channels: number of input feature map channels (input depth).
            :param num_output_channels: number of output feature map channels (output depth).
            :param stride: convolution stride.
            :param padding: padding policy.
            :param use_bias: if `True`, the bias addition is enabled. The bias vector is searched in the model data.
            :param num_groups: number of convolution groups to get a group/depthwise convolution.
            :param activation: activation function applied to the operation output.
        )doc")

    .def_property_readonly("use_bias", &NNets::Conv2D::isBiasUsed,
        "Returns `True` if bias addition is enabled")

    .def_property_readonly_static("filters_chunk_suffix", [](py::object) { return NNets::Conv2D::FILTERS_CHUNK_SUFFIX; },