Process Data from a PushBroom Device With an Object Detector

This example connects to the virtual pushbroom camera (using ObjectDetectorTest.hdr and its reference cubes) and measures a white and a dark reference. It then creates a processing context for the model object_detector_test.fluxmdl, which contains a background masking filter and an object detector.

The input cube of this example looks like this when loaded into fluxTrainer:

[Image: example_pushbroom_object_detector_input.png — the input cube as displayed in fluxTrainer]

This example demonstrates how the object detector in fluxEngine can be used even though the data is processed just one line at a time: the detector accumulates object data across successive scan lines and reports an object only once it has been scanned completely, which is why each object appears in the output after its last line has been processed rather than when its first pixels appear.

All detected objects are printed to the console.
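Condensed to its essentials, the line-by-line processing loop that all three listings below share looks roughly like this (a sketch in Python, reusing the names from the Python listing further down; connection, reference measurement, and error handling are omitted):

# Sketch only: assumes `camera` is a connected fluxEngine.InstrumentDevice,
# `context` is an instrument processing context created from the model, and
# `sinkIndex` is the index of the object detector's output sink
# (see the full listings below for how these are set up).
camera.startAcquisition(acqParams)
y = 0
while y < 100:
    buffer = camera.retrieveBuffer(100)   # wait up to 100 ms for the next scan line
    if not buffer:
        continue                          # timeout, no new line yet
    context.setSourceData(buffer)         # feed exactly one line into the model
    context.processNext()
    for obj in context.outputSinkData(sinkIndex):
        print(obj.boundingBox, obj.gravityCenter, obj.area)
    camera.returnBuffer(buffer)           # hand the buffer back to the driver
    y += 1
camera.stopAcquisition()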

C++

The source code of the example can be found in the file example_pushbroom_object_detector.cpp:

#if defined(_WIN32) && defined(_MSC_VER)
#include <windows.h>
#endif

#include <iostream>
#include <iomanip>
#include <string>
#include <fstream>
#include <streambuf>
#include <algorithm>
#include <utility>

#include <cstddef>

#include <fluxEngine/fluxEngine>

#include "paths.h"
#include "helpers.h"

int main()
{
    try {
        std::cout << "fluxEngine version: " << fluxEngine::versionString() << std::endl;
        fluxEngine::Handle handle(readFile(g_licenseFileName));
        handle.setDriverBaseDirectory(g_driverDirectory);
        handle.setDriverIsolationExecutable(g_driverIsolationExecutable);
        handle.createProcessingThreads(4);

        // Load virtual camera
        fluxEngine::EnumerationResult enumeratedDevices = fluxEngine::enumerateDevices(handle, -1, std::chrono::seconds{1});
        fluxEngine::EnumeratedDevice* virtualCameraDevice = nullptr;
        for (auto const& device : enumeratedDevices.devices) {
            if (device->driver->name == "VirtualHyperCamera") {
                virtualCameraDevice = device.get();
                break;
            }
        }

        if (!virtualCameraDevice)
            throw std::runtime_error("Could not find virtual camera driver");

        fluxEngine::ConnectionSettings connectionSettings;
        connectionSettings.driverName = virtualCameraDevice->driver->name;
        connectionSettings.driverType = virtualCameraDevice->driver->type;
        connectionSettings.id = virtualCameraDevice->id;
        connectionSettings.timeout = std::chrono::seconds{60};
        connectionSettings.connectionParameters["Cube"] = encodeFileNameForConnectionParameter(g_odCubeFileName);
        connectionSettings.connectionParameters["WhiteReferenceCube"] = encodeFileNameForConnectionParameter(g_odWhiteCubeFileName);
        connectionSettings.connectionParameters["DarkReferenceCube"] = encodeFileNameForConnectionParameter(g_odDarkCubeFileName);

        std::cout << "Attempting to connect to device...\n" << std::flush;
        for (auto const& parameter : connectionSettings.connectionParameters)
            std::cout << "  - " << parameter.first << ": " << parameter.second << "\n" << std::flush;
        fluxEngine::DeviceGroup deviceGroup = fluxEngine::connectDeviceGroup(handle, connectionSettings);
        std::cout << "Connected.\n" << std::flush;
        fluxEngine::InstrumentDevice* camera = dynamic_cast<fluxEngine::InstrumentDevice*>(deviceGroup.primaryDevice());
        if (!camera) {
            deviceGroup.disconnect(std::chrono::seconds{5});
            throw std::runtime_error("The device is not an instrument device");
        }

        camera->setupInternalBuffers(5);

        /* Load model
         *
         * This should be done after connecting with the camera, in
         * case the license is tied to a camera serial number. (In
         * case the license is tied to a dongle or a mainboard id,
         * this may be done beforehand.)
         */
        fluxEngine::Model model = fluxEngine::Model(handle, fluxEngine::Model::FromFile, g_odModelFileName);

        /* NOTE:
         * For real devices, at this point the user should probably be
         * asked to insert a white reference underneath the camera.
         *
         * For the virtual device this is not required.
         */

        fluxEngine::InstrumentDevice::AcquisitionParameters acqParams;
        std::cout << "Measuring white reference:\n" << std::flush;
        fluxEngine::BufferContainer whiteReference = fluxEngine::createRingBufferContainer(camera, 10);
        acqParams.referenceName = "WhiteReference";
        camera->startAcquisition(acqParams);
        for (int i = 0; i < 10; ++i) {
            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::seconds{1});
            if (buffer.ok) {
                whiteReference.add(buffer);
                camera->returnBuffer(buffer.id);
            }
        }
        camera->stopAcquisition();
        std::cout << "Done.\n" << std::flush;

        /* NOTE:
         * For real devices, at this point the user should probably be
         * asked to obscure the optics in front of the camera in order
         * for a proper dark reference to be measured.
         *
         * For the virtual device this is not required.
         *
         * Some cameras have an internal shutter, in which case manual
         * user intervention is not required here either.
         */

        std::cout << "Measuring dark reference:\n" << std::flush;
        fluxEngine::BufferContainer darkReference = fluxEngine::createBufferContainer(camera, 10);
        acqParams.referenceName = "DarkReference";
        camera->startAcquisition(acqParams);
        for (int i = 0; i < 10; ++i) {
            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::seconds{1});
            if (buffer.ok) {
                darkReference.add(buffer);
                camera->returnBuffer(buffer.id);
            }
        }
        camera->stopAcquisition();
        std::cout << "Done.\n" << std::flush;

        // Create processing context
        fluxEngine::ProcessingContext::InstrumentParameters instrumentParameters;
        instrumentParameters.whiteReference = &whiteReference;
        instrumentParameters.darkReference = &darkReference;
        fluxEngine::ProcessingContext ctx = fluxEngine::ProcessingContext::createInstrumentProcessingContext(camera, model, instrumentParameters);
        // Request extended object data (bounding box, center of gravity, area)
        ctx.setUseExtendedObjects(true);
        int const sinkIndex = ctx.findOutputSink(/* outputId = */ 0);

        /* NOTE:
         * For real devices, at this point the user should probably be
         * asked to position the object to measure underneath the
         * camera and start the motion of the motion control device
         * they have.
         *
         * For the virtual device this is not required.
         */

        std::cout << "Starting acquisition:\n" << std::flush;
        acqParams.referenceName = {};
        camera->startAcquisition(acqParams);
        std::cout << "Done.\n" << std::flush;

        std::cout << "Processing the first 100 lines...\n" << std::flush;
        int y = 0;
        while (y < 100) {
            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::milliseconds{100});
            if (!buffer.ok)
                continue;

            ctx.setSourceData(buffer);
            ctx.processNext();

            auto sinkData = ctx.outputSinkData(sinkIndex);
            auto beginPointer = static_cast<fluxEngine::OutputExtendedObject const*>(sinkData.data);
            auto endPointer = beginPointer + sinkData.sizes[0];
            for (auto it = beginPointer; it != endPointer; ++it) {
                std::cout << "Found object [at line number " << y
                          << "]: bounding box (" << it->boundingBoxX() << ", " << it->boundingBoxY()
                          << ") size (" << it->boundingBoxWidth() << ", " << it->boundingBoxHeight()
                          << "), center of gravity (" << it->gravityCenterX() << ", " << it->gravityCenterY()
                          << "), area " << it->area() << "\n" << std::flush;
            }

            camera->returnBuffer(buffer.id);
            ++y;
        }

        std::cout << "Stopping acquisition:\n" << std::flush;
        camera->stopAcquisition();
        std::cout << "Done.\n" << std::flush;
    } catch (std::exception& e) {
        std::cerr << "Error: " << e.what() << std::endl;
        return 1;
    } catch (...) {
        std::cerr << "Unknown error." << std::endl;
        return 1;
    }

    return 0;
}

This source file will compile to the executable ExamplePushBroomObjectDetector.

The following classes and methods are among those used in this example:

- fluxEngine::Handle
- fluxEngine::enumerateDevices()
- fluxEngine::ConnectionSettings
- fluxEngine::connectDeviceGroup()
- fluxEngine::DeviceGroup
- fluxEngine::InstrumentDevice
- fluxEngine::BufferContainer
- fluxEngine::Model
- fluxEngine::ProcessingContext
- fluxEngine::OutputExtendedObject

.NET

The source code of the example can be found in the file ExamplePushBroomObjectDetector\Program.cs:

using System;

namespace ExamplePushBroomObjectDetector
{
    class Program
    {
        static void Main(string[] args)
        {
            Console.WriteLine("fluxEngine version: " + LuxFlux.fluxEngineNET.Version.String);
            var handle = new LuxFlux.fluxEngineNET.Handle(ExampleHelpers.IO.ReadLicenseFile());
            handle.SetDriverBaseDirectory(ExampleHelpers.Paths.DriverDirectory);
            handle.CreateProcessingThreads(4);

            // Load virtual camera
            var enumeratedDevices = LuxFlux.fluxEngineNET.DeviceEnumeration.EnumerateDevices(handle, null, TimeSpan.FromSeconds(1));
            LuxFlux.fluxEngineNET.EnumeratedDevice virtualCameraDevice = null;
            foreach (var device in enumeratedDevices.Devices)
            {
                if (device.Driver.Name == "VirtualHyperCamera")
                {
                    virtualCameraDevice = device;
                    break;
                }
            }

            if (virtualCameraDevice == null)
                throw new Exception("Could not find virtual camera driver");

            var connectionSettings = new LuxFlux.fluxEngineNET.ConnectionSettings();
            connectionSettings.DriverName = virtualCameraDevice.Driver.Name;
            connectionSettings.DriverType = virtualCameraDevice.Driver.Type;
            connectionSettings.Id = virtualCameraDevice.Id;
            connectionSettings.Timeout = TimeSpan.FromSeconds(60);
            connectionSettings.ConnectionParameters = new System.Collections.Generic.Dictionary<string, string>();
            connectionSettings.ConnectionParameters["Cube"] = ExampleHelpers.Paths.ExampleDataFileName("ObjectDetectorTest.hdr");
            connectionSettings.ConnectionParameters["WhiteReferenceCube"] = ExampleHelpers.Paths.ExampleDataFileName("ObjectDetectorTest_White.hdr");
            connectionSettings.ConnectionParameters["DarkReferenceCube"] = ExampleHelpers.Paths.ExampleDataFileName("ObjectDetectorTest_Dark.hdr");

            Console.WriteLine("Attempting to connect to device...");
            var deviceGroup = LuxFlux.fluxEngineNET.DeviceGroup.Connect(handle, connectionSettings);
            Console.WriteLine("Connected.");
            if (!(deviceGroup.PrimaryDevice is LuxFlux.fluxEngineNET.InstrumentDevice))
            {
                deviceGroup.Disconnect(TimeSpan.FromSeconds(5));
                throw new Exception("The device is not an instrument device.");
            }
            var camera = (LuxFlux.fluxEngineNET.InstrumentDevice)deviceGroup.PrimaryDevice;

            camera.SetupInternalBuffers(5);

            /* Load model
             *
             * This should be done after connecting with the camera, in
             * case the license is tied to a camera serial number. (In
             * case the license is tied to a dongle or a mainboard id,
             * this may be done beforehand.)
             */
            var model = LuxFlux.fluxEngineNET.Model.LoadFromFile(handle, ExampleHelpers.Paths.ExampleDataFileName("object_detector_test.fluxmdl"));

            /* NOTE:
             * For real devices, at this point the user should probably be
             * asked to insert a white reference underneath the camera.
             *
             * For the virtual device this is not required.
             */

            var acqParams = new LuxFlux.fluxEngineNET.InstrumentDevice.AcquisitionParameters();
            Console.WriteLine("Measuring white reference:");
            var whiteReference = LuxFlux.fluxEngineNET.Util.CreateRingBufferContainer(camera, 10);
            acqParams.ReferenceName = "WhiteReference";
            camera.StartAcquisition(acqParams);
            for (int i = 0; i < 10; ++i)
            {
                var buffer = camera.RetrieveBuffer(TimeSpan.FromSeconds(1));
                if (buffer != null)
                {
                    try
                    {
                        whiteReference.Add(buffer);
                    }
                    finally
                    {
                        camera.ReturnBuffer(buffer);
                    }
                }
            }
            camera.StopAcquisition();
            Console.WriteLine("Done.");

            /* NOTE:
             * For real devices, at this point the user should probably be
             * asked to obscure the optics in front of the camera in order
             * for a proper dark reference to be measured.
             *
             * For the virtual device this is not required.
             *
             * Some cameras have an internal shutter, in which case manual
             * user intervention is not required here either.
             */

            Console.WriteLine("Measuring dark reference:");
            var darkReference = LuxFlux.fluxEngineNET.Util.CreateRingBufferContainer(camera, 10);
            acqParams.ReferenceName = "DarkReference";
            camera.StartAcquisition(acqParams);
            for (int i = 0; i < 10; ++i)
            {
                var buffer = camera.RetrieveBuffer(TimeSpan.FromSeconds(1));
                if (buffer != null)
                {
                    try
                    {
                        darkReference.Add(buffer);
                    }
                    finally
                    {
                        camera.ReturnBuffer(buffer);
                    }
                }
            }
            camera.StopAcquisition();
            Console.WriteLine("Done.");

            // Create processing context
            var instrumentReferences = new LuxFlux.fluxEngineNET.ProcessingContext.BufferReferenceInput();
            instrumentReferences.WhiteReference = whiteReference;
            instrumentReferences.DarkReference = darkReference;
            var instrumentParameters = new LuxFlux.fluxEngineNET.ProcessingContext.InstrumentParameters();
            instrumentParameters.ReferenceInput = instrumentReferences;
            var ctx = LuxFlux.fluxEngineNET.ProcessingContext.CreateForInstrumentProcessing(camera, model, instrumentParameters);
            int sinkIndex = ctx.OutputSinkInfoById(/* outputId = */ 0).Index;

            /* NOTE:
             * For real devices, at this point the user should probably be
             * asked to position the object to measure underneath the
             * camera and start the motion of the motion control device
             * they have.
             *
             * For the virtual device this is not required.
             */

            Console.WriteLine("Starting acquisition:");
            acqParams.ReferenceName = null;
            camera.StartAcquisition(acqParams);
            Console.WriteLine("Done.");

            Console.WriteLine("Processing the first 100 lines...");
            int y = 0;
            while (y < 100)
            {
                var buffer = camera.RetrieveBuffer(TimeSpan.FromMilliseconds(100));
                if (buffer == null)
                    continue;

                try
                {
                    ctx.SetSourceData(buffer);
                    ctx.ProcessNext();

                    var data = ctx.OutputSinkData(sinkIndex).AsObjectList;
                    foreach (var obj in data)
                    {
                        Console.WriteLine($"Found object [at line number {y}]: bounding box ({obj.BoundingBoxX}, {obj.BoundingBoxY}), size ({obj.BoundingBoxWidth}, {obj.BoundingBoxHeight}), center of gravity ({obj.GravityCenterX}, {obj.GravityCenterY}), area {obj.Area}");
                    }
                }
                finally
                {
                    camera.ReturnBuffer(buffer);
                }
                ++y;
            }
            Console.WriteLine("Done.");

            Console.WriteLine("Stopping acquisition:");
            camera.StopAcquisition();
            Console.WriteLine("Done.");

            Console.WriteLine("Disconnecting from device...");
            deviceGroup.Disconnect(TimeSpan.FromSeconds(5));
            Console.WriteLine("Done.");
            ctx.Dispose();
            handle.Dispose();
        }
    }
}

The following classes and methods are among those used in this example:

- LuxFlux.fluxEngineNET.Handle
- LuxFlux.fluxEngineNET.DeviceEnumeration.EnumerateDevices()
- LuxFlux.fluxEngineNET.ConnectionSettings
- LuxFlux.fluxEngineNET.DeviceGroup
- LuxFlux.fluxEngineNET.InstrumentDevice
- LuxFlux.fluxEngineNET.Util.CreateRingBufferContainer()
- LuxFlux.fluxEngineNET.Model
- LuxFlux.fluxEngineNET.ProcessingContext

Python

The source code of the example can be found in the file example_pushbroom_object_detector.py:

#!/usr/bin/env python3

import fluxEngine
import os, sys

import fluxEngine_example_paths as paths
data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data')

print('fluxEngine version {}'.format(fluxEngine.versionString()))
with open(paths.licenseFileName, 'rb') as f:
    handle = fluxEngine.Handle(f.read())
handle.setDriverBaseDirectory(paths.driverDirectory)
handle.createProcessingThreads(4)

enumeratedDevices = fluxEngine.enumerateDevices(handle, -1, 5000)
virtualCameraDevice = None
for device in enumeratedDevices.devices:
    if device.driver.name == 'VirtualHyperCamera':
        virtualCameraDevice = device

if not virtualCameraDevice:
    raise Exception('Could not find virtual camera driver')

connectionSettings = fluxEngine.ConnectionSettings(virtualCameraDevice.driver.name, virtualCameraDevice.driver.type, virtualCameraDevice.id)
connectionSettings.timeout = 60000
connectionSettings.connectionParameters['Cube'] = os.path.join(data_dir, 'ObjectDetectorTest.hdr')
connectionSettings.connectionParameters['WhiteReferenceCube'] = os.path.join(data_dir, 'ObjectDetectorTest_White.hdr')
connectionSettings.connectionParameters['DarkReferenceCube'] = os.path.join(data_dir, 'ObjectDetectorTest_Dark.hdr')

print("Attempting to connect to device...")
deviceGroup = fluxEngine.DeviceGroup(handle, connectionSettings)
print("Connected.")

camera = deviceGroup.primaryDevice()
if not isinstance(camera, fluxEngine.InstrumentDevice):
    deviceGroup.disconnect(5000)
    raise Exception('The device is not an instrument device')

camera.setupInternalBuffers(5)

# Load model
#
# This should be done after connecting with the camera, in
# case the license is tied to a camera serial number. (In
# case the license is tied to a dongle or a mainboard id,
# this may be done beforehand.)
with open(os.path.join(data_dir, 'object_detector_test.fluxmdl'), 'rb') as f:
    model = fluxEngine.Model(handle, f.read())

# NOTE:
# For real devices, at this point the user should probably be
# asked to insert a white reference underneath the camera.
#
# For the virtual device this is not required.

acqParams = fluxEngine.InstrumentDevice.AcquisitionParameters()

print('Measuring white reference:')
whiteReference = fluxEngine.BufferContainer(camera, 10)
acqParams.referenceName = "WhiteReference"
camera.startAcquisition(acqParams)
for i in range(10):
    buffer = camera.retrieveBuffer(1000)
    if buffer:
        whiteReference.add(buffer)
        camera.returnBuffer(buffer)
camera.stopAcquisition()
print('Done.')

# NOTE:
# For real devices, at this point the user should probably be
# asked to obscure the optics in front of the camera in order
# for a proper dark reference to be measured.
#
# For the virtual device this is not required.
#
# Some cameras have an internal shutter, in which case manual
# user intervention is not required here either.

print('Measuring dark reference:')
darkReference = fluxEngine.BufferContainer(camera, 10)
acqParams.referenceName = "DarkReference"
camera.startAcquisition(acqParams)
for i in range(10):
    buffer = camera.retrieveBuffer(1000)
    if buffer:
        darkReference.add(buffer)
        camera.returnBuffer(buffer)
camera.stopAcquisition()
print('Done.')

# Create processing context
instrumentParameters = fluxEngine.InstrumentParameters()
instrumentParameters.whiteReference = whiteReference
instrumentParameters.darkReference = darkReference

context = fluxEngine.ProcessingContext(model, fluxEngine.ProcessingContext.InstrumentProcessing,
                                       device=camera,
                                       instrumentParameters=instrumentParameters)

sinkIndex = context.findOutputSink(0)

# NOTE:
# For real devices, at this point the user should probably be
# asked to position the object to measure underneath the
# camera and start the motion of the motion control device
# they have.
#
# For the virtual device this is not required.

print('Starting acquisition:')
acqParams.referenceName = None
camera.startAcquisition(acqParams)
print('Done.')

print('Processing the first 100 lines...')
y = 0
while y < 100:
    buffer = camera.retrieveBuffer(100)
    if not buffer:
        continue

    context.setSourceData(buffer)
    context.processNext()
    data = context.outputSinkData(sinkIndex)
    for obj in data:
        print("Found object [at line number {}]: bounding box ({}, {}) size ({}, {}), center of gravity ({}, {}), area {}".
              format(y, obj.boundingBox[0], obj.boundingBox[1], obj.boundingBox[2], obj.boundingBox[3],
                     obj.gravityCenter[0], obj.gravityCenter[1], obj.area))
    camera.returnBuffer(buffer)
    y += 1
print('Done.')

print('Stopping acquisition:')
camera.stopAcquisition()
print('Done.')

The following classes and methods are among those used in this example:

- fluxEngine.Handle
- fluxEngine.enumerateDevices()
- fluxEngine.ConnectionSettings
- fluxEngine.DeviceGroup
- fluxEngine.InstrumentDevice
- fluxEngine.BufferContainer
- fluxEngine.Model
- fluxEngine.ProcessingContext
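If the detected objects are to be consumed by other code rather than printed, they can be copied into plain Python structures inside the processing loop. The following sketch does that while discarding very small detections; the helper name collect_objects and the min_area threshold are illustrative only and not part of the fluxEngine API, while context, sinkIndex, and y are the variables from the listing above:

def collect_objects(context, sinkIndex, y, min_area=100):
    # Convert the object detector's sink output for the current line into dicts.
    results = []
    for obj in context.outputSinkData(sinkIndex):
        if obj.area < min_area:
            continue  # ignore very small (likely spurious) detections
        bb = obj.boundingBox
        results.append({
            'reported_at_line': y,
            'bounding_box': (bb[0], bb[1], bb[2], bb[3]),
            'center_of_gravity': (obj.gravityCenter[0], obj.gravityCenter[1]),
            'area': obj.area,
        })
    return results

# Inside the processing loop, instead of the print() call:
#     detections.extend(collect_objects(context, sinkIndex, y))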

Expected Output

The output should look like the following:

fluxEngine version: [...]
Attempting to connect to device...
  - DarkReferenceCube: examples/data/ObjectDetectorTest_Dark.hdr
  - Cube: examples/data/ObjectDetectorTest.hdr
  - WhiteReferenceCube: examples/data/ObjectDetectorTest_White.hdr
Connected.
Measuring white reference:
Done.
Measuring dark reference:
Done.
Starting acquisition:
Done.
Processing the first 100 lines...
Found object [at line number 39]: bounding box (14, 12) size (29, 27), center of gravity (28.3425, 24.7256), area 616
Found object [at line number 39]: bounding box (59, 15) size (35, 24), center of gravity (76.2283, 26.875), area 552
Found object [at line number 86]: bounding box (44, 54) size (50, 32), center of gravity (73.5509, 70.309), area 1042
Found object [at line number 93]: bounding box (16, 43) size (45, 50), center of gravity (35.6035, 64.5995), area 1251
Stopping acquisition:
Done.
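As a quick sanity check on these numbers: each object is reported on the line immediately after its bounding box ends (boundingBoxY + boundingBoxHeight), which is consistent with the detector emitting an object only once it has been scanned completely. The following snippet verifies this against the values copied from the output above:

# (boundingBoxY, boundingBoxHeight, line at which the object was reported),
# copied from the expected output above
objects = [(12, 27, 39), (15, 24, 39), (54, 32, 86), (43, 50, 93)]
for y0, height, reported_at in objects:
    assert y0 + height == reported_at  # the object's last row is y0 + height - 1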