Record an ENVI Cube in Reflectances from a PushBroom (Normalized)

This example is identical to the previous one, except that it generates an ENVI cube in reflectances and normalizes the wavelengths to a regular grid from 950 nm to 1650 nm in steps of 4 nm.
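
The regular grid therefore contains (1650 - 950) / 4 + 1 = 176 wavelengths. As a minimal standalone sketch in Python (not part of the example files themselves), the grid can be built as follows; the C++, .NET and Python programs below construct the same 176-element array before creating the recording context:

# Build the regular wavelength grid from 950 nm to 1650 nm in steps of 4 nm.
wavelength_grid = [950.0 + i * 4.0 for i in range(176)]
assert wavelength_grid[0] == 950.0
assert wavelength_grid[-1] == 1650.0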

C++

The source code of the example can be found in the file example_pushbroom_create_envi_normalized.cpp:

  1#if defined(_WIN32) && defined(_MSC_VER)
  2#include <windows.h>
  3#endif
  4
  5#include <iostream>
  6#include <iomanip>
  7#include <string>
  8#include <fstream>
  9#include <streambuf>
 10#include <algorithm>
 11#include <utility>
 12
 13#include <cstddef>
 14
 15#include <fluxEngine/fluxEngine>
 16
 17#include "paths.h"
 18#include "helpers.h"
 19
 20int main()
 21{
 22    try {
 23        std::cout << "fluxEngine version: " << fluxEngine::versionString() << std::endl;
 24        fluxEngine::Handle handle(readFile(g_licenseFileName));
 25        handle.setDriverBaseDirectory(g_driverDirectory);
 26        handle.setDriverIsolationExecutable(g_driverIsolationExecutable);
 27        handle.createProcessingThreads(4);
 28
 29        // Load virtual camera
 30        fluxEngine::EnumerationResult enumeratedDevices = fluxEngine::enumerateDevices(handle, -1, std::chrono::seconds{1});
 31        fluxEngine::EnumeratedDevice* virtualCameraDevice = nullptr;
 32        for (auto const& device : enumeratedDevices.devices) {
 33            if (device->driver->name == "VirtualHyperCamera") {
 34                virtualCameraDevice = device.get();
 35                break;
 36            }
 37        }
 38
 39        if (!virtualCameraDevice)
 40            throw std::runtime_error("Could not find virtual camera driver");
 41
 42        fluxEngine::ConnectionSettings connectionSettings;
 43        connectionSettings.driverName = virtualCameraDevice->driver->name;
 44        connectionSettings.driverType = virtualCameraDevice->driver->type;
 45        connectionSettings.id = virtualCameraDevice->id;
 46        connectionSettings.timeout = std::chrono::seconds{60};
 47        connectionSettings.connectionParameters["Cube"] = encodeFileNameForConnectionParameter(g_cubeFileName);
 48        connectionSettings.connectionParameters["WhiteReferenceCube"] = encodeFileNameForConnectionParameter(g_whiteCubeFileName);
 49        connectionSettings.connectionParameters["DarkReferenceCube"] = encodeFileNameForConnectionParameter(g_darkCubeFileName);
 50
 51        std::cout << "Attempting to connect to device...\n" << std::flush;
 52        for (auto const& parameter : connectionSettings.connectionParameters)
 53            std::cout << "  - " << parameter.first << ": " << parameter.second << "\n" << std::flush;
 54        fluxEngine::DeviceGroup deviceGroup = fluxEngine::connectDeviceGroup(handle, connectionSettings);
 55        std::cout << "Connected.\n" << std::flush;
 56        fluxEngine::InstrumentDevice* camera = dynamic_cast<fluxEngine::InstrumentDevice*>(deviceGroup.primaryDevice());
 57        if (!camera) {
 58            deviceGroup.disconnect(std::chrono::seconds{5});
 59            throw std::runtime_error("The device is not an instrument device");
 60        }
 61
 62        camera->setupInternalBuffers(5);
 63
 64        /* NOTE:
 65         * For real devices at this point the user should probably be
 66         * asked to insert a white reference underneath the camera.
 67         *
 68         * For the virtual device this is not required.
 69         */
 70
 71        fluxEngine::InstrumentDevice::AcquisitionParameters acqParams;
 72        std::cout << "Measuring white reference:\n" << std::flush;
 73        fluxEngine::BufferContainer whiteReference = fluxEngine::createRingBufferContainer(camera, 10);
 74        acqParams.referenceName = "WhiteReference";
 75        camera->startAcquisition(acqParams);
 76        for (int i = 0; i < 10; ++i) {
 77            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::seconds{1});
 78            if (buffer.ok) {
 79                whiteReference.add(buffer);
 80                camera->returnBuffer(buffer.id);
 81            }
 82        }
 83        camera->stopAcquisition();
 84        std::cout << "Done.\n" << std::flush;
 85
 86        /* NOTE:
 87         * For real devices at this point the user should probably be
 88         * asked to obscure the optics in front of the camera in order
 89         * for a proper dark reference to be measured.
 90         *
 91         * For the virtual device this is not required.
 92         *
 93         * Some cameras have an internal shutter, in which case manual
 94         * user intervention is not required here.
 95         */
 96
 97        std::cout << "Measuring dark reference:\n" << std::flush;
 98        fluxEngine::BufferContainer darkReference = fluxEngine::createBufferContainer(camera, 10);
 99        acqParams.referenceName = "DarkReference";
100        camera->startAcquisition(acqParams);
101        for (int i = 0; i < 10; ++i) {
102            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::seconds{1});
103            if (buffer.ok) {
104                darkReference.add(buffer);
105                camera->returnBuffer(buffer.id);
106            }
107        }
108        camera->stopAcquisition();
109        std::cout << "Done.\n" << std::flush;
110
111        // Create recording context
112        fluxEngine::ProcessingContext::InstrumentParameters instrumentParameters;
113        instrumentParameters.whiteReference = &whiteReference;
114        instrumentParameters.darkReference = &darkReference;
115        // We want to normalize to a regularized wavelength grid
116        // and we want to record only reflectances
117        std::vector<double> wavelengthGrid;
118        wavelengthGrid.resize(176);
119        for (std::size_t i = 0; i < 176; ++i)
120            wavelengthGrid[i] = 950.0 + static_cast<double>(i) * 4.0;
121        fluxEngine::ProcessingContext::HSIRecordingResult contextAndInfo = fluxEngine::ProcessingContext::createInstrumentHSIRecordingContext(camera, fluxEngine::ValueType::Reflectance, instrumentParameters, wavelengthGrid);
122
123        std::cout << "The recording will create a cube with the wavelengths: [";
124        for (std::size_t i = 0; i < contextAndInfo.wavelengths.size(); ++i) {
125            if (i > 0)
126                std::cout << ", ";
127            std::cout << contextAndInfo.wavelengths[i];
128        }
129        std::cout << "]\n";
130
131        // Create buffer container for recording 100 lines
132        fluxEngine::BufferContainer recordingBuffer = fluxEngine::createBufferContainer(contextAndInfo.context, 100);
133
134        /* NOTE:
135         * For real devices at this point the user should probably be
136         * asked to position the object to measure underneath the
137         * camera and start the motion control device they are
138         * using.
139         *
140         * For the virtual device this is not required.
141         */
142
143        std::cout << "Starting acquisition:\n" << std::flush;
144        acqParams.referenceName = {};
145        camera->startAcquisition(acqParams);
146        std::cout << "Done.\n" << std::flush;
147
148        std::cout << "Recording buffers" << std::flush;
149        while (recordingBuffer.count() < 100) {
150            fluxEngine::BufferInfo buffer = camera->retrieveBuffer(std::chrono::milliseconds{100});
151            if (!buffer.ok)
152                continue;
153
154            contextAndInfo.context.setSourceData(buffer);
155            contextAndInfo.context.processNext();
156            recordingBuffer.addLastResult(contextAndInfo.context);
157            std::cout << '.' << std::flush;
158
159            camera->returnBuffer(buffer.id);
160        }
161        std::cout << "\n" << std::flush;
162
163        std::cout << "Stopping acquisition:\n" << std::flush;
164        camera->stopAcquisition();
165        std::cout << "Done.\n" << std::flush;
166
167        std::cout << "Creating measurement:\n" << std::flush;
168        fluxEngine::MeasurementHSICubeBufferInput recordingInput;
169        recordingInput.name = "Sample recording";
170        recordingInput.valueType = fluxEngine::ValueType::Reflectance;
171        recordingInput.bufferContainers.push_back(&recordingBuffer);
172        recordingInput.wavelengths = contextAndInfo.wavelengths;
173        recordingInput.whiteReference = contextAndInfo.whiteReference;
174        recordingInput.darkReference = contextAndInfo.darkReference;
175        recordingInput.calibrationInfo = &contextAndInfo.calibrationInfo;
176        fluxEngine::MeasurementList cube = fluxEngine::createMeasurementHSICube(handle, recordingInput);
177        std::cout << "Done.\n" << std::flush;
178
179        std::cout << "Saving measurement to disk:\n" << std::flush;
180        fluxEngine::saveMeasurementList(handle, cube, "ENVI", g_recordingCubeFileName, true);
181        std::cout << "Done.\n" << std::flush;
182    } catch (std::exception& e) {
183        std::cerr << "Error: " << e.what() << std::endl;
184        return 1;
185    } catch (...) {
186        std::cerr << "Unknown error." << std::endl;
187        return 1;
188    }
189
190    return 0;
191}

This source file will compile to the executable ExamplePushBroomCreateENVINormalized.

.NET

The source code of the example can be found in the file ExamplePushBroomCreateENVINormalized\Program.cs.

  1using System;
  2
  3namespace ExamplePushBroomCreateENVINormalized
  4{
  5    class Program
  6    {
  7        static void Main(string[] args)
  8        {
  9            Console.WriteLine("fluxEngine version: " + LuxFlux.fluxEngineNET.Version.String);
 10            var handle = new LuxFlux.fluxEngineNET.Handle(ExampleHelpers.IO.ReadLicenseFile());
 11            handle.SetDriverBaseDirectory(ExampleHelpers.Paths.DriverDirectory);
 12            handle.CreateProcessingThreads(4);
 13
 14            // Load virtual camera
 15            var enumeratedDevices = LuxFlux.fluxEngineNET.DeviceEnumeration.EnumerateDevices(handle, null, TimeSpan.FromSeconds(1));
 16            LuxFlux.fluxEngineNET.EnumeratedDevice virtualCameraDevice = null;
 17            foreach (var device in enumeratedDevices.Devices)
 18            {
 19                if (device.Driver.Name == "VirtualHyperCamera")
 20                {
 21                    virtualCameraDevice = device;
 22                    break;
 23                }
 24            }
 25
 26            if (virtualCameraDevice == null)
 27                throw new Exception("Could not find virtual camera driver");
 28
 29            var connectionSettings = new LuxFlux.fluxEngineNET.ConnectionSettings();
 30            connectionSettings.DriverName = virtualCameraDevice.Driver.Name;
 31            connectionSettings.DriverType = virtualCameraDevice.Driver.Type;
 32            connectionSettings.Id = virtualCameraDevice.Id;
 33            connectionSettings.Timeout = TimeSpan.FromSeconds(60);
 34            connectionSettings.ConnectionParameters = new System.Collections.Generic.Dictionary<string, string>();
 35            connectionSettings.ConnectionParameters["Cube"] = ExampleHelpers.Paths.ExampleDataFileName("MiniCube.hdr");
 36            connectionSettings.ConnectionParameters["WhiteReferenceCube"] = ExampleHelpers.Paths.ExampleDataFileName("MiniCube_White.hdr");
 37            connectionSettings.ConnectionParameters["DarkReferenceCube"] = ExampleHelpers.Paths.ExampleDataFileName("MiniCube_Dark.hdr");
 38
 39            Console.WriteLine("Attempting to connect to device...");
 40            var deviceGroup = LuxFlux.fluxEngineNET.DeviceGroup.Connect(handle, connectionSettings);
 41            Console.WriteLine("Connected.");
 42            if (!(deviceGroup.PrimaryDevice is LuxFlux.fluxEngineNET.InstrumentDevice))
 43            {
 44                deviceGroup.Disconnect(TimeSpan.FromSeconds(5));
 45                throw new Exception("The device is not an instrument device.");
 46            }
 47            var camera = (LuxFlux.fluxEngineNET.InstrumentDevice)deviceGroup.PrimaryDevice;
 48
 49            camera.SetupInternalBuffers(5);
 50
 51
 52            /* NOTE:
 53             * For real devices at this point the user should probably be
 54             * asked to insert a white reference underneath the camera.
 55             *
 56             * For the virtual device this is not required.
 57             */
 58
 59            var acqParams = new LuxFlux.fluxEngineNET.InstrumentDevice.AcquisitionParameters();
 60            Console.WriteLine("Measuring white reference:");
 61            var whiteReference = LuxFlux.fluxEngineNET.Util.CreateRingBufferContainer(camera, 10);
 62            acqParams.ReferenceName = "WhiteReference";
 63            camera.StartAcquisition(acqParams);
 64            for (int i = 0; i < 10; ++i)
 65            {
 66                var buffer = camera.RetrieveBuffer(TimeSpan.FromSeconds(1));
 67                if (buffer != null)
 68                {
 69                    try
 70                    {
 71                        whiteReference.Add(buffer);
 72                    }
 73                    finally
 74                    {
 75                        camera.ReturnBuffer(buffer);
 76                    }
 77                }
 78            }
 79            camera.StopAcquisition();
 80            Console.WriteLine("Done.");
 81
 82            /* NOTE:
 83             * For real devices at this point the user should probably be
 84             * asked to obscure the optics in front of the camera in order
 85             * for a proper dark reference to be measured.
 86             *
 87             * For the virtual device this is not required.
 88             *
 89             * Some cameras have an internal shutter, in which case manual
 90             * user intervention is not required here.
 91             */
 92
 93            Console.WriteLine("Measuring dark reference:");
 94            var darkReference = LuxFlux.fluxEngineNET.Util.CreateRingBufferContainer(camera, 10);
 95            acqParams.ReferenceName = "DarkReference";
 96            camera.StartAcquisition(acqParams);
 97            for (int i = 0; i < 10; ++i)
 98            {
 99                var buffer = camera.RetrieveBuffer(TimeSpan.FromSeconds(1));
100                if (buffer != null)
101                {
102                    try
103                    {
104                        darkReference.Add(buffer);
105                    }
106                    finally
107                    {
108                        camera.ReturnBuffer(buffer);
109                    }
110                }
111            }
112            camera.StopAcquisition();
113            Console.WriteLine("Done.");
114
115            // Create recording context
116            var instrumentReferences = new LuxFlux.fluxEngineNET.ProcessingContext.BufferReferenceInput();
117            instrumentReferences.WhiteReference = whiteReference;
118            instrumentReferences.DarkReference = darkReference;
119            var instrumentParameters = new LuxFlux.fluxEngineNET.ProcessingContext.InstrumentParameters();
120            instrumentParameters.ReferenceInput = instrumentReferences;
121
122            double[] wavelengths = new double[176];
123            for (int i = 0; i < 176; ++i)
124                wavelengths[i] = 950.0 + (double)i * 4.0;
125
126            var contextAndInfo = LuxFlux.fluxEngineNET.ProcessingContext.CreateForInstrumentHSIRecording(camera, LuxFlux.fluxEngineNET.ValueType.Reflectance, instrumentParameters, wavelengths);
127            Console.WriteLine($"The recording will create a cube with the wavelengths: [{string.Join(", ", contextAndInfo.Wavelengths)}]");
128
129            // Create buffer container for recording 100 lines
130            var recordingBuffer = LuxFlux.fluxEngineNET.Util.CreateBufferContainer(contextAndInfo.Context, 100);
131
132            /* NOTE:
133             * For real devices at this point the user should probably be
134             * asked to position the object to measure underneath the
135             * camera and start the motion control device they are
136             * using.
137             *
138             * For the virtual device this is not required.
139             */
140
141            Console.WriteLine("Starting acquisition:");
142            acqParams.ReferenceName = null;
143            camera.StartAcquisition(acqParams);
144            Console.WriteLine("Done.");
145
146            Console.Out.Write("Recording buffers");
147            Console.Out.Flush();
148            while (recordingBuffer.Count < 100)
149            {
150                var buffer = camera.RetrieveBuffer(TimeSpan.FromMilliseconds(100));
151                if (buffer == null)
152                    continue;
153
154                try
155                {
156                    contextAndInfo.Context.SetSourceData(buffer);
157                    contextAndInfo.Context.ProcessNext();
158                    recordingBuffer.AddLastResult(contextAndInfo.Context);
159                    Console.Out.Write(".");
160                    Console.Out.Flush();
161                }
162                finally
163                {
164                    camera.ReturnBuffer(buffer);
165                }
166            }
167            Console.WriteLine("");
168
169            Console.WriteLine("Stopping acquisition:");
170            camera.StopAcquisition();
171            Console.WriteLine("Done.");
172
173            Console.WriteLine("Creating measurement:");
174            var recordingInput = new LuxFlux.fluxEngineNET.MeasurementHSICubeBufferInput();
175            recordingInput.Name = "Sample recording";
176            recordingInput.ValueType = LuxFlux.fluxEngineNET.ValueType.Reflectance;
177            recordingInput.BufferContainers = new LuxFlux.fluxEngineNET.BufferContainer[1];
178            recordingInput.BufferContainers[0] = recordingBuffer;
179            recordingInput.WhiteReference = contextAndInfo.WhiteReference;
180            recordingInput.DarkReference = contextAndInfo.DarkReference;
181            recordingInput.IlluminationReference = contextAndInfo.IlluminationReference;
182            recordingInput.CalibrationInfo = contextAndInfo.CalibrationInfo;
183            recordingInput.Wavelengths = contextAndInfo.Wavelengths;
184            var cube = LuxFlux.fluxEngineNET.MeasurementList.CreateForHSICube(handle, recordingInput);
185            Console.WriteLine("Done.");
186
187            Console.WriteLine("Saving measurement to disk:");
188            LuxFlux.fluxEngineNET.IO.SaveMeasurementList(handle, cube, "ENVI", ExampleHelpers.Paths.RecordingCubeFileName, true);
189            Console.WriteLine("Done.");
190
191            Console.WriteLine("Disconnecting from device...");
192            deviceGroup.Disconnect(TimeSpan.FromSeconds(5));
193            Console.WriteLine("Done.");
194            cube.Dispose();
195            handle.Dispose();
196        }
197    }
198}

Python

The source code of the example can be found in the file example_pushbroom_create_envi_normalized.py:

  1#!/usr/bin/env python3
  2
  3import fluxEngine
  4import os, sys
  5
  6import fluxEngine_example_paths as paths
  7data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data')
  8
  9print('fluxEngine version {}'.format(fluxEngine.versionString()))
 10with open(paths.licenseFileName, 'rb') as f:
 11    handle = fluxEngine.Handle(f.read())
 12handle.setDriverBaseDirectory(paths.driverDirectory)
 13handle.createProcessingThreads(4)
 14
 15enumeratedDevices = fluxEngine.enumerateDevices(handle, -1, 5000)
 16virtualCameraDevice = None
 17for device in enumeratedDevices.devices:
 18    if device.driver.name == 'VirtualHyperCamera':
 19        virtualCameraDevice = device
 20
 21if not virtualCameraDevice:
 22    raise Exception('Could not find virtual camera driver')
 23
 24connectionSettings = fluxEngine.ConnectionSettings(virtualCameraDevice.driver.name, virtualCameraDevice.driver.type, virtualCameraDevice.id)
 25connectionSettings.timeout = 60000
 26connectionSettings.connectionParameters['Cube'] = os.path.join(data_dir, 'MiniCube.hdr')
 27connectionSettings.connectionParameters['WhiteReferenceCube'] = os.path.join(data_dir, 'MiniCube_White.hdr')
 28connectionSettings.connectionParameters['DarkReferenceCube'] = os.path.join(data_dir, 'MiniCube_Dark.hdr')
 29
 30print("Attempting to connect to device...")
 31deviceGroup = fluxEngine.DeviceGroup(handle, connectionSettings)
 32print("Connected.")
 33
 34camera = deviceGroup.primaryDevice()
 35if not isinstance(camera, fluxEngine.InstrumentDevice):
 36    deviceGroup.disconnect(5000)
 37    raise Exception('The device is not an instrument device')
 38
 39camera.setupInternalBuffers(5)
 40
 41# NOTE:
 42# For real devices at this point the user should probably be
 43# asked to insert a white reference underneath the camera.
 44#
 45# For the virtual device this is not required.
 46
 47acqParams = fluxEngine.InstrumentDevice.AcquisitionParameters()
 48
 49print('Measuring white reference:')
 50whiteReference = fluxEngine.BufferContainer(camera, 10)
 51acqParams.referenceName = "WhiteReference"
 52camera.startAcquisition(acqParams)
 53for i in range(10):
 54    buffer = camera.retrieveBuffer(1000)
 55    if buffer:
 56        whiteReference.add(buffer)
 57        camera.returnBuffer(buffer)
 58camera.stopAcquisition()
 59print('Done.')
 60
 61# NOTE:
 62# For real devices at this point the user should probably be
 63# asked to obscure the optics in front of the camera in order
 64# for a proper dark reference to be measured.
 65#
 66# For the virtual device this is not required.
 67#
 68# Some cameras have an internal shutter, in which case manual
 69# user intervention is not required here.
 70#
 71
 72print('Measuring dark reference:')
 73darkReference = fluxEngine.BufferContainer(camera, 10)
 74acqParams.referenceName = "DarkReference"
 75camera.startAcquisition(acqParams)
 76for i in range(10):
 77    buffer = camera.retrieveBuffer(1000)
 78    if buffer:
 79        darkReference.add(buffer)
 80        camera.returnBuffer(buffer)
 81camera.stopAcquisition()
 82print('Done.')
 83
 84# Create recording context
 85instrumentParameters = fluxEngine.InstrumentParameters()
 86instrumentParameters.whiteReference = whiteReference
 87instrumentParameters.darkReference = darkReference
 88
 89# We want to normalize to a regularized wavelength grid
 90# and we want to record only reflectances
 91wavelengthGrid = []
 92for i in range(176):
 93    wavelengthGrid.append(950.0 + i * 4.0)
 94
 95context = fluxEngine.ProcessingContext(None, fluxEngine.ProcessingContext.InstrumentHSIRecording,
 96                                       device=camera,
 97                                       valueType=fluxEngine.ValueType.Reflectance,
 98                                       instrumentParameters=instrumentParameters,
 99                                       wavelengths=wavelengthGrid)
100recordingInfo = context.hsiRecordingResultInfo()
101
102print("The recording will create a cube with the wavelengths: [{}]".format(", ".join('{}'.format(l) for l in recordingInfo.wavelengths)))
103
104# Create buffer container for recording 100 lines
105recordingBuffer = fluxEngine.createBufferContainerForRecordingContext(context, 100)
106
107# NOTE:
108# For real devices at this point the user should probably be
109# asked to position the object to measure underneath the
110# camera and start the motion control device they are
111# using.
112#
113# For the virtual device this is not required.
114#
115
116print('Starting acquisition:')
117acqParams.referenceName = None
118camera.startAcquisition(acqParams)
119print('Done.')
120
121print('Recording buffers:')
122while recordingBuffer.count() < 100:
123    buffer = camera.retrieveBuffer(100)
124    if not buffer:
125        continue
126
127    context.setSourceData(buffer)
128    context.processNext()
129    recordingBuffer.addLastResult(context)
130    sys.stdout.write('.')
131    sys.stdout.flush()
132    camera.returnBuffer(buffer)
133sys.stdout.write('\n')
134print('Done.')
135
136print('Stopping acquisition:')
137camera.stopAcquisition()
138print('Done.')
139
140print('Creating measurement:')
141recordingInput = fluxEngine.MeasurementHSICubeBufferInput()
142recordingInput.name = "Sample recording"
143recordingInput.valueType = fluxEngine.ValueType.Reflectance
144recordingInput.bufferContainers.append(recordingBuffer)
145recordingInput.wavelengths = recordingInfo.wavelengths
146recordingInput.whiteReference = recordingInfo.whiteReference
147recordingInput.darkReference = recordingInfo.darkReference
148recordingInput.calibrationInfo = recordingInfo.calibrationInfo
149
150cube = fluxEngine.createMeasurementHSICube(handle, recordingInput)
151print('Done.')
152
153print('Saving measurement to disk:')
154fluxEngine.saveMeasurementList(handle, cube, "ENVI", paths.recordingCubeFileName, True)
155print('Done.')

Expected Output

The output should look like the following:

fluxEngine version: [...]
Attempting to connect to device...
  - DarkReferenceCube: examples/data/MiniCube_Dark.hdr
  - Cube: examples/data/MiniCube.hdr
  - WhiteReferenceCube: examples/data/MiniCube_White.hdr
Connected.
Measuring white reference:
Done.
Measuring dark reference:
Done.
The recording will create a cube with the wavelengths: [950, 954, 958, 962, 966, 970, 974, 978, 982, 986, 990, 994, 998, 1002, 1006, 1010, 1014, 1018, 1022, 1026, 1030, 1034, 1038, 1042, 1046, 1050, 1054, 1058, 1062, 1066, 1070, 1074, 1078, 1082, 1086, 1090, 1094, 1098, 1102, 1106, 1110, 1114, 1118, 1122, 1126, 1130, 1134, 1138, 1142, 1146, 1150, 1154, 1158, 1162, 1166, 1170, 1174, 1178, 1182, 1186, 1190, 1194, 1198, 1202, 1206, 1210, 1214, 1218, 1222, 1226, 1230, 1234, 1238, 1242, 1246, 1250, 1254, 1258, 1262, 1266, 1270, 1274, 1278, 1282, 1286, 1290, 1294, 1298, 1302, 1306, 1310, 1314, 1318, 1322, 1326, 1330, 1334, 1338, 1342, 1346, 1350, 1354, 1358, 1362, 1366, 1370, 1374, 1378, 1382, 1386, 1390, 1394, 1398, 1402, 1406, 1410, 1414, 1418, 1422, 1426, 1430, 1434, 1438, 1442, 1446, 1450, 1454, 1458, 1462, 1466, 1470, 1474, 1478, 1482, 1486, 1490, 1494, 1498, 1502, 1506, 1510, 1514, 1518, 1522, 1526, 1530, 1534, 1538, 1542, 1546, 1550, 1554, 1558, 1562, 1566, 1570, 1574, 1578, 1582, 1586, 1590, 1594, 1598, 1602, 1606, 1610, 1614, 1618, 1622, 1626, 1630, 1634, 1638, 1642, 1646, 1650]
Starting acquisition:
Done.
Recording buffers....................................................................................................
Stopping acquisition:
Done.
Creating measurement:
Done.
Saving measurement to disk:
Done.

This stores an ENVI cube in the file specified by g_recordingCubeFileName (C++), ExampleHelpers.Paths.RecordingCubeFileName (.NET), or paths.recordingCubeFileName (Python). Because the cube contains reflectance values, no reference measurements are stored with it.
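
To sanity-check the result independently of fluxEngine, the written ENVI header can be opened with the third-party Spectral Python (spectral) package. This is only an illustrative sketch; 'recording.hdr' is a placeholder for the header file the example was actually configured to write:

# Illustrative check of the recorded cube (pip install spectral).
# 'recording.hdr' is a placeholder for the header written by the example.
import spectral

img = spectral.open_image('recording.hdr')
print(img.shape)                # (lines, samples, bands); 100 lines and 176 bands are expected here
print(img.bands.centers[0], img.bands.centers[-1])  # should print 950.0 and 1650.0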