HI,
This is not working, it throws java.lang.UnsupportedOperationException: Invalid operation at line DepthStream depthStream = DepthStream.get(streamReader);
I wonder how it is working for you — thanks!
Sure, this is it. I am running your code as-is; I made no modifications. It should be mentioned that this exception is also thrown when creating a DepthStream, ColorStream, or InfraredStream, BUT it works for PointStream and BodyStream. This is so weird.
java.lang.UnsupportedOperationException: Invalid operation
at com.orbbec.astra.ApiStatus.throwIfError(ApiStatus.java:73)
at com.orbbec.astra.NativeMethods.checkStatus(NativeMethods.java:13)
at com.orbbec.astra.ImageStream.(ImageStream.java:15)
at com.orbbec.astra.DepthStream.(DepthStream.java:8)
at com.orbbec.astra.DepthStream.get(DepthStream.java:18)
at com.orbbec.astra.DepthStream.get(DepthStream.java:22)
at DepthStreamTest.depthStreamTest(DepthStreamTest.java:54)
I think your issue is that you need to initialize the stream differently for the other stream types.
Here is a more in-depth example ripped directly from my robot project (so I know it works). It sets up both a video stream and a depth stream, and has separate listener code for each.
The only thing missing here is my ColumnEvaluator class, which I use to help convert the depth stream into 2D-lidar-like data.
Also note the OpenNI2 SDK contains some java sample code too, it’s worth a look.
import java.awt.image.BufferedImage;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.RotationOrder;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.openni.Device;
import org.openni.DeviceInfo;
import org.openni.OpenNI;
import org.openni.PixelFormat;
import org.openni.Point3D;
import org.openni.SensorType;
import org.openni.VideoFrameRef;
import org.openni.VideoMode;
import org.openni.VideoStream;
import org.openni.VideoStream.NewFrameListener;
import com.hazelcast.core.HazelcastInstanceNotActiveException;
import au.com.rsutton.hazelcast.DataLogValue;
/**
 * Reads depth and color frames from an OpenNI2-compatible camera (e.g. Orbbec Astra)
 * and forwards them to a {@code PointCloudListener}: depth frames are condensed into a
 * sparse point cloud via {@code ColumnEvaluator}, color frames into a {@link BufferedImage}.
 *
 * <p>NOTE(review): frame listeners run on OpenNI's callback thread — not documented here
 * as thread-safe; confirm listener implementations tolerate that.
 */
public class PointCloudProvider
{
	private Device device;

	Logger logger = LogManager.getLogger();

	/**
	 * Opens the camera and starts both the depth and the color stream.
	 *
	 * @param s        optional program args; if non-empty, {@code s[0]} is used as the
	 *                 device URI, otherwise the first enumerated device is opened
	 * @param listener receives decoded color frames and depth-derived point clouds; also
	 *                 chooses the video mode for each stream from the supported modes
	 */
	void startStream(String s[], final PointCloudListener listener)
	{
		System.out.println(System.getProperty("java.library.path"));

		// The Astra native libraries must be on java.library.path so the JVM can find them.
		System.loadLibrary("astra_core");
		System.loadLibrary("astra");

		// initialize OpenNI
		OpenNI.initialize();

		String uri;
		if (s != null && s.length > 0)
		{
			uri = s[0];
		} else
		{
			List<DeviceInfo> devicesInfo = OpenNI.enumerateDevices();
			if (devicesInfo.isEmpty())
			{
				System.out.println("No device is connected");
				return;
			}
			uri = devicesInfo.get(0).getUri();
		}
		device = Device.open(uri);

		VideoStream depthStream = VideoStream.create(device, SensorType.DEPTH);
		List<VideoMode> supportedDepthModes = depthStream.getSensorInfo().getSupportedVideoModes();
		depthStream.setVideoMode(listener.chooseDepthMode(supportedDepthModes));
		depthStream.start();
		depthStream.addNewFrameListener(pointCloudFrameListener(listener));

		VideoStream colorStream = VideoStream.create(device, SensorType.COLOR);
		List<VideoMode> supportedColorModes = colorStream.getSensorInfo().getSupportedVideoModes();
		colorStream.setVideoMode(listener.chooseColorMode(supportedColorModes));
		colorStream.start();
		colorStream.addNewFrameListener(colorFrameListener(listener));
	}

	/**
	 * Builds a listener that decodes each RGB888 color frame into a
	 * {@link BufferedImage} and hands it to the {@code PointCloudListener}.
	 */
	private NewFrameListener colorFrameListener(PointCloudListener listener)
	{
		return new NewFrameListener()
		{
			long lastTime = System.currentTimeMillis();

			@Override
			public void onFrameReady(VideoStream stream)
			{
				VideoFrameRef frame = stream.readFrame();
				PixelFormat pixelFormat = frame.getVideoMode().getPixelFormat();
				if (pixelFormat != PixelFormat.RGB888)
				{
					// BUGFIX: message previously claimed the expected format was DEPTH_1_MM,
					// but this listener decodes RGB888.
					System.out.println("Pixel format is not the expected RGB888, actual is: " + pixelFormat);
				}

				int width = frame.getWidth();
				final BufferedImage res = new BufferedImage(width, frame.getHeight(),
						BufferedImage.TYPE_INT_RGB);

				// Frame data is a flat R,G,B byte triple per pixel, row-major.
				ByteBuffer frameData = frame.getData().order(ByteOrder.LITTLE_ENDIAN);
				int pos = 0;
				while (frameData.remaining() > 0)
				{
					int red = frameData.get() & 0xFF;
					int green = frameData.get() & 0xFF;
					int blue = frameData.get() & 0xFF;
					int rgb = 0xFF000000 | (red << 16) | (green << 8) | blue;
					res.setRGB(pos % width, pos / width, rgb);
					pos++;
				}
				// Release the native frame before the (potentially slow) listener callback.
				frame.release();

				try
				{
					listener.evaluateColorFrame(res);
				} catch (HazelcastInstanceNotActiveException e)
				{
					// we've been shut down
					stopStream();
				}
				logger.debug("ms per color frame " + (System.currentTimeMillis() - lastTime));
				lastTime = System.currentTimeMillis();
			}
		};
	}

	/**
	 * Builds a listener that scans every 10th column of each DEPTH_1_MM frame through a
	 * {@code ColumnEvaluator} and publishes the resulting sparse point cloud.
	 */
	private NewFrameListener pointCloudFrameListener(final PointCloudListener listener)
	{
		return new NewFrameListener()
		{
			long lastTime = System.currentTimeMillis();

			@Override
			public void onFrameReady(VideoStream stream)
			{
				VideoFrameRef frame = stream.readFrame();

				new DataLogValue("FOV h", "" + stream.getHorizontalFieldOfView()).publish();
				new DataLogValue("FOV v", "" + stream.getVerticalFieldOfView()).publish();

				PixelFormat pixelFormat = frame.getVideoMode().getPixelFormat();
				PixelFormat expectedFormat = PixelFormat.DEPTH_1_MM;
				if (pixelFormat != expectedFormat)
				{
					System.out.println(
							"Pixel format is not the expected " + expectedFormat + ", actual is: " + pixelFormat);
				}

				int width = frame.getWidth();
				int height = frame.getHeight();

				ShortBuffer sb = frame.getData().order(ByteOrder.LITTLE_ENDIAN).asShortBuffer();
				sb.rewind();

				List<Vector3D> pointCloud = new LinkedList<>();
				// Sample every 10th column; each column collapses to at most one object point.
				for (int x = 0; x < width; x += 10)
				{
					ColumnEvaluator columnEvaluator = new ColumnEvaluator(stream, x);
					for (int y = 0; y < height; y += 1)
					{
						// Depth values are unsigned 16-bit millimeters; Java shorts are
						// signed, so mask to recover the unsigned range (0 = no reading).
						int z = sb.get(x + (y * width)) & 0xFFFF;
						if (z > 0)
						{
							// CoordinateConverter.convertDepthToWorld is broken with the
							// RealSense D435, so the evaluator works on raw (y, z) instead.
							columnEvaluator.addPoint(y, z);
						}
					}
					Vector3D object = columnEvaluator.findObjects(3);
					if (object != null)
					{
						pointCloud.add(object);
					}
				}
				frame.release();

				try
				{
					listener.evaluatePointCloud(pointCloud);
				} catch (HazelcastInstanceNotActiveException e)
				{
					// we've been shut down
					stopStream();
				}
				// debug (was warn) to match the color listener's per-frame timing log.
				logger.debug("ms per depth frame " + (System.currentTimeMillis() - lastTime));
				lastTime = System.currentTimeMillis();
			}
		};
	}

	/**
	 * Projects a single depth pixel into a 3D world-space point using the stream's
	 * field of view: rotate the forward vector (0,0,z) by the pixel's angular offset
	 * from the image center.
	 *
	 * @param stream the depth stream (supplies FOV and resolution)
	 * @param x      pixel column
	 * @param y      pixel row
	 * @param z      depth reading in millimeters
	 * @return the world-space point, scaled by an empirical correction factor
	 */
	Point3D<Float> convertDepthToWorld(VideoStream stream, int x, int y, int z)
	{
		// Empirical correction factor — presumably device-specific; TODO confirm.
		final float scaling = 1.35f;
		float hfov = stream.getHorizontalFieldOfView();
		float vfov = stream.getVerticalFieldOfView();

		VideoMode videoMode = stream.getVideoMode();
		double xResolution = videoMode.getResolutionX();
		double yResolution = videoMode.getResolutionY();

		// Angular offset of the pixel from the optical center, as a fraction of the FOV.
		double halfX = xResolution / 2.0;
		double yAxisRotation = -hfov * ((x - halfX) / xResolution);
		double halfY = yResolution / 2.0;
		double xAxisRotation = vfov * ((y - halfY) / yResolution);

		Vector3D vector = new Rotation(RotationOrder.XYZ, xAxisRotation, yAxisRotation, 0)
				.applyTo(new Vector3D(0, 0, z)).scalarMultiply(scaling);
		return new Point3D<>((float) vector.getX(), (float) vector.getY(), (float) vector.getZ());
	}

	/**
	 * Closes the device and shuts OpenNI down. Safe to call even if the device was
	 * never opened (e.g. no camera found during {@link #startStream}).
	 */
	void stopStream()
	{
		if (device != null)
		{
			device.close();
		}
		OpenNI.shutdown();
	}
}