The folks at OpenKinect did a good job at providing JNI wrappers for their Kinect driver. What they didn’t do is provide a nice example showing how to process the byte stream data from the RGB and depth cameras. As far as I can tell, even the comments in their c-code don’t fully describe the data format. The OpenKinect wiki provides a bit more information but tends to be out of date in places and still doesn’t fully describe the data format. So off to google to find a working example in Java. All I could find was a bunch of stuff telling people to use the Processing OpenKinect code. That’s of little use to me, but I did find browsing their source code useful.
So for those of you who just want a straightforward example demonstrating how to use OpenKinect in Java, you’ve come to the right place. The code below does use BoofCV for some of the image processing and display, but that’s not essential and could be easily modified to not use BoofCV.
OpenKinect Version: 0.1.2
BoofCV Version: 0.14
/** * Example demonstrating how to process and display data from the Kinect. * * @author Peter Abeles */ public class OpenKinectStreamingTest { { // Modify this link to be where you store your shared library NativeLibrary.addSearchPath("freenect", "/home/pja/libfreenect/build/lib"); } MultiSpectral<ImageUInt8> rgb = new MultiSpectral<ImageUInt8>(ImageUInt8.class,1,1,3); ImageUInt16 depth = new ImageUInt16(1,1); BufferedImage outRgb; ImagePanel guiRgb; BufferedImage outDepth; ImagePanel guiDepth; public void process() { Context kinect = Freenect.createContext(); if( kinect.numDevices() < 0 ) throw new RuntimeException("No kinect found!"); Device device = kinect.openDevice(0); device.setDepthFormat(DepthFormat.REGISTERED); device.setVideoFormat(VideoFormat.RGB); device.startDepth(new DepthHandler() { @Override public void onFrameReceived(FrameMode mode, ByteBuffer frame, int timestamp) { processDepth(mode,frame,timestamp); } }); device.startVideo(new VideoHandler() { @Override public void onFrameReceived(FrameMode mode, ByteBuffer frame, int timestamp) { processRgb(mode,frame,timestamp); } }); long starTime = System.currentTimeMillis(); while( starTime+100000 > System.currentTimeMillis() ) {} System.out.println("100 Seconds elapsed"); device.stopDepth(); device.stopVideo(); device.close(); } protected void processDepth( FrameMode mode, ByteBuffer frame, int timestamp ) { System.out.println("Got depth! 
"+timestamp); if( outDepth == null ) { depth.reshape(mode.getWidth(),mode.getHeight()); outDepth = new BufferedImage(depth.width,depth.height,BufferedImage.TYPE_INT_BGR); guiDepth = ShowImages.showWindow(outDepth,"Depth Image"); } int indexIn = 0; for( int y = 0; y < rgb.height; y++ ) { int indexOut = rgb.startIndex + y*rgb.stride; for( int x = 0; x < rgb.width; x++ , indexOut++ ) { depth.data[indexOut] = (short)((frame.get(indexIn++) & 0xFF) | ((frame.get(indexIn++) & 0xFF) << 8 )); } } VisualizeImageData.grayUnsigned(depth,outDepth,1000); guiDepth.repaint(); } protected void processRgb( FrameMode mode, ByteBuffer frame, int timestamp ) { if( mode.getVideoFormat() != VideoFormat.RGB ) { System.out.println("Bad rgb format!"); } System.out.println("Got rgb! "+timestamp); if( outRgb == null ) { rgb.reshape(mode.getWidth(),mode.getHeight()); outRgb = new BufferedImage(rgb.width,rgb.height,BufferedImage.TYPE_INT_BGR); guiRgb = ShowImages.showWindow(outRgb,"RGB Image"); } ImageUInt8 band0 = rgb.getBand(0); ImageUInt8 band1 = rgb.getBand(1); ImageUInt8 band2 = rgb.getBand(2); int indexIn = 0; for( int y = 0; y < rgb.height; y++ ) { int indexOut = rgb.startIndex + y*rgb.stride; for( int x = 0; x < rgb.width; x++ , indexOut++ ) { band2.data[indexOut] = frame.get(indexIn++); band1.data[indexOut] = frame.get(indexIn++); band0.data[indexOut] = frame.get(indexIn++); } } ConvertBufferedImage.convertTo_U8(rgb,outRgb); guiRgb.repaint(); } public static void main( String args[] ) { OpenKinectStreamingTest app = new OpenKinectStreamingTest(); app.process(); } }