Monday, 21 September 2009

Live camera preview in the Android emulator

I've been looking into getting a live camera preview working in the Android emulator. Currently the Android emulator just gives a black and white chess board animation. After having a look around I found the web site of Tom Gibara who has done some great work to get a live preview working in the emulator. The link to his work can be found here. The basics are that you run the WebcamBroadcaster as a standard java app on your PC. If there are any video devices attached to your PC, it will pick them up and broadcast the frames captured over a socket connection. You then run a SocketCamera class as part of an app in the android emulator, and as long as you have the correct ip address and port it should display the captured images in the emulator. On looking into Tom's code I saw that it seemed to be written for an older version of the Android API so I thought I'd have a go at updating it. As a starting point I'm going to use the
CameraPreview sample code available on the android developers website. My aim was to take this code and, with as few changes as possible, make it so it could be used to give a live camera preview in the emulator.

So the first thing I did was to create a new class called SocketCamera, this is based on Tom's version of the SocketCamera, but unlike Tom's version I am trying to implement a subset of the new camera class android.hardware.Camera and not the older class android.hardware.CameraDevice. Please keep in mind that I've implemented just a subset of the Camera class API. The code was put together fairly quickly and is a bit rough round the edges. Anyhow, here's my new SocketCamera class:


package com.example.socketcamera;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;

/**
 * A drop-in stand-in for a subset of {@link android.hardware.Camera} that,
 * instead of capturing from real hardware, pulls JPEG frames over a TCP
 * socket from a WebcamBroadcaster running on the host PC and draws them onto
 * the supplied SurfaceHolder. Intended for use inside the Android emulator.
 */
public class SocketCamera {

    private static final String LOG_TAG = "SocketCamera:";
    private static final int SOCKET_TIMEOUT = 1000;

    // Singleton instance, mirroring the Camera.open() static-factory style.
    private static SocketCamera socketCamera;

    private CameraCapture capture;

    // The real (emulator) camera is held purely so Camera.Parameters can be
    // round-tripped through getParameters()/setParameters() — a hack so this
    // class can present the same interface as android.hardware.Camera.
    private Camera parametersCamera;
    private SurfaceHolder surfaceHolder;

    //Set the IP address of your pc here!!
    private final String address = "192.168.1.12";
    private final int port = 9889;

    private final boolean preserveAspectRatio = true;
    private final Paint paint = new Paint();

    // Default preview size; replaced when setParameters() supplies one.
    private int width = 240;
    private int height = 320;
    private Rect bounds = new Rect(0, 0, width, height);

    private SocketCamera() {
        // Just used so that we can pass Camera.Parameters in getters and setters
        parametersCamera = Camera.open();
    }

    /**
     * Returns the shared SocketCamera, creating it on first use.
     * Synchronized so two threads racing here cannot create two instances.
     */
    public static synchronized SocketCamera open() {
        if (socketCamera == null) {
            socketCamera = new SocketCamera();
        }
        Log.i(LOG_TAG, "Creating Socket Camera");
        return socketCamera;
    }

    /** Starts the background thread that fetches and renders frames. */
    public void startPreview() {
        capture = new CameraCapture();
        capture.setCapturing(true);
        capture.start();
        Log.i(LOG_TAG, "Starting Socket Camera");
    }

    /** Stops the capture thread. Safe to call even if preview never started. */
    public void stopPreview() {
        if (capture != null) {
            capture.setCapturing(false);
            capture = null;
        }
        Log.i(LOG_TAG, "Stopping Socket Camera");
    }

    /** Remembers the surface the capture thread will draw into. */
    public void setPreviewDisplay(SurfaceHolder surfaceHolder) throws IOException {
        this.surfaceHolder = surfaceHolder;
    }

    /** Stores the parameters on the real camera and adopts its preview size. */
    public void setParameters(Camera.Parameters parameters) {
        // Bit of a hack so the interface looks like that of android.hardware.Camera
        Log.i(LOG_TAG, "Setting Socket Camera parameters");
        parametersCamera.setParameters(parameters);
        Size size = parameters.getPreviewSize();
        bounds = new Rect(0, 0, size.width, size.height);
    }

    /** Returns the parameters held by the backing real camera. */
    public Camera.Parameters getParameters() {
        Log.i(LOG_TAG, "Getting Socket Camera parameters");
        return parametersCamera.getParameters();
    }

    /**
     * Stops any running preview, releases the backing real camera, and clears
     * the singleton so a later open() creates a fresh instance.
     */
    public void release() {
        Log.i(LOG_TAG, "Releasing Socket Camera parameters");
        stopPreview();
        if (parametersCamera != null) {
            parametersCamera.release();
            parametersCamera = null;
        }
        synchronized (SocketCamera.class) {
            socketCamera = null;
        }
    }


    /** Worker thread: one socket connection per frame, decode, then draw. */
    private class CameraCapture extends Thread {

        // volatile: written from the UI thread, read by this thread; without
        // it a stopPreview() might never be observed by the loop below.
        private volatile boolean capturing = false;

        public boolean isCapturing() {
            return capturing;
        }

        public void setCapturing(boolean capturing) {
            this.capturing = capturing;
        }

        @Override
        public void run() {
            while (capturing) {
                // Fetch the frame BEFORE locking the canvas so the surface is
                // not held locked across (potentially slow) network I/O.
                Bitmap bitmap = fetchFrame();
                if (bitmap == null) continue; // connection failed or bad data

                Canvas c = null;
                try {
                    c = surfaceHolder.lockCanvas(null);
                    if (c != null) { // lockCanvas returns null if surface not ready
                        synchronized (surfaceHolder) {
                            drawFrame(c, bitmap);
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // do this in a finally so that if an exception is thrown
                    // during the above, we don't leave the Surface in an
                    // inconsistent state
                    if (c != null) {
                        surfaceHolder.unlockCanvasAndPost(c);
                    }
                }
            }
            Log.i(LOG_TAG, "Socket Camera capture stopped");
        }

        /** Connects to the broadcaster and decodes one frame; null on failure. */
        private Bitmap fetchFrame() {
            Socket socket = null;
            try {
                socket = new Socket();
                socket.bind(null);
                socket.setSoTimeout(SOCKET_TIMEOUT);
                socket.connect(new InetSocketAddress(address, port), SOCKET_TIMEOUT);

                //obtain the bitmap
                InputStream in = socket.getInputStream();
                return BitmapFactory.decodeStream(in);
            } catch (RuntimeException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (socket != null) {
                    try {
                        socket.close();
                    } catch (IOException e) {
                        /* ignore */
                    }
                }
            }
            return null;
        }

        /** Renders the bitmap, scaling (letterboxed if requested) to the bounds. */
        private void drawFrame(Canvas c, Bitmap bitmap) {
            if (bounds.right == bitmap.getWidth()
                    && bounds.bottom == bitmap.getHeight()) {
                // Frame already matches the preview size: blit directly.
                c.drawBitmap(bitmap, 0, 0, null);
            } else {
                Rect dest;
                if (preserveAspectRatio) {
                    // Fit to width, then centre vertically.
                    dest = new Rect(bounds);
                    dest.bottom = bitmap.getHeight() * bounds.right / bitmap.getWidth();
                    dest.offset(0, (bounds.bottom - dest.bottom) / 2);
                } else {
                    dest = bounds;
                }
                c.drawBitmap(bitmap, null, dest, paint);
            }
        }
    }

}

Make sure that you change the ip address to that of your PC.

Now we just need to make a few small modifications to the original CameraPreview. In this class look for the Preview class that extends the SurfaceView. Now we just need to comment out three lines and replace them with our own:



class Preview extends SurfaceView implements SurfaceHolder.Callback {
SurfaceHolder mHolder;
//Camera mCamera;
SocketCamera mCamera;
Preview(Context context) {
super(context);

// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
//mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
//mCamera = Camera.open();
mCamera = SocketCamera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
// TODO: add more exception handling logic here
}
}


Here I've changed three lines:

1. Camera mCamera is replaced with SocketCamera mCamera
2. mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); is replaced with mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
3. mCamera = Camera.open(); is replaced with mCamera = SocketCamera.open();.

So that's it. Now just make sure WebcamBroadcaster is running and start up the CameraPreview app in the Android emulator; you should now be seeing live previews in the emulator. Here's a short video of my emulator with the live preview: (Yes, I know — it's me waving a book around)










Note: if the WebcamBroadcaster is not picking up your devices you most probably have a classpath issue. Make sure that your classpath points to the jmf.jar that is in the same folder as the jmf.properties file. If JMStudio works ok, it's very likely that you have a classpath issue.

Oh, one last thing. I also updated the WebCamBroadcaster so that it can be used with YUV format cameras, so here's the code for that as well:

package com.webcambroadcaster;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Vector;

import javax.imageio.ImageIO;
import javax.media.Buffer;
import javax.media.CannotRealizeException;
import javax.media.CaptureDeviceInfo;
import javax.media.CaptureDeviceManager;
import javax.media.Format;
import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.NoDataSourceException;
import javax.media.NoPlayerException;
import javax.media.Player;
import javax.media.control.FrameGrabbingControl;
import javax.media.format.RGBFormat;
import javax.media.format.VideoFormat;
import javax.media.format.YUVFormat;
import javax.media.protocol.CaptureDevice;
import javax.media.protocol.DataSource;
import javax.media.util.BufferToImage;

/**
* A disposable class that uses JMF to serve a still sequence captured from a
* webcam over a socket connection. It doesn't use TCP, it just blindly
* captures a still, JPEG compresses it, and pumps it out over any incoming
* socket connection.
*
* @author Tom Gibara
*
*/

public class WebcamBroadcaster {

    /** When true, frames are sent as raw packed ints instead of JPEG. */
    public static boolean RAW = false;


    /**
     * Finds a capture device offering a matching RGB or YUV format at the
     * requested size and returns a realized JMF player for it, or null if no
     * suitable device/format is found or player creation fails.
     */
    private static Player createPlayer(int width, int height) {
        try {
            Vector<CaptureDeviceInfo> devices = CaptureDeviceManager.getDeviceList(null);
            for (CaptureDeviceInfo info : devices) {
                DataSource source;
                Format[] formats = info.getFormats();
                for (Format format : formats) {
                    if ((format instanceof RGBFormat)) {
                        RGBFormat rgb = (RGBFormat) format;
                        Dimension size = rgb.getSize();
                        // Only accept 24-bit packed RGB at exactly the requested size.
                        if (size.width != width || size.height != height) continue;
                        if (rgb.getPixelStride() != 3) continue;
                        if (rgb.getBitsPerPixel() != 24) continue;
                        if (rgb.getLineStride() != width * 3) continue;
                        MediaLocator locator = info.getLocator();
                        source = Manager.createDataSource(locator);
                        source.connect();
                        System.out.println("RGB Format Found");
                        ((CaptureDevice) source).getFormatControls()[0].setFormat(rgb);
                    } else if ((format instanceof YUVFormat)) {
                        YUVFormat yuv = (YUVFormat) format;
                        Dimension size = yuv.getSize();
                        if (size.width != width || size.height != height) continue;
                        MediaLocator locator = info.getLocator();
                        source = Manager.createDataSource(locator);
                        source.connect();
                        System.out.println("YUV Format Found");
                        ((CaptureDevice) source).getFormatControls()[0].setFormat(yuv);
                    } else {
                        continue;
                    }

                    return Manager.createRealizedPlayer(source);
                }
            }
        } catch (IOException e) {
            System.out.println(e.toString());
            e.printStackTrace();
        } catch (NoPlayerException e) {
            System.out.println(e.toString());
            e.printStackTrace();
        } catch (CannotRealizeException e) {
            System.out.println(e.toString());
            e.printStackTrace();
        } catch (NoDataSourceException e) {
            System.out.println(e.toString());
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Entry point. Optional args: [port] | [width height] | [width height port].
     * Extra arguments beyond three are ignored.
     */
    public static void main(String[] args) {
        int[] values = new int[args.length];
        for (int i = 0; i < values.length; i++) {
            values[i] = Integer.parseInt(args[i]);
        }

        WebcamBroadcaster wb;
        if (values.length == 0) {
            wb = new WebcamBroadcaster();
        } else if (values.length == 1) {
            wb = new WebcamBroadcaster(values[0]);
        } else if (values.length == 2) {
            wb = new WebcamBroadcaster(values[0], values[1]);
        } else {
            wb = new WebcamBroadcaster(values[0], values[1], values[2]);
        }

        wb.start();
    }

    public static final int DEFAULT_PORT = 9889;
    public static final int DEFAULT_WIDTH = 320;
    public static final int DEFAULT_HEIGHT = 240;

    /** Guards all mutable state below. */
    private final Object lock = new Object();

    private final int width;
    private final int height;
    private final int port;

    private boolean running;

    private Player player;
    private FrameGrabbingControl control;
    private boolean stopping;
    private Worker worker;

    public WebcamBroadcaster(int width, int height, int port) {
        this.width = width;
        this.height = height;
        this.port = port;
    }

    public WebcamBroadcaster(int width, int height) {
        this(width, height, DEFAULT_PORT);
    }

    public WebcamBroadcaster(int port) {
        this(DEFAULT_WIDTH, DEFAULT_HEIGHT, port);
    }

    public WebcamBroadcaster() {
        this(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_PORT);
    }

    /** Creates the player and starts the socket-serving worker thread. No-op if already running. */
    public void start() {
        synchronized (lock) {
            if (running) return;
            player = createPlayer(width, height);
            if (player == null) {
                System.err.println("Unable to find a suitable player");
                return;
            }
            System.out.println("Starting the player");
            player.start();
            control = (FrameGrabbingControl) player.getControl("javax.media.control.FrameGrabbingControl");
            worker = new Worker();
            worker.start();
            System.out.println("Grabbing frames");
            running = true;
        }
    }

    /**
     * Stops the player and waits for the worker to exit.
     * NOTE: the worker may be blocked in ServerSocket.accept(); it will only
     * notice the stop flag when the next connection arrives.
     */
    public void stop() throws InterruptedException {
        Worker w;
        synchronized (lock) {
            if (!running) return;
            if (player != null) {
                control = null;
                player.stop();
                player = null;
            }
            stopping = true;
            running = false;
            // Join on a local copy: clearing the field first and then calling
            // worker.join() (as the previous version did) was a guaranteed NPE.
            w = worker;
            worker = null;
        }
        try {
            if (w != null) {
                w.join();
            }
        } finally {
            stopping = false;
        }
    }

    /**
     * Serves frames: accepts one connection at a time, grabs the current
     * frame from the player, and writes it (JPEG, or raw ints if RAW is set)
     * before closing the connection.
     */
    private class Worker extends Thread {

        private final int[] data = new int[width * height];

        @Override
        public void run() {
            ServerSocket ss;
            try {
                ss = new ServerSocket(port);
            } catch (IOException e) {
                e.printStackTrace();
                return;
            }

            while (true) {
                FrameGrabbingControl c;
                synchronized (lock) {
                    if (stopping) break;
                    c = control;
                }
                Socket socket = null;
                try {
                    socket = ss.accept();

                    Buffer buffer = c.grabFrame();
                    BufferToImage btoi = new BufferToImage((VideoFormat) buffer.getFormat());
                    BufferedImage image = (BufferedImage) btoi.createImage(buffer);

                    if (image != null) {
                        OutputStream out = socket.getOutputStream();
                        if (RAW) {
                            image.getWritableTile(0, 0).getDataElements(0, 0, width, height, data);
                            image.releaseWritableTile(0, 0);
                            DataOutputStream dout = new DataOutputStream(new BufferedOutputStream(out));
                            for (int i = 0; i < data.length; i++) {
                                dout.writeInt(data[i]);
                            }
                            dout.close();
                        } else {
                            ImageIO.write(image, "JPEG", out);
                        }
                    }

                    socket.close();
                    socket = null;
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    // Ensure the connection is closed even if grabbing/encoding failed.
                    if (socket != null)
                        try {
                            socket.close();
                        } catch (IOException e) {
                            /* ignore */
                        }
                }

            }

            try {
                ss.close();
            } catch (IOException e) {
                /* ignore */
            }
        }

    }

}