As I was investigating the QR-code APIs, I was confronted with a specific question:
How to emulate the camera of my smartphone within the android emulator ?

Since the 14th version of the Android SDK, the emulator can "supposedly" use a webcam, but there are some problems with laptop PCs and integrated webcams (at least with mine).
There is a possible solution below and this is the corresponding Eclipse Android Project.

After a few well-placed Google keywords, I found an interesting page: Live Camera Previews in Android by Tom Gibara. But there were two problems:
The first point was resolved by Atom (Neil Davies) on his blog : Live camera preview in the Android emulator.
It just needed a slight modification, as calling setParameters generates an exception with my SDK, so I quickly bypassed that with a specific method: forcePreviewSize(int,int).

First, the SocketCamera class, which gets images through a socket call:
package com.example.livecam;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;

/**
 * Authors :
 * - Tom Gibara - the original Socket Camera :
 *  http://www.tomgibara.com/android/camera-source
 * - Neil Davies - modified version with updated Android classes :
 *  http://www.inter-fuser.com/2009/09/live-camera-preview-in-android-emulator.html
 * - Joan Reynaud (forcePreviewSize, using the setParameters generate an exception so I had to add a specific method)
 */
/**
 * Emulates the android.hardware.Camera preview API over a TCP socket so the
 * Android emulator (which cannot reliably access a laptop's integrated webcam)
 * can display live frames served by a desktop-side broadcaster.
 *
 * Authors :
 * - Tom Gibara - the original Socket Camera :
 *  http://www.tomgibara.com/android/camera-source
 * - Neil Davies - modified version with updated Android classes :
 *  http://www.inter-fuser.com/2009/09/live-camera-preview-in-android-emulator.html
 * - Joan Reynaud (forcePreviewSize, using setParameters generates an exception so I had to add a specific method)
 */
public class SocketCamera {

	private static final String LOG_TAG = "SocketCamera";
	private static final int SOCKET_TIMEOUT = 1000;

	private static SocketCamera socketCamera;
	private CameraCapture capture;
	private Camera parametersCamera;
	private SurfaceHolder surfaceHolder;

	// Set the IP address of your PC here!!
	// (be careful: 127.0.0.1 means the emulator itself, not your localhost)
	private final String address = "10.0.2.2"; // host machine as seen from inside the emulator
	private final int port = 9889;

	private final boolean preserveAspectRatio = true;
	private final Paint paint = new Paint();

	private int width = 480;
	private int height = 640;
	// Target drawing area; updated by setParameters/forcePreviewSize.
	private Rect bounds = new Rect(0, 0, width, height);

	private SocketCamera() {
		// Just used so that we can pass Camera.Parameters in getters and setters
		parametersCamera = Camera.open();
	}

	/** Returns the shared SocketCamera instance, creating it on first use. */
	public static synchronized SocketCamera open() {
		// FIX: synchronized so concurrent callers cannot create two instances
		Log.d(LOG_TAG, "Creating Socket Camera");
		if (socketCamera == null) {
			socketCamera = new SocketCamera();
		}
		return socketCamera;
	}

	/** Starts the background capture thread that pulls frames over the socket. */
	public void startPreview() {
		capture = new CameraCapture();
		capture.setCapturing(true);
		capture.start();
		Log.d(LOG_TAG, "Starting Socket Camera");
	}

	/** Stops the capture thread. Safe to call even if preview never started. */
	public void stopPreview() {
		// FIX: guard against stopPreview()/release() before startPreview(),
		// which previously threw a NullPointerException
		if (capture != null) {
			capture.setCapturing(false);
		}
		Log.d(LOG_TAG, "Stopping Socket Camera");
	}

	/** Remembers the surface the capture thread will draw frames onto. */
	public void setPreviewDisplay(SurfaceHolder surfaceHolder) throws IOException {
		this.surfaceHolder = surfaceHolder;
	}

	/**
	 * Mirrors Camera.setParameters so this class can stand in for a real
	 * Camera. Also updates the drawing bounds from the preview size.
	 * NOTE(review): on some SDKs this call throws — use forcePreviewSize then.
	 */
	public void setParameters(Camera.Parameters parameters) {
		// Bit of a hack so the interface looks like that of a real Camera
		Log.d(LOG_TAG, "Setting Socket Camera parameters");
		parametersCamera.setParameters(parameters);
		Size size = parameters.getPreviewSize();
		bounds = new Rect(0, 0, size.width, size.height);
	}

	/** Mirrors Camera.getParameters, backed by the real device Camera. */
	public Camera.Parameters getParameters() {
		Log.d(LOG_TAG, "Getting Socket Camera parameters");
		return parametersCamera.getParameters();
	}

	/** Releases the camera by stopping the preview thread. */
	public void release() {
		Log.d(LOG_TAG, "Releasing Socket Camera parameters");
		stopPreview();
	}

	/**
	 * Sets the drawing bounds directly, bypassing setParameters (which
	 * throws on some SDK versions).
	 */
	public void forcePreviewSize(int w, int h) {
		bounds = new Rect(0, 0, w, h);
	}


	/** Background thread: fetches one JPEG frame per socket connection and draws it. */
	private class CameraCapture extends Thread {

		// volatile: written by the UI thread (stopPreview), read by this thread
		private volatile boolean capturing = false;

		public boolean isCapturing() {
			return capturing;
		}

		public void setCapturing(boolean capturing) {
			this.capturing = capturing;
		}

		@Override
		public void run() {
			while (capturing) {
				Canvas c = null;
				try {
					c = surfaceHolder.lockCanvas(null);
					synchronized (surfaceHolder) {
						Socket socket = null;
						try {
							socket = new Socket();
							socket.bind(null);
							socket.setSoTimeout(SOCKET_TIMEOUT);
							socket.connect(new InetSocketAddress(address, port), SOCKET_TIMEOUT);

							// obtain the bitmap
							InputStream in = socket.getInputStream();
							Bitmap bitmap = BitmapFactory.decodeStream(in);

							// render it to the canvas, scaling if necessary.
							// FIX: lockCanvas may return null and decodeStream may
							// fail; the original dereferenced both unconditionally
							// on the same-size path.
							if (c != null && bitmap != null) {
								if (bounds.right == bitmap.getWidth()
										&& bounds.bottom == bitmap.getHeight()) {
									c.drawBitmap(bitmap, 0, 0, null);
								} else {
									Rect dest;
									if (preserveAspectRatio) {
										// letterbox: scale to full width, centre vertically
										dest = new Rect(bounds);
										dest.bottom = bitmap.getHeight() * bounds.right / bitmap.getWidth();
										dest.offset(0, (bounds.bottom - dest.bottom) / 2);
									} else {
										dest = bounds;
									}
									c.drawBitmap(bitmap, null, dest, paint);
								}
							}

						} catch (RuntimeException e) {
							Log.d(LOG_TAG, "RE:" + e.getMessage());
							e.printStackTrace();

						} catch (IOException e) {
							Log.d(LOG_TAG, "IO" + e.getMessage());
							e.printStackTrace();
						} finally {
							// FIX: socket can still be null if the constructor threw
							if (socket != null) {
								try {
									socket.close();
								} catch (IOException e) {
									/* ignore */
								}
							}
						}
					}
				} catch (Exception e) {
					Log.d(LOG_TAG, "E:" + e.getMessage());
					e.printStackTrace();
				} finally {

					// do this in a finally so that if an exception is thrown
					// during the above, we don't leave the Surface in an
					// inconsistent state
					if (c != null) {
						surfaceHolder.unlockCanvasAndPost(c);
					}
				}
			}
			Log.d(LOG_TAG, "Socket Camera capture stopped");
		}
	}

}

The SurfaceView class, which displays things:

package com.example.livecam;

import java.io.IOException;

import android.content.Context;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/**
 * Authors :
 * - Tom Gibara (the original Preview class : http://www.tomgibara.com/android/camera-source)
 * - Neil Davies (modified version with updated Android classes : http://www.inter-fuser.com/2009/09/live-camera-preview-in-android-emulator.html)
 * - Joan Reynaud (the forcePreviewSize call) 
 */
/**
 * SurfaceView that hosts the SocketCamera preview: it wires the surface
 * lifecycle callbacks to the camera's acquire/start/release calls.
 */
class Preview extends SurfaceView implements SurfaceHolder.Callback {

	private static final String TAG = "Preview";

	SurfaceHolder mHolder;
	// SocketCamera stands in for android.hardware.Camera inside the emulator.
	SocketCamera mCamera;

	Preview(Context context) {
		super(context);

		// Install a SurfaceHolder.Callback so we get notified when the
		// underlying surface is created and destroyed.
		mHolder = getHolder();
		mHolder.addCallback(this);
		// SURFACE_TYPE_NORMAL (not PUSH_BUFFERS): SocketCamera draws each
		// frame itself via lockCanvas, so a normal surface is required.
		mHolder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
	}

	public void surfaceCreated(SurfaceHolder holder) {
		Log.d(TAG, "Surface created.");
		// The Surface has been created: acquire the camera and tell it where
		// to draw.
		mCamera = SocketCamera.open();
		try {
			mCamera.setPreviewDisplay(holder);
		} catch (IOException exception) {
			// FIX: log the failure instead of silently swallowing it (was a TODO)
			Log.e(TAG, "Failed to set preview display", exception);
			mCamera.release();
			mCamera = null;
		}
	}

	public void surfaceChanged(SurfaceHolder holder, int format, int width,
			int height) {
		// The surface size is now known; force the preview bounds to match.
		// (setParameters throws on some SDKs, hence forcePreviewSize.)
		if (mCamera != null) {
			mCamera.forcePreviewSize(width, height);
			mCamera.startPreview();
		}
	}

	public void surfaceDestroyed(SurfaceHolder holder) {
		Log.d(TAG, "Surface destroyed.");
		if (mCamera != null) {
			mCamera.release();
			// FIX: drop the stale reference so later callbacks cannot touch
			// a released camera
			mCamera = null;
		}
	}
}
Then I had to rewrite the broadcaster with something different from the Java Media Framework.
I tried some QRcode API examples with Processing and JMyron.
My integrated webcam worked well through JMyron, so I used JMyron to do the work which was quite simple in fact.

The only problem with JMyron is at installation time on my 64bits Windows 7. The extra DLLs (DSVL.dll and myron-ezcam.dll) must be placed in the Windows/SysWOW64 and not in Windows/System32.
Don't forget the "-Djava.library.path" at execution time. It must point to the directory containing JMyron.jar and JMyron.dll.

So here is my modified webcam broadcaster : (Eclipse Project File)
package jopc;

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

import javax.imageio.ImageIO;

import JMyron.JMyron;

/**
 * 
 * Based on the original WebcamBroadcaster from Tom Gibara.
 * A disposable class that uses JMyron to serve a still sequence captured from a
 * webcam over a socket connection. It doesn't use TCP, it just blindly
 * captures a still, JPEG compresses it, and pumps it out over any incoming
 * socket connection.
 * 
 * @author Tom Gibara
 *
 */

public class WebcamBroadcaster {

	/**
	 * Entry point. Optional integer args select the configuration:
	 * none = defaults, one = port, two = width height, three = width height port.
	 */
	public static void main(String[] args) {
		int[] values = new int[args.length];
		for (int i = 0; i < values.length; i++) {
			try {
				values[i] = Integer.parseInt(args[i]);
			} catch (NumberFormatException e) {
				// FIX: fail with a usage message instead of an uncaught exception
				System.err.println("Usage: WebcamBroadcaster [port] | [width height [port]]");
				return;
			}
		}

		WebcamBroadcaster wb;
		if (values.length == 0) {
			wb = new WebcamBroadcaster();
		} else if (values.length == 1) {
			wb = new WebcamBroadcaster(values[0]);
		} else if (values.length == 2) {
			wb = new WebcamBroadcaster(values[0], values[1]);
		} else {
			wb = new WebcamBroadcaster(values[0], values[1], values[2]);
		}

		wb.start();
	}

	public static final int DEFAULT_PORT = 9889;
	public static final int DEFAULT_WIDTH = 640;
	public static final int DEFAULT_HEIGHT = 480;

	// How long accept() blocks before re-checking the stopping flag.
	private static final int ACCEPT_TIMEOUT = 1000;

	private final Object lock = new Object();

	private final int width;
	private final int height;
	private final int port;

	private boolean running;

	private JMyron player;	// default camera object
	// volatile: cleared outside the lock in stop()'s finally block
	private volatile boolean stopping;
	private Worker worker;

	/** Full configuration: capture size and listening port. */
	public WebcamBroadcaster(int width, int height, int port) {
		this.width = width;
		this.height = height;
		this.port = port;
	}

	public WebcamBroadcaster(int width, int height) {
		this(width, height, DEFAULT_PORT);
	}

	public WebcamBroadcaster(int port) {
		this(DEFAULT_WIDTH, DEFAULT_HEIGHT, port);
	}

	public WebcamBroadcaster() {
		this(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_PORT);
	}

	/** Opens the camera and starts the serving thread. No-op if already running. */
	public void start() {
		synchronized (lock) {
			if (running) return;
			player = new JMyron();
			player.start(width, height);
			player.findGlobs(0);	// disable the intelligence to speed up frame rate
			worker = new Worker();
			worker.start();
			running = true;
		}
	}

	/**
	 * Stops broadcasting: shuts the camera down and waits for the worker
	 * thread to exit. No-op if not running.
	 */
	public void stop() throws InterruptedException {
		Worker w;
		synchronized (lock) {
			if (!running) return;
			if (player != null) {
				player.stop();
				player = null;
			}
			stopping = true;
			running = false;
			// FIX: keep a local reference before clearing the field — the
			// original nulled `worker` and then called worker.join(), which
			// always threw a NullPointerException
			w = worker;
			worker = null;
		}
		try {
			w.join();
		} finally {
			stopping = false;
		}
	}

	/** Serving thread: one JPEG frame per incoming connection. */
	private class Worker extends Thread {

		@Override
		public void run() {
			ServerSocket ss;
			try {
				ss = new ServerSocket(port);
				// FIX: time out accept() periodically so the stopping flag is
				// noticed even when no client ever connects; otherwise stop()
				// would block in join() forever
				ss.setSoTimeout(ACCEPT_TIMEOUT);
			} catch (IOException e) {
				e.printStackTrace();
				return;
			}

			while (true) {
				JMyron m;
				synchronized (lock) {
					if (stopping) break;
					m = player;
				}
				Socket socket = null;
				try {
					socket = ss.accept();

					m.update(); // grab the current camera frame
					int[] img = m.image(); // get an image of the camera
					if (img != null) {
						BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
						image.setRGB(0, 0, width, height, img, 0, width);

						OutputStream out = socket.getOutputStream();
						ImageIO.write(image, "JPEG", out);
					}
				} catch (SocketTimeoutException e) {
					// no client within the timeout window; loop to re-check stopping
				} catch (IOException e) {
					e.printStackTrace();
				} finally {
					// single close path; covers both success and failure
					if (socket != null) {
						try {
							socket.close();
						} catch (IOException e) {
							// ignore
						}
					}
				}

			}

			try {
				ss.close();
			} catch (IOException e) {
				// ignore
			}
		}

	}

}