Monday, April 30, 2012

Get color on a specified location from ImageView's background bitmap


Modify from the last exercise "Detect touched position on a ImageView"; get color from the background bitmap on the touched location. The TextView's text color will be changed according to the background color on the touched position.



Modify the custom ImageView, TouchView.java, to add the method getColor() to get color on a specified location and pass to the updateMsg() method of main activity. Notice that we have to convert the x,y from position on View to x, y on bitmap before getPixel() call.
package com.exercise.AndroidDetechTouch;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.widget.ImageView;

/**
 * ImageView subclass that reports the touched position and the color of the
 * background bitmap at that position to the hosting AndroidDetechTouchActivity.
 *
 * Requires android:background to be a BitmapDrawable (set in main.xml); init()
 * will throw a ClassCastException otherwise.
 */
public class TouchView extends ImageView {

 Bitmap bitmap;            // background bitmap, cached once in init()
 double bmWidth, bmHeight; // bitmap size as double, for view->bitmap scaling

 public TouchView(Context context) {
  super(context);
  init();
 }

 public TouchView(Context context, AttributeSet attrs) {
  super(context, attrs);
  init();
 }

 public TouchView(Context context, AttributeSet attrs, int defStyle) {
  super(context, attrs, defStyle);
  init();
 }

 private void init(){
  // Cache the background bitmap and its dimensions for getColor().
  bitmap = ((BitmapDrawable)getBackground()).getBitmap();
  bmWidth = (double)bitmap.getWidth();
  bmHeight = (double)bitmap.getHeight();
 }

 @Override
 protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
  // Fill exactly the space the parent offers.
  setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec),
       MeasureSpec.getSize(heightMeasureSpec));
 }

 @Override
 public boolean onTouchEvent(MotionEvent event) {

  switch(event.getAction()){
  case MotionEvent.ACTION_DOWN:
  case MotionEvent.ACTION_MOVE:
   float x = event.getX();
   float y = event.getY();

   // Report position and sampled color to the hosting activity.
   int color = getColor(x, y);
   ((AndroidDetechTouchActivity)getContext()).updateMsg("Touched@" + x + " : " + y, color);

   break;
  case MotionEvent.ACTION_UP:
   // Finger lifted: clear the message.
   ((AndroidDetechTouchActivity)getContext()).updateMsg("", 0);
   break;
  }

  return true; // consume every touch event
 }

 /**
  * Returns the bitmap color under the view coordinate (x, y),
  * or 0 if the coordinate lies outside the view.
  */
 private int getColor(float x, float y){

  if ( x < 0 || y < 0 || x > (float)getWidth() || y > (float)getHeight()){
   return 0; //Invalid, return 0
  }else{
   //Convert touched x, y on View to x, y on Bitmap
   int xBm = (int)(x * (bmWidth / (double)getWidth()));
   int yBm = (int)(y * (bmHeight / (double)getHeight()));

   // Clamp: a touch exactly on the right/bottom edge (x == getWidth())
   // would otherwise map to bitmap.getWidth(), one past the last pixel,
   // making getPixel() throw IllegalArgumentException.
   xBm = Math.min(xBm, bitmap.getWidth() - 1);
   yBm = Math.min(yBm, bitmap.getHeight() - 1);

   return bitmap.getPixel(xBm, yBm);
  }

 }

}


Modify updateMsg() method of main activity, AndroidDetechTouchActivity.java, to include color and update TextView's TextColor.
package com.exercise.AndroidDetechTouch;

import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;

/**
 * Hosts the TouchView and a TextView overlay; TouchView calls back into
 * updateMsg() with the touch description and the sampled background color.
 */
public class AndroidDetechTouchActivity extends Activity {

    TextView msg;
    TouchView touchView;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        // Resolve the overlay text and the custom touch-sensitive view.
        msg = (TextView) findViewById(R.id.msg);
        touchView = (TouchView) findViewById(R.id.touchview);
    }

    /** Shows {@code tMsg} in the overlay, tinted with the given ARGB color. */
    public void updateMsg(String tMsg, int color) {
        msg.setTextColor(color);
        msg.setText(tMsg);
    }

}


Keep using main.xml from the last exercise.

Download the files.

Next: - Display text on a specified location in a custom View

Sunday, April 29, 2012

Detect touched position on a ImageView


A custom ImageView is implemented, with the onTouchEvent() method overridden. When the user touches the ImageView, the touch position is passed to the main activity and displayed on a TextView.



Implement the custom ImageView, TouchView.java.
package com.exercise.AndroidDetechTouch;

import android.content.Context;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.widget.ImageView;

/**
 * ImageView subclass that forwards the touched position to the hosting
 * AndroidDetechTouchActivity for display in its TextView.
 */
public class TouchView extends ImageView {

    public TouchView(Context context) {
        super(context);
    }

    public TouchView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public TouchView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Fill exactly the space the parent offers.
        setMeasuredDimension(
                MeasureSpec.getSize(widthMeasureSpec),
                MeasureSpec.getSize(heightMeasureSpec));
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        final int action = event.getAction();

        if (action == MotionEvent.ACTION_DOWN || action == MotionEvent.ACTION_MOVE) {
            // Report the current touch position to the hosting activity.
            float x = event.getX();
            float y = event.getY();
            ((AndroidDetechTouchActivity) getContext())
                    .updateMsg("Touched@" + x + " : " + y);
        } else if (action == MotionEvent.ACTION_UP) {
            // Finger lifted: clear the message.
            ((AndroidDetechTouchActivity) getContext()).updateMsg("");
        }

        return true; // consume every touch event
    }

}


Modify the main activity to add updateMsg() method. It will be called from custom ImageView.
package com.exercise.AndroidDetechTouch;

import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;

/**
 * Hosts the TouchView and a TextView overlay; TouchView calls back into
 * updateMsg() with a description of the touch.
 */
public class AndroidDetechTouchActivity extends Activity {

    TextView msg;
    TouchView touchView;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        // Resolve the overlay text and the custom touch-sensitive view.
        msg = (TextView) findViewById(R.id.msg);
        touchView = (TouchView) findViewById(R.id.touchview);
    }

    /** Shows {@code tMsg} in the overlay TextView. */
    public void updateMsg(String tMsg) {
        msg.setText(tMsg);
    }

}


main.xml, to add the custom ImageView and a TextView overlapped.
<?xml version="1.0" encoding="utf-8"?>
<!-- Root column: a title TextView above a FrameLayout that overlaps the
     message TextView and the touch-sensitive custom ImageView. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:orientation="vertical" >

    <TextView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="@string/hello" />
 <!-- FrameLayout stacks its children, so the TextView below sits on top
      of (and is drawn under later siblings of) the TouchView. -->
 <FrameLayout 
        android:layout_width="fill_parent"
        android:layout_height="fill_parent"
     >
     <LinearLayout 
         android:layout_width="fill_parent"
         android:layout_height="fill_parent"
         android:orientation="vertical">
         <!-- Overlay text updated from TouchView via updateMsg(). -->
         <TextView
             android:id="@+id/msg"
             android:layout_width="fill_parent"
             android:layout_height="wrap_content"
             />
     </LinearLayout>
     <!-- Custom ImageView; its background bitmap is sampled on touch. -->
     <com.exercise.AndroidDetechTouch.TouchView
         android:id="@+id/touchview"
         android:layout_width="fill_parent"
         android:layout_height="fill_parent"
         android:background="@drawable/ic_launcher"
         />
 </FrameLayout>
</LinearLayout>


Download the files.

Next: - Get pixel color on a specified location from ImageView's background bitmap

StrictMode.setThreadPolicy and StrictMode.ThreadPolicy.Builder


It was described in last exercise "android.os.NetworkOnMainThreadException"; if you access Network (or Disk read/write) in UI thread, with minSdkVersion targeting the Honeycomb or higher, exception will be thrown. And the solution of using AsyncTask was provided in the exercise.

Here is another un-recommended approach: change StrictMode Policy.

StrictMode is a developer tool which detects things you might be doing by accident and brings them to your attention so you can fix them. StrictMode is most commonly used to catch accidental disk or network access on the application's main thread, UI thread.

Using StrictMode.ThreadPolicy.Builder, you can create your own StrictMode.ThreadPolicy, to permit or apply penalty to detected problems:
  • penaltyDeath(): Crash the whole process on violation.
  • penaltyDeathOnNetwork(): Crash the whole process on any network usage.
  • penaltyDialog(): Show an annoying dialog to the developer on detected violations, rate-limited to be only a little annoying.
  • penaltyDropBox(): Enable detected violations log a stacktrace and timing data to the DropBox on policy violation.
  • penaltyFlashScreen(): Flash the screen during a violation.
  • penaltyLog(): Log detected violations to the system log.

Here is an example of changing the thread policy for network operations: detect network usage, show an annoying dialog, or simply permit it.


package com.exercise.AndroidInternetTxt;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import android.app.Activity;
import android.os.Bundle;
import android.os.StrictMode;
import android.widget.TextView;

/**
 * Demonstrates relaxing StrictMode so a network read can run on the UI
 * thread (deliberately un-recommended -- see the AsyncTask version for the
 * proper approach). Fetches a text file and shows it in textMsg.
 */
public class AndroidInternetTxt extends Activity {
 
 TextView textMsg, textPrompt;
 // Remote plain-text file to download and display.
 final String textSource = "http://sites.google.com/site/androidersite/text.txt";


   /** Called when the activity is first created. */
   @Override
   public void onCreate(Bundle savedInstanceState) {
       super.onCreate(savedInstanceState);
       setContentView(R.layout.main);
       textPrompt = (TextView)findViewById(R.id.textprompt);
       textMsg = (TextView)findViewById(R.id.textmsg);
      
       textPrompt.setText("Wait...");
       
       // Deliberately permit the network access below on the main thread;
       // with penaltyDialog() StrictMode only nags instead of throwing
       // NetworkOnMainThreadException.
       StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
       .detectNetwork() // or .detectAll() for all detectable problems
       .penaltyDialog()  //show a dialog
       //.permitNetwork() //permit Network access 
       .build());
      
       URL textUrl;

       try {
        textUrl = new URL(textSource);

        BufferedReader bufferReader 
         = new BufferedReader(new InputStreamReader(textUrl.openStream()));
        
        // StringBuilder instead of repeated String concatenation (the
        // original used a local confusingly named "StringBuffer" and
        // built the result in O(n^2)).
        StringBuilder stringText = new StringBuilder();
        String line;
        while ((line = bufferReader.readLine()) != null) {
         stringText.append(line);
        }
        bufferReader.close();

        textMsg.setText(stringText.toString());
       } catch (MalformedURLException e) {
        e.printStackTrace();
        textMsg.setText(e.toString());   
       } catch (IOException e) {
        e.printStackTrace();
        textMsg.setText(e.toString());   
       }
  
       textPrompt.setText("Finished!");    
   }

}


Download the files.

Saturday, April 28, 2012

Beginning Android ADK with Arduino

Whether you're new to Arduino and Android development, or you've tinkered a bit with either one, this is the book for you. Android has always been a natural fit with Arduino projects, but now that Google has released the Android Open Accessory Development Kit (the Android ADK), combining Android with Arduino to create custom gadgets has become even easier.
Beginning Android ADK with Arduino shows how the ADK works and how it can be used with a variety of Arduino boards to create a variety of fun projects that showcase the abilities of the ADK.

Mario Böhmer will walk you through several projects, including making sounds, driving motors, and creating alarm systems, all while explaining how to use the ADK and how standard Arduino boards may differ from Google-branded Arduinos. You aren't tied to specific hardware with this book; use what you have, and this book will show you how.

What you’ll learn

  • How different boards work with the ADK
  • How to create your first sketch and project
  • How to work with light and sound
  • How to work with servos and DC motors
  • How to work with photoresistors and thermistors to sense the environment
  • How to make your own capacitive touch game show buzzer
  • How to create your own camera-enabled alarm system

Who this book is for

This book is for beginning Arduino and Android enthusiasts, or Arduino developers who want to try out the new Android ADK.

Table of Contents

  1. Introduction
  2. Android and Arduino: Getting to Know Each Other
  3. Outputs
  4. Inputs
  5. Sounds
  6. Light Intensity Sensing
  7. Temperature Sensing
  8. A Sense of Touch
  9. Making Things Move
  10. Alarm System


android.os.NetworkOnMainThreadException

Refer to my old exercise "Read Text file from internet, using Java code": it is a simple exercise to read something from the internet. It can be downloaded here in project form.

It works as expected, displaying the text file from the internet, for android:minSdkVersion="9" or older. But it fails with android:minSdkVersion="10" or higher. It's a strange and interesting issue for me.

OK for android:minSdkVersion='9' or older



Fail for android:minSdkVersion='10' or higher


After investigated into the logcat, I found that it's Caused by: android.os.NetworkOnMainThreadException!

android.os.NetworkOnMainThreadException is an exception that is thrown when an application attempts to perform a networking operation on its main thread.

This is only thrown for applications targeting the Honeycomb SDK or higher (actually it fails in my exercise with API level 10). Applications targeting earlier SDK versions are allowed to do networking on their main event loop threads, but it's heavily discouraged.

The solution is to move the internet accessing code to a background thread, AsyncTask in my exercise.

package com.exercise.AndroidInternetTxt;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;

import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.widget.TextView;

/**
 * Fetches a remote text file on a background thread (AsyncTask) and shows
 * it in textMsg -- the fix for android.os.NetworkOnMainThreadException.
 */
public class AndroidInternetTxt extends Activity {
 
 TextView textMsg, textPrompt;
 // Remote plain-text file to download and display.
 final String textSource = "http://sites.google.com/site/androidersite/text.txt";


   /** Called when the activity is first created. */
   @Override
   public void onCreate(Bundle savedInstanceState) {
       super.onCreate(savedInstanceState);
       setContentView(R.layout.main);
       textPrompt = (TextView)findViewById(R.id.textprompt);
       textMsg = (TextView)findViewById(R.id.textmsg);
      
       textPrompt.setText("Wait...");
      
       // Kick off the download off the UI thread; UI is updated in
       // onPostExecute().
       new MyTask().execute();
   }
   
   /** Downloads textSource in the background, then publishes the result. */
   private class MyTask extends AsyncTask<Void, Void, Void>{
    
    // Downloaded text, or the exception's string form on failure;
    // written in doInBackground(), read in onPostExecute().
    String textResult;
    
    @Override
    protected Void doInBackground(Void... params) {
     
        URL textUrl;

        try {
         textUrl = new URL(textSource);

         BufferedReader bufferReader 
          = new BufferedReader(new InputStreamReader(textUrl.openStream()));
         
         // StringBuilder instead of repeated String concatenation (the
         // original used a local confusingly named "StringBuffer" and
         // built the result in O(n^2)).
         StringBuilder stringText = new StringBuilder();
         String line;
         while ((line = bufferReader.readLine()) != null) {
          stringText.append(line);
         }
         bufferReader.close();

         textResult = stringText.toString();
        } catch (MalformedURLException e) {
         e.printStackTrace();
         textResult = e.toString();   
        } catch (IOException e) {
         e.printStackTrace();
         textResult = e.toString();   
        }

     return null;
     
    }
    
    @Override
    protected void onPostExecute(Void result) {
     
     // Back on the UI thread: safe to touch the views.
     textMsg.setText(textResult);
     textPrompt.setText("Finished!");  
     
     super.onPostExecute(result);   
    }

   }
}


Download the files.

Another un-recommended approach: StrictMode.setThreadPolicy and StrictMode.ThreadPolicy.Builder

More example:
- Load Bitmap from internet in background thread using AsyncTask


Friday, April 27, 2012

Touch to select focus and metering area

Further work on last exercise "Gets the distances from the camera to the focus point - getFocusDistances()" (and the post "Set Camera.Parameters"), It's modified to implement touching to select focus and metering area.

Touch to select focus and metering area


Modify from the exercise "Gets the distances from the camera to the focus point - getFocusDistances()". In this exercise, a new class CameraSurfaceView.java (extends SurfaceView) is implemented to replace the SurfaceView. And override the onTouchEvent(MotionEvent event) method to get user touch position, and area. The touched area will be passed to main activity, AndroidCamera.java, via touchFocus() method.

package com.exercise.AndroidCamera;

import android.content.Context;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.SurfaceView;

/**
 * SurfaceView used as the camera preview. On ACTION_DOWN it builds a Rect
 * around the touch point (sized by the touch ellipse) and hands it to the
 * hosting AndroidCamera activity via touchFocus().
 */
public class CameraSurfaceView extends SurfaceView {

    public CameraSurfaceView(Context context) {
        super(context);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Fill exactly the space the parent offers.
        setMeasuredDimension(
                MeasureSpec.getSize(widthMeasureSpec),
                MeasureSpec.getSize(heightMeasureSpec));
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {

        if (event.getAction() == MotionEvent.ACTION_DOWN) {
            final float x = event.getX();
            final float y = event.getY();
            // Half-extents of the reported touch ellipse.
            final float halfMajor = event.getTouchMajor() / 2;
            final float halfMinor = event.getTouchMinor() / 2;

            // Rectangle centered on the touch, sized by the touch area.
            final Rect touchRect = new Rect(
                    (int) (x - halfMajor),
                    (int) (y - halfMinor),
                    (int) (x + halfMajor),
                    (int) (y + halfMinor));

            ((AndroidCamera) getContext()).touchFocus(touchRect);
        }

        return true; // consume every touch event
    }

}

Modify main.xml to place CameraSurfaceView, instead of Surfaceview.

<?xml version="1.0" encoding="utf-8"?>
<!-- Camera screen: title, a prompt TextView for status messages, and the
     custom CameraSurfaceView used as the camera preview / touch target. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:orientation="vertical" >

    <TextView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="@string/hello" />
    <!-- Status line updated by AndroidCamera (focus distance, saved URI...). -->
    <TextView
        android:id="@+id/prompt"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"/>
    <!-- Replaces the plain SurfaceView of the previous exercise. -->
    <com.exercise.AndroidCamera.CameraSurfaceView
        android:id="@+id/camerapreview"  
        android:layout_width="fill_parent" 
        android:layout_height="wrap_content" />

</LinearLayout>

Modify the main Java code, To handle touchFocus() method. And also remove the original layoutBackground OnClickListener().

package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.TextView;

/**
 * Camera demo activity: shows a live preview in CameraSurfaceView, draws
 * detected faces (and the touched focus area) on a transparent DrawingView
 * overlay, and lets the user tap to set focus/metering areas before taking
 * a picture.
 *
 * Lifecycle: the camera is opened in surfaceCreated(), preview + face
 * detection start in surfaceChanged(), and everything is released in
 * surfaceDestroyed().
 */
public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

 // Opened in surfaceCreated(); null after surfaceDestroyed().
 Camera camera;
 // Preview surface; forwards tap rectangles to touchFocus().
 CameraSurfaceView cameraSurfaceView;
 SurfaceHolder surfaceHolder;
 // True while preview (and face detection) is running.
 boolean previewing = false;
 LayoutInflater controlInflater = null;
 
 Button buttonTakePicture;
 TextView prompt;
 
 // Transparent overlay drawn on top of the preview.
 DrawingView drawingView;
 // Faces from the latest onFaceDetection() callback; read by DrawingView.onDraw().
 Face[] detectedFaces;
 
 final int RESULT_SAVEIMAGE = 0;
 
 // Used to delay autoFocus() after face detection; calling it directly from
 // the face-detection callback throws (see comment in faceDetectionListener).
 private ScheduledExecutorService myScheduledExecutorService;
 
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        
        getWindow().setFormat(PixelFormat.UNKNOWN);
        // Wire the preview surface; SURFACE_TYPE_PUSH_BUFFERS is required on
        // pre-3.0 devices (deprecated no-op afterwards).
        cameraSurfaceView = (CameraSurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = cameraSurfaceView.getHolder();
        surfaceHolder.addCallback(this);
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        
        // Full-screen transparent overlay for face/touch rectangles.
        drawingView = new DrawingView(this);
        LayoutParams layoutParamsDrawing 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(drawingView, layoutParamsDrawing);
        
        // Inflate the control layer (take-picture button) above the overlay.
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);
        
        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // Capture; JPEG data is saved in myPictureCallback_JPG.
    camera.takePicture(myShutterCallback, 
      myPictureCallback_RAW, myPictureCallback_JPG);
   }});
        
        /*
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub

    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);
   }});
  */
        
        prompt = (TextView)findViewById(R.id.prompt);
    }
    
    /**
     * Called from CameraSurfaceView with the tapped rectangle in view
     * coordinates. Converts it to the camera driver's (-1000..1000) space,
     * sets it as focus + metering area, and starts autofocus.
     */
    public void touchFocus(final Rect tfocusRect){
     
     // Disabled until onAutoFocus() reports success.
     buttonTakePicture.setEnabled(false);
  
     // Stop face detection so it doesn't overwrite the chosen areas.
     camera.stopFaceDetection();
  
     //Convert from View's width and height to +/- 1000
  final Rect targetFocusRect = new Rect(
    tfocusRect.left * 2000/drawingView.getWidth() - 1000,
    tfocusRect.top * 2000/drawingView.getHeight() - 1000,
    tfocusRect.right * 2000/drawingView.getWidth() - 1000,
    tfocusRect.bottom * 2000/drawingView.getHeight() - 1000);
  
  // Single focus area with weight 1000 (the maximum weight).
  final List<Camera.Area> focusList = new ArrayList<Camera.Area>();
  Camera.Area focusArea = new Camera.Area(targetFocusRect, 1000);
  focusList.add(focusArea);
  
  // Must mutate a Parameters copy and write it back with setParameters();
  // modifying camera.getParameters() in place has no effect.
  Parameters para = camera.getParameters();
  para.setFocusAreas(focusList);
  para.setMeteringAreas(focusList);
  camera.setParameters(para);
  
  camera.autoFocus(myAutoFocusCallback);
  
  // Show the tapped rectangle on the overlay.
  drawingView.setHaveTouch(true, tfocusRect);
    drawingView.invalidate();
    }
    
    // Receives face-detection results; draws them and focuses on the first face.
    FaceDetectionListener faceDetectionListener
    = new FaceDetectionListener(){

  @Override
  public void onFaceDetection(Face[] faces, Camera tcamera) {
   
   if (faces.length == 0){
    //prompt.setText(" No Face Detected! ");
    drawingView.setHaveFace(false);
   }else{
    //prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
    drawingView.setHaveFace(true);
    detectedFaces = faces;
    
    //Set the FocusAreas using the first detected face
    List<Camera.Area> focusList = new ArrayList<Camera.Area>();
    Camera.Area firstFace = new Camera.Area(faces[0].rect, 1000);
    focusList.add(firstFace);
    
    Parameters para = camera.getParameters();
    
    // Only set areas the hardware supports.
    if(para.getMaxNumFocusAreas()>0){
     para.setFocusAreas(focusList);
       }
       
       if(para.getMaxNumMeteringAreas()>0){
     para.setMeteringAreas(focusList);
    }
       
       camera.setParameters(para);

    buttonTakePicture.setEnabled(false);

    //Stop further Face Detection
    camera.stopFaceDetection();
    
    buttonTakePicture.setEnabled(false);
    
    /*
     * Allways throw java.lang.RuntimeException: autoFocus failed 
     * if I call autoFocus(myAutoFocusCallback) here!
     * 
     camera.autoFocus(myAutoFocusCallback);
    */
    
    //Delay call autoFocus(myAutoFocusCallback)
    // 500 ms workaround for the RuntimeException noted above.
    myScheduledExecutorService = Executors.newScheduledThreadPool(1);
    myScheduledExecutorService.schedule(new Runnable(){
          public void run() {
           camera.autoFocus(myAutoFocusCallback);
            }
          }, 500, TimeUnit.MILLISECONDS);

   }
   
   drawingView.invalidate();
   
  }};
    
    // Re-enables the shutter button once focus locks, and reports the
    // optimal focus distance in the prompt.
    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

  @Override
  public void onAutoFocus(boolean arg0, Camera arg1) {
   // arg0 == true when focus succeeded.
   if (arg0){
    buttonTakePicture.setEnabled(true);
    camera.cancelAutoFocus();      
   }
   
   // NEAR/OPTIMAL/FAR distances in meters (API level 9+).
   float focusDistances[] = new float[3];
   arg1.getParameters().getFocusDistances(focusDistances);
   prompt.setText("Optimal Focus Distance(meters): " 
     + focusDistances[Camera.Parameters.FOCUS_DISTANCE_OPTIMAL_INDEX]);

  }};
    
    // Intentionally empty: no shutter sound/feedback customization.
    ShutterCallback myShutterCallback = new ShutterCallback(){

  @Override
  public void onShutter() {
   
  }};
  
 // RAW data ignored; only the JPEG callback below saves the image.
 PictureCallback myPictureCallback_RAW = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   
  }};
  
 // Writes the JPEG bytes into MediaStore and restarts preview + detection.
 PictureCallback myPictureCallback_JPG = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   /*Bitmap bitmapPicture 
    = BitmapFactory.decodeByteArray(arg0, 0, arg0.length); */
   
   // Insert an empty MediaStore row, then stream the JPEG into its URI.
   Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

   OutputStream imageFileOS;
   try {
    imageFileOS = getContentResolver().openOutputStream(uriTarget);
    imageFileOS.write(arg0);
    imageFileOS.flush();
    imageFileOS.close();
    
    prompt.setText("Image saved: " + uriTarget.toString());
    
   } catch (FileNotFoundException e) {
    e.printStackTrace();
   } catch (IOException e) {
    e.printStackTrace();
   }

   // takePicture() stops the preview; restart it and face detection.
   camera.startPreview();
   camera.startFaceDetection();
  }};

 @Override
 public void surfaceChanged(SurfaceHolder holder, int format, int width,
   int height) {
  // Restart preview on any surface change: stop first if already running.
  if(previewing){
   camera.stopFaceDetection();
   camera.stopPreview();
   previewing = false;
  }
  
  if (camera != null){
   try {
    camera.setPreviewDisplay(surfaceHolder);
    camera.startPreview();

    prompt.setText(String.valueOf(
      "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
    // Face detection must start after startPreview().
    camera.startFaceDetection();
    previewing = true;
   } catch (IOException e) {
    e.printStackTrace();
   }
  }
 }

 @Override
 public void surfaceCreated(SurfaceHolder holder) {
  // Open the default (back) camera and register for face callbacks.
  camera = Camera.open();
  camera.setFaceDetectionListener(faceDetectionListener);
 }

 @Override
 public void surfaceDestroyed(SurfaceHolder holder) {
  // Tear down in reverse order and release the hardware.
  camera.stopFaceDetection();
  camera.stopPreview();
  camera.release();
  camera = null;
  previewing = false;
 }
 
 /**
  * Transparent overlay that draws detected-face rectangles (first face
  * green, others red) and the tapped focus rectangle (blue).
  */
 private class DrawingView extends View{
  
  boolean haveFace;      // whether detectedFaces should be drawn
  Paint drawingPaint;
  
  boolean haveTouch;     // whether touchArea should be drawn
  Rect touchArea;        // tapped rect, in view coordinates

  public DrawingView(Context context) {
   super(context);
   haveFace = false;
   drawingPaint = new Paint();
   drawingPaint.setColor(Color.GREEN);
   drawingPaint.setStyle(Paint.Style.STROKE); 
   drawingPaint.setStrokeWidth(2);
   
   haveTouch = false;
  }
  
  public void setHaveFace(boolean h){
   haveFace = h;
  }
  
  public void setHaveTouch(boolean t, Rect tArea){
   haveTouch = t;
   touchArea = tArea;
  }

  @Override
  protected void onDraw(Canvas canvas) {
   if(haveFace){

    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
     // UI coordinates range from (0, 0) to (width, height).
     
     int vWidth = getWidth();
     int vHeight = getHeight();
    
    for(int i=0; i<detectedFaces.length; i++){
     
     // First face (used for focusing) in green, the rest in red.
     if(i == 0){
      drawingPaint.setColor(Color.GREEN);
     }else{
      drawingPaint.setColor(Color.RED);
     }
     
     // Map driver space (-1000..1000) to view pixels.
     int l = detectedFaces[i].rect.left;
     int t = detectedFaces[i].rect.top;
     int r = detectedFaces[i].rect.right;
     int b = detectedFaces[i].rect.bottom;
     int left = (l+1000) * vWidth/2000;
     int top  = (t+1000) * vHeight/2000;
     int right = (r+1000) * vWidth/2000;
     int bottom = (b+1000) * vHeight/2000;
     canvas.drawRect(
       left, top, right, bottom,  
       drawingPaint);
    }
   }else{
    canvas.drawColor(Color.TRANSPARENT);
   }
   
   if(haveTouch){
    // Touch rect is already in view coordinates; no mapping needed.
    drawingPaint.setColor(Color.BLUE);
    canvas.drawRect(
      touchArea.left, touchArea.top, touchArea.right, touchArea.bottom,  
      drawingPaint);
   }
  }
  
 }
}


Download the files.

Thursday, April 26, 2012

Set Camera.Parameters

In the exercise "Android 4 Face Detection: setFocusAreas() using face detected faces", I set focus area using the code:

camera.getParameters().setFocusAreas(focusList);
camera.getParameters().setMeteringAreas(focusList);

It's incorrect!

It should be modified as:

Parameters para = camera.getParameters(); 
para.setFocusAreas(focusList); 
camera.setParameters(para);

and

Parameters para = camera.getParameters(); 
para.setMeteringAreas(focusList); 
camera.setParameters(para);

Wednesday, April 25, 2012

Gets the distances from the camera to the focus point - getFocusDistances()


The method getFocusDistances(float[] output) of android.hardware.Camera.Parameters gets the distances from the camera to where an object appears to be in focus. The object is sharpest at the optimal focus distance. The depth of field is the far focus distance minus near focus distance. ~ Since: API Level 9.



Modify AndroidCamera.java from last exercise "java.lang.RuntimeException: autoFocus failed", call getFocusDistances() in onAutoFocus() of myAutoFocusCallback, when focused.


package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

 Camera camera;
 SurfaceView surfaceView;
 SurfaceHolder surfaceHolder;
 boolean previewing = false;
 LayoutInflater controlInflater = null;
 
 Button buttonTakePicture;
 TextView prompt;
 
 DrawingView drawingView;
 Face[] detectedFaces;
 
 final int RESULT_SAVEIMAGE = 0;
 
 private ScheduledExecutorService myScheduledExecutorService;
 
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        
        getWindow().setFormat(PixelFormat.UNKNOWN);
        surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        
        drawingView = new DrawingView(this);
        LayoutParams layoutParamsDrawing 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(drawingView, layoutParamsDrawing);
        
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);
        
        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub
    camera.takePicture(myShutterCallback, 
      myPictureCallback_RAW, myPictureCallback_JPG);
   }});
        
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub

    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);
   }});
        
        prompt = (TextView)findViewById(R.id.prompt);
    }
    
    FaceDetectionListener faceDetectionListener
    = new FaceDetectionListener(){

  @Override
  public void onFaceDetection(Face[] faces, Camera tcamera) {
   
   if (faces.length == 0){
    //prompt.setText(" No Face Detected! ");
    drawingView.setHaveFace(false);
   }else{
    //prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
    drawingView.setHaveFace(true);
    detectedFaces = faces;
    
    //Set the FocusAreas using the first detected face
    List<Camera.Area> focusList = new ArrayList<Camera.Area>();
    Camera.Area firstFace = new Camera.Area(faces[0].rect, 1000);
    focusList.add(firstFace);
    
    if(camera.getParameters().getMaxNumFocusAreas()>0){
     camera.getParameters().setFocusAreas(focusList);
    }
    
    if(camera.getParameters().getMaxNumMeteringAreas()>0){
     camera.getParameters().setMeteringAreas(focusList);
    }

    buttonTakePicture.setEnabled(false);
    //camera.getParameters().setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    
    //Stop further Face Detection
    camera.stopFaceDetection();
    
    buttonTakePicture.setEnabled(false);
    
    /*
     * Allways throw java.lang.RuntimeException: autoFocus failed 
     * if I call autoFocus(myAutoFocusCallback) here!
     * 
     camera.autoFocus(myAutoFocusCallback);
    */
    
    //Delay call autoFocus(myAutoFocusCallback)
    myScheduledExecutorService = Executors.newScheduledThreadPool(1);
    myScheduledExecutorService.schedule(new Runnable(){
          public void run() {
           camera.autoFocus(myAutoFocusCallback);
            }
          }, 500, TimeUnit.MILLISECONDS);

   }
   
   drawingView.invalidate();
   
  }};
    
    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

  @Override
  public void onAutoFocus(boolean arg0, Camera arg1) {
   // TODO Auto-generated method stub
   if (arg0){
    buttonTakePicture.setEnabled(true);
    camera.cancelAutoFocus();      
   }
   
   float focusDistances[] = new float[3];
   arg1.getParameters().getFocusDistances(focusDistances);
   prompt.setText("Optimal Focus Distance(meters): " 
     + focusDistances[Camera.Parameters.FOCUS_DISTANCE_OPTIMAL_INDEX]);

  }};
    
    ShutterCallback myShutterCallback = new ShutterCallback(){

  @Override
  public void onShutter() {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_RAW = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_JPG = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   /*Bitmap bitmapPicture 
    = BitmapFactory.decodeByteArray(arg0, 0, arg0.length); */
   
   Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

   OutputStream imageFileOS;
   try {
    imageFileOS = getContentResolver().openOutputStream(uriTarget);
    imageFileOS.write(arg0);
    imageFileOS.flush();
    imageFileOS.close();
    
    prompt.setText("Image saved: " + uriTarget.toString());
    
   } catch (FileNotFoundException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }

   camera.startPreview();
   camera.startFaceDetection();
  }};

 @Override
 public void surfaceChanged(SurfaceHolder holder, int format, int width,
   int height) {
  // TODO Auto-generated method stub
  if(previewing){
   camera.stopFaceDetection();
   camera.stopPreview();
   previewing = false;
  }
  
  if (camera != null){
   try {
    camera.setPreviewDisplay(surfaceHolder);
    camera.startPreview();

    prompt.setText(String.valueOf(
      "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
    camera.startFaceDetection();
    previewing = true;
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }
  }
 }

 @Override
 public void surfaceCreated(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera = Camera.open();
  camera.setFaceDetectionListener(faceDetectionListener);
 }

 @Override
 public void surfaceDestroyed(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera.stopFaceDetection();
  camera.stopPreview();
  camera.release();
  camera = null;
  previewing = false;
 }
 
 private class DrawingView extends View{
  
  boolean haveFace;
  Paint drawingPaint;

  public DrawingView(Context context) {
   super(context);
   haveFace = false;
   drawingPaint = new Paint();
   drawingPaint.setColor(Color.GREEN);
   drawingPaint.setStyle(Paint.Style.STROKE); 
   drawingPaint.setStrokeWidth(2);
  }
  
  public void setHaveFace(boolean h){
   haveFace = h;
  }

  @Override
  protected void onDraw(Canvas canvas) {
   // TODO Auto-generated method stub
   if(haveFace){

    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
     // UI coordinates range from (0, 0) to (width, height).
     
     int vWidth = getWidth();
     int vHeight = getHeight();
    
    for(int i=0; i<detectedFaces.length; i++){
     
     if(i == 0){
      drawingPaint.setColor(Color.GREEN);
     }else{
      drawingPaint.setColor(Color.RED);
     }
     
     int l = detectedFaces[i].rect.left;
     int t = detectedFaces[i].rect.top;
     int r = detectedFaces[i].rect.right;
     int b = detectedFaces[i].rect.bottom;
     int left = (l+1000) * vWidth/2000;
     int top  = (t+1000) * vHeight/2000;
     int right = (r+1000) * vWidth/2000;
     int bottom = (b+1000) * vHeight/2000;
     canvas.drawRect(
       left, top, right, bottom,  
       drawingPaint);
    }
   }else{
    canvas.drawColor(Color.TRANSPARENT);
   }
  }
  
 }
}


Download the files.

Note:
- Set Camera.Parameters

Related:
- Touch to select focus and metering area


Tuesday, April 24, 2012

java.lang.RuntimeException: autoFocus failed


Refer to the last exercise "Android 4 Face Detection: setFocusAreas() using face detected faces"; it throws java.lang.RuntimeException: autoFocus failed almost every time in onFaceDetection() when camera.autoFocus(myAutoFocusCallback) is called after a face is detected and setFocusAreas() has been called.

I delayed calling camera.autoFocus(myAutoFocusCallback) for 500ms (using a ScheduledExecutorService), and it seems that this solves the problem.



package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

 Camera camera;
 SurfaceView surfaceView;
 SurfaceHolder surfaceHolder;
 boolean previewing = false;
 LayoutInflater controlInflater = null;
 
 Button buttonTakePicture;
 TextView prompt;
 
 DrawingView drawingView;
 Face[] detectedFaces;
 
 final int RESULT_SAVEIMAGE = 0;
 
 private ScheduledExecutorService myScheduledExecutorService;
 
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        
        getWindow().setFormat(PixelFormat.UNKNOWN);
        surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        
        drawingView = new DrawingView(this);
        LayoutParams layoutParamsDrawing 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(drawingView, layoutParamsDrawing);
        
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);
        
        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub
    camera.takePicture(myShutterCallback, 
      myPictureCallback_RAW, myPictureCallback_JPG);
   }});
        
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub

    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);
   }});
        
        prompt = (TextView)findViewById(R.id.prompt);
    }
    
    FaceDetectionListener faceDetectionListener
    = new FaceDetectionListener(){

  @Override
  public void onFaceDetection(Face[] faces, Camera tcamera) {
   
   if (faces.length == 0){
    prompt.setText(" No Face Detected! ");
    drawingView.setHaveFace(false);
   }else{
    prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
    drawingView.setHaveFace(true);
    detectedFaces = faces;
    
    //Set the FocusAreas using the first detected face
    List<Camera.Area> focusList = new ArrayList<Camera.Area>();
    Camera.Area firstFace = new Camera.Area(faces[0].rect, 1000);
    focusList.add(firstFace);
    
    if(camera.getParameters().getMaxNumFocusAreas()>0){
     camera.getParameters().setFocusAreas(focusList);
    }
    
    if(camera.getParameters().getMaxNumMeteringAreas()>0){
     camera.getParameters().setMeteringAreas(focusList);
    }

    buttonTakePicture.setEnabled(false);
    //camera.getParameters().setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    
    //Stop further Face Detection
    camera.stopFaceDetection();
    
    buttonTakePicture.setEnabled(false);
    
    /*
     * Allways throw java.lang.RuntimeException: autoFocus failed 
     * if I call autoFocus(myAutoFocusCallback) here!
     * 
     camera.autoFocus(myAutoFocusCallback);
    */
    
    //Delay call autoFocus(myAutoFocusCallback)
    myScheduledExecutorService = Executors.newScheduledThreadPool(1);
    myScheduledExecutorService.schedule(new Runnable(){
          public void run() {
           camera.autoFocus(myAutoFocusCallback);
            }
          }, 500, TimeUnit.MILLISECONDS);

   }
   
   drawingView.invalidate();
   
  }};
    
    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

  @Override
  public void onAutoFocus(boolean arg0, Camera arg1) {
   // TODO Auto-generated method stub
   if (arg0){
    buttonTakePicture.setEnabled(true);
    camera.cancelAutoFocus();      
   }

  }};
    
    ShutterCallback myShutterCallback = new ShutterCallback(){

  @Override
  public void onShutter() {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_RAW = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_JPG = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   /*Bitmap bitmapPicture 
    = BitmapFactory.decodeByteArray(arg0, 0, arg0.length); */
   
   Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

   OutputStream imageFileOS;
   try {
    imageFileOS = getContentResolver().openOutputStream(uriTarget);
    imageFileOS.write(arg0);
    imageFileOS.flush();
    imageFileOS.close();
    
    prompt.setText("Image saved: " + uriTarget.toString());
    
   } catch (FileNotFoundException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }

   camera.startPreview();
   camera.startFaceDetection();
  }};

 @Override
 public void surfaceChanged(SurfaceHolder holder, int format, int width,
   int height) {
  // TODO Auto-generated method stub
  if(previewing){
   camera.stopFaceDetection();
   camera.stopPreview();
   previewing = false;
  }
  
  if (camera != null){
   try {
    camera.setPreviewDisplay(surfaceHolder);
    camera.startPreview();

    prompt.setText(String.valueOf(
      "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
    camera.startFaceDetection();
    previewing = true;
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }
  }
 }

 @Override
 public void surfaceCreated(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera = Camera.open();
  camera.setFaceDetectionListener(faceDetectionListener);
 }

 @Override
 public void surfaceDestroyed(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera.stopFaceDetection();
  camera.stopPreview();
  camera.release();
  camera = null;
  previewing = false;
 }
 
 private class DrawingView extends View{
  
  boolean haveFace;
  Paint drawingPaint;

  public DrawingView(Context context) {
   super(context);
   haveFace = false;
   drawingPaint = new Paint();
   drawingPaint.setColor(Color.GREEN);
   drawingPaint.setStyle(Paint.Style.STROKE); 
   drawingPaint.setStrokeWidth(2);
  }
  
  public void setHaveFace(boolean h){
   haveFace = h;
  }

  @Override
  protected void onDraw(Canvas canvas) {
   // TODO Auto-generated method stub
   if(haveFace){

    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
     // UI coordinates range from (0, 0) to (width, height).
     
     int vWidth = getWidth();
     int vHeight = getHeight();
    
    for(int i=0; i<detectedFaces.length; i++){
     
     if(i == 0){
      drawingPaint.setColor(Color.GREEN);
     }else{
      drawingPaint.setColor(Color.RED);
     }
     
     int l = detectedFaces[i].rect.left;
     int t = detectedFaces[i].rect.top;
     int r = detectedFaces[i].rect.right;
     int b = detectedFaces[i].rect.bottom;
     int left = (l+1000) * vWidth/2000;
     int top  = (t+1000) * vHeight/2000;
     int right = (r+1000) * vWidth/2000;
     int bottom = (b+1000) * vHeight/2000;
     canvas.drawRect(
       left, top, right, bottom,  
       drawingPaint);
    }
   }else{
    canvas.drawColor(Color.TRANSPARENT);
   }
  }
  
 }
}


Download the files.

Next: - Gets the distances from the camera to the focus point - getFocusDistances()

Monday, April 23, 2012

Android 4 Face Detection: setFocusAreas() using face detected faces


Last exercise "Android 4 Face Detection: Display detected face area" we can get the detected face areas in onFaceDetection() of FaceDetectionListener. We can create a List of Camera.Area from the detected faces of Face[], to assign the area for focusing.



note:
Before using this API or setFocusAreas(List), apps should call getMaxNumFocusAreas() to know the maximum number of focus areas first. If the value is 0, focus area is not supported.


Each focus area is a rectangle with specified weight. The direction is relative to the sensor orientation, that is, what the sensor sees. The direction is not affected by the rotation or mirroring of setDisplayOrientation(int). Coordinates of the rectangle range from -1000 to 1000. (-1000, -1000) is the upper left point. (1000, 1000) is the lower right point. The width and height of focus areas cannot be 0 or negative.


The weight must range from 1 to 1000. The weight should be interpreted as a per-pixel weight - all pixels in the area have the specified weight. This means a small area with the same weight as a larger area will have less influence on the focusing than the larger area. Focus areas can partially overlap and the driver will add the weights in the overlap region. ~ Reference: http://developer.android.com/reference/android/hardware/Camera.Parameters.html#getFocusAreas().

Modify the main code in last exercise:

package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

 Camera camera;
 SurfaceView surfaceView;
 SurfaceHolder surfaceHolder;
 boolean previewing = false;
 LayoutInflater controlInflater = null;
 
 Button buttonTakePicture;
 TextView prompt;
 
 DrawingView drawingView;
 Face[] detectedFaces;
 
 final int RESULT_SAVEIMAGE = 0;
 
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        
        getWindow().setFormat(PixelFormat.UNKNOWN);
        surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        
        drawingView = new DrawingView(this);
        LayoutParams layoutParamsDrawing 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(drawingView, layoutParamsDrawing);
        
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);
        
        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub
    camera.takePicture(myShutterCallback, 
      myPictureCallback_RAW, myPictureCallback_JPG);
   }});
        
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // TODO Auto-generated method stub

    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);
   }});
        
        prompt = (TextView)findViewById(R.id.prompt);
    }
    
    FaceDetectionListener faceDetectionListener
    = new FaceDetectionListener(){

  @Override
  public void onFaceDetection(Face[] faces, Camera camera) {
   
   if (faces.length == 0){
    prompt.setText(" No Face Detected! ");
    drawingView.setHaveFace(false);
   }else{
    prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
    drawingView.setHaveFace(true);
    detectedFaces = faces;
    
    /*
    int maxNumFocusAreas = camera.getParameters().getMaxNumFocusAreas();
    int maxNumMeteringAreas = camera.getParameters().getMaxNumMeteringAreas();
    prompt.setText(String.valueOf(faces.length) + " Face Detected :) "
      + " maxNumFocusAreas=" + maxNumFocusAreas 
      + " maxNumMeteringAreas=" + maxNumMeteringAreas
      );
      */
    
    //Set the FocusAreas using the first detected face
    List<Camera.Area> focusList = new ArrayList<Camera.Area>();
    Camera.Area firstFace = new Camera.Area(faces[0].rect, 1000);
    focusList.add(firstFace);
    
    if(camera.getParameters().getMaxNumFocusAreas()>0){
     camera.getParameters().setFocusAreas(focusList);
    }
    
    if(camera.getParameters().getMaxNumMeteringAreas()>0){
     camera.getParameters().setMeteringAreas(focusList);
    }

    buttonTakePicture.setEnabled(false);
    //camera.getParameters().setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    
    //Stop further Face Detection
    camera.stopFaceDetection();
    
    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);

   }
   
   drawingView.invalidate();
   
  }};
    
    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

  @Override
  public void onAutoFocus(boolean arg0, Camera arg1) {
   // TODO Auto-generated method stub
   buttonTakePicture.setEnabled(true);
   camera.cancelAutoFocus();

  }};
    
    ShutterCallback myShutterCallback = new ShutterCallback(){

  @Override
  public void onShutter() {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_RAW = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   
  }};
  
 PictureCallback myPictureCallback_JPG = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // TODO Auto-generated method stub
   /*Bitmap bitmapPicture 
    = BitmapFactory.decodeByteArray(arg0, 0, arg0.length); */
   
   Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

   OutputStream imageFileOS;
   try {
    imageFileOS = getContentResolver().openOutputStream(uriTarget);
    imageFileOS.write(arg0);
    imageFileOS.flush();
    imageFileOS.close();
    
    prompt.setText("Image saved: " + uriTarget.toString());
    
   } catch (FileNotFoundException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }

   camera.startPreview();
   camera.startFaceDetection();
  }};

 @Override
 public void surfaceChanged(SurfaceHolder holder, int format, int width,
   int height) {
  // TODO Auto-generated method stub
  if(previewing){
   camera.stopFaceDetection();
   camera.stopPreview();
   previewing = false;
  }
  
  if (camera != null){
   try {
    camera.setPreviewDisplay(surfaceHolder);
    camera.startPreview();

    prompt.setText(String.valueOf(
      "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
    camera.startFaceDetection();
    previewing = true;
   } catch (IOException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
   }
  }
 }

 @Override
 public void surfaceCreated(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera = Camera.open();
  camera.setFaceDetectionListener(faceDetectionListener);
 }

 @Override
 public void surfaceDestroyed(SurfaceHolder holder) {
  // TODO Auto-generated method stub
  camera.stopFaceDetection();
  camera.stopPreview();
  camera.release();
  camera = null;
  previewing = false;
 }
 
 private class DrawingView extends View{
  
  boolean haveFace;
  Paint drawingPaint;

  public DrawingView(Context context) {
   super(context);
   haveFace = false;
   drawingPaint = new Paint();
   drawingPaint.setColor(Color.GREEN);
   drawingPaint.setStyle(Paint.Style.STROKE); 
   drawingPaint.setStrokeWidth(2);
  }
  
  public void setHaveFace(boolean h){
   haveFace = h;
  }

  @Override
  protected void onDraw(Canvas canvas) {
   // TODO Auto-generated method stub
   if(haveFace){

    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
     // UI coordinates range from (0, 0) to (width, height).
     
     int vWidth = getWidth();
     int vHeight = getHeight();
    
    for(int i=0; i<detectedFaces.length; i++){
     
     if(i == 0){
      drawingPaint.setColor(Color.GREEN);
     }else{
      drawingPaint.setColor(Color.RED);
     }
     
     int l = detectedFaces[i].rect.left;
     int t = detectedFaces[i].rect.top;
     int r = detectedFaces[i].rect.right;
     int b = detectedFaces[i].rect.bottom;
     int left = (l+1000) * vWidth/2000;
     int top  = (t+1000) * vHeight/2000;
     int right = (r+1000) * vWidth/2000;
     int bottom = (b+1000) * vHeight/2000;
     canvas.drawRect(
       left, top, right, bottom,  
       drawingPaint);
    }
   }else{
    canvas.drawColor(Color.TRANSPARENT);
   }
  }
  
 }
}

Important Note!

Please note that this is a simple exercise to try the face detection function of Android 4, not a complete application. Testing it on a Galaxy Nexus, it sometimes throws java.lang.RuntimeException: autoFocus failed — maybe because the camera cannot focus. ~ Solved; refer to the next post: java.lang.RuntimeException: autoFocus failed.

There is another bug here: the code calling setFocusAreas() and setMeteringAreas() is incorrect! Please refer to the post Set Camera.Parameters.

Download the files.

Saturday, April 21, 2012

Sony Mobile Device Loaner Program for developers


This Device Loaner Program is intended as a low cost (free!) option to allow developers to test apps on a physical Xperia smartphone or accessory device for up to 30 days.

For the smaller studios and independent developers with limited resources, this is a perfect program to take advantage of. With it, developers can immediately borrow an Xperia™ smartphone for up to 30 days. The service is completely free, and initially aimed at the U.S. and Canadian market. For developers outside of these markets, you’ll be paying for international shipping fees, plus applicable customs/duties for your particular destination.

- Device Loaner Program


Friday, April 20, 2012

Android 4 Face Detection: Display detected face area

This builds further on the last exercise, "Face detection for Camera". Once a face is detected, the callback method onFaceDetection() of FaceDetectionListener is called with a parameter of Face[], a list of face objects. The face object, Face, contains the bounds of the face (rect).



A custom View, DrawingView, is added to draw rectangle to show the detected face area, over the Preview SurfaceView.

For rect, bounds of the face: (-1000, -1000) represents the top-left of the camera field of view, and (1000, 1000) represents the bottom-right of the field of view ~ Refer http://developer.android.com/reference/android/hardware/Camera.Face.html#rect.

So we have to convert from (-1000, -1000)~(1000, 1000) to (0, 0)~(width, height of the View) in onDraw() of DrawingView.

Modify the main code in AndroidCamera.java
package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;

import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

/**
 * Camera preview Activity (Android 4, API 14) that performs live face
 * detection and overlays a green rectangle on each detected face.
 *
 * Lifecycle (ordering matters for the Camera API):
 *  - surfaceCreated(): Camera.open() + register the FaceDetectionListener
 *  - surfaceChanged(): startPreview() then startFaceDetection()
 *  - surfaceDestroyed(): stop detection/preview and release the camera
 * After takePicture() the preview stops, so both startPreview() and
 * startFaceDetection() must be called again in the JPEG callback.
 */
public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

 // Non-null only between surfaceCreated() and surfaceDestroyed().
 Camera camera;
 SurfaceView surfaceView;
 SurfaceHolder surfaceHolder;
 // True while preview (and face detection) is running.
 boolean previewing = false;
 LayoutInflater controlInflater = null;
 
 Button buttonTakePicture;
 TextView prompt;
 
 // Overlay view that draws rectangles over the detected faces.
 DrawingView drawingView;
 // Last face list delivered by onFaceDetection(); read by DrawingView.onDraw().
 Face[] detectedFaces;
 
 final int RESULT_SAVEIMAGE = 0;
 
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        
        getWindow().setFormat(PixelFormat.UNKNOWN);
        surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        // Deprecated, but required for camera preview on pre-API-11 devices.
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        
        // Stack the face-rectangle overlay on top of the preview surface.
        drawingView = new DrawingView(this);
        LayoutParams layoutParamsDrawing 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(drawingView, layoutParamsDrawing);
        
        // Inflate the control layout (take-picture button) above everything else.
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl 
         = new LayoutParams(LayoutParams.FILL_PARENT, 
           LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);
        
        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // Capture a frame; results arrive in the three callbacks below.
    camera.takePicture(myShutterCallback, 
      myPictureCallback_RAW, myPictureCallback_JPG);
   }});
        
        // Tapping the background starts an auto-focus cycle before capture.
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

   @Override
   public void onClick(View arg0) {
    // Disable capture until focus completes (re-enabled in onAutoFocus()).
    buttonTakePicture.setEnabled(false);
    camera.autoFocus(myAutoFocusCallback);
   }});
        
        prompt = (TextView)findViewById(R.id.prompt);
    }
    
    // Receives detected faces for each preview frame and refreshes the overlay.
    FaceDetectionListener faceDetectionListener
    = new FaceDetectionListener(){

  @Override
  public void onFaceDetection(Face[] faces, Camera camera) {
   
   if (faces.length == 0){
    prompt.setText(" No Face Detected! ");
    drawingView.setHaveFace(false);
   }else{
    prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
    drawingView.setHaveFace(true);
    detectedFaces = faces;
   }
   
   // Request a redraw so DrawingView.onDraw() reflects the new face list.
   drawingView.invalidate();
   
  }};
    
    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

  @Override
  public void onAutoFocus(boolean arg0, Camera arg1) {
   // Focus attempt finished (successfully or not); allow capture again.
   buttonTakePicture.setEnabled(true);
  }};
    
    ShutterCallback myShutterCallback = new ShutterCallback(){

  @Override
  public void onShutter() {
   // No shutter feedback needed for this exercise.
   
  }};
  
 PictureCallback myPictureCallback_RAW = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   // RAW data is not used in this exercise.
   
  }};
  
 // Saves the JPEG to the MediaStore and restarts preview + face detection.
 PictureCallback myPictureCallback_JPG = new PictureCallback(){

  @Override
  public void onPictureTaken(byte[] arg0, Camera arg1) {
   /*Bitmap bitmapPicture 
    = BitmapFactory.decodeByteArray(arg0, 0, arg0.length); */
   
   Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

   OutputStream imageFileOS;
   try {
    imageFileOS = getContentResolver().openOutputStream(uriTarget);
    imageFileOS.write(arg0);
    imageFileOS.flush();
    imageFileOS.close();
    
    prompt.setText("Image saved: " + uriTarget.toString());
    
   } catch (FileNotFoundException e) {
    // NOTE(review): failures are only logged; the stream also leaks if
    // write()/flush() throws, since close() is skipped on this path.
    e.printStackTrace();
   } catch (IOException e) {
    e.printStackTrace();
   }

   // takePicture() stopped the preview; both calls are needed to resume,
   // and startFaceDetection() must follow startPreview().
   camera.startPreview();
   camera.startFaceDetection();
  }};

 @Override
 public void surfaceChanged(SurfaceHolder holder, int format, int width,
   int height) {
  // Surface geometry changed: restart preview, stopping it first if running.
  if(previewing){
   camera.stopFaceDetection();
   camera.stopPreview();
   previewing = false;
  }
  
  if (camera != null){
   try {
    camera.setPreviewDisplay(surfaceHolder);
    camera.startPreview();

    // getMaxNumDetectedFaces() > 0 means face detection is supported.
    prompt.setText(String.valueOf(
      "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
    camera.startFaceDetection();
    previewing = true;
   } catch (IOException e) {
    e.printStackTrace();
   }
  }
 }

 @Override
 public void surfaceCreated(SurfaceHolder holder) {
  // Open the default (back-facing) camera and attach the face listener.
  camera = Camera.open();
  camera.setFaceDetectionListener(faceDetectionListener);
 }

 @Override
 public void surfaceDestroyed(SurfaceHolder holder) {
  // Stop everything and release the camera for other applications.
  camera.stopFaceDetection();
  camera.stopPreview();
  camera.release();
  camera = null;
  previewing = false;
 }
 
 // Transparent overlay that converts camera-driver face coordinates to view
 // coordinates and strokes a green rectangle around each detected face.
 private class DrawingView extends View{
  
  boolean haveFace;
  Paint drawingPaint;

  public DrawingView(Context context) {
   super(context);
   haveFace = false;
   drawingPaint = new Paint();
   drawingPaint.setColor(Color.GREEN);
   drawingPaint.setStyle(Paint.Style.STROKE); 
   drawingPaint.setStrokeWidth(2);
  }
  
  // Set by the FaceDetectionListener before it calls invalidate().
  public void setHaveFace(boolean h){
   haveFace = h;
  }

  @Override
  protected void onDraw(Canvas canvas) {
   if(haveFace){

    // Camera driver coordinates range from (-1000, -1000) to (1000, 1000).
     // UI coordinates range from (0, 0) to (width, height).
     
     int vWidth = getWidth();
     int vHeight = getHeight();
    
    for(int i=0; i<detectedFaces.length; i++){
     
     int l = detectedFaces[i].rect.left;
     int t = detectedFaces[i].rect.top;
     int r = detectedFaces[i].rect.right;
     int b = detectedFaces[i].rect.bottom;
     // Linear map from [-1000, 1000] to [0, view size] on each axis.
     int left = (l+1000) * vWidth/2000;
     int top  = (t+1000) * vHeight/2000;
     int right = (r+1000) * vWidth/2000;
     int bottom = (b+1000) * vHeight/2000;
     canvas.drawRect(
       left, top, right, bottom,  
       drawingPaint);
    }
   }else{
    canvas.drawColor(Color.TRANSPARENT);
   }
  }
  
 }
}


Download the files.

Next: - Android 4 Face Detection: setFocusAreas() using face detected faces

Thursday, April 19, 2012

Face detection for Camera

A long time ago (2010-05-19), I wrote a post, "Android FaceDetector", about how to use android.media.FaceDetector to identify faces in a Bitmap.

Starting from Android 4 (API Level 14), the android.hardware.Camera class provides a face-detection feature. You can build apps that detect faces on the live camera preview.

2 Face detected

The method startFaceDetection() starts the face detection. This should be called after preview is started. The camera will notify Camera.FaceDetectionListener of the detected faces in the preview frame. The detected faces may be the same as the previous ones. Applications should call stopFaceDetection() to stop the face detection. This method is supported if getMaxNumDetectedFaces() returns a number larger than 0. If the face detection has started, apps should not call this again.

When the face detection is running, setWhiteBalance(String), setFocusAreas(List), and setMeteringAreas(List) have no effect. The camera uses the detected faces to do auto-white balance, auto exposure, and autofocus.


If the apps call autoFocus(AutoFocusCallback), the camera will stop sending face callbacks. The last face callback indicates the areas used to do autofocus. After focus completes, face detection will resume sending face callbacks. If the apps call cancelAutoFocus(), the face callbacks will also resume.

After calling takePicture(Camera.ShutterCallback, Camera.PictureCallback, Camera.PictureCallback) or stopPreview(), and then resuming preview with startPreview(), the apps should call this method again to resume face detection.


It's modified from the exercise "Start Camera auto-focusing, autoFocus()" to add the face-detection function.

Modify main.xml to add a SurfaceView for camera preview, and a TextView to display the number of face detected.
<?xml version="1.0" encoding="utf-8"?>
<!-- Main layout: title text, a status TextView (prompt) and the camera
     preview SurfaceView, stacked vertically. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
   android:layout_width="fill_parent"
   android:layout_height="fill_parent"
   android:orientation="vertical" >

   <TextView
       android:layout_width="fill_parent"
       android:layout_height="wrap_content"
       android:text="@string/hello" />
   <!-- Shows face-count / status messages from the Activity. -->
   <TextView
       android:id="@+id/prompt"
       android:layout_width="fill_parent"
       android:layout_height="wrap_content"/>
   <!-- Camera preview target; its SurfaceHolder is managed by the Activity. -->
   <SurfaceView
       android:id="@+id/camerapreview" 
       android:layout_width="fill_parent"
       android:layout_height="wrap_content" />

</LinearLayout>


Create /res/layout/control.xml, a view overlapping the preview SurfaceView, to provide a button to take a picture.
<?xml version="1.0" encoding="utf-8"?>
<!-- Control overlay placed above the camera preview: tapping the background
     (id/background) triggers auto-focus; the button takes a picture. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
 android:id="@+id/background"
   android:orientation="vertical"
   android:layout_width="fill_parent"
   android:layout_height="fill_parent"
   android:gravity="bottom"
   >
<Button
 android:id="@+id/takepicture" 
 android:layout_width="wrap_content"
 android:layout_height="wrap_content"
 android:text=" * Take Picture "
 android:layout_gravity="right"
 android:layout_margin="10px"
 />
</LinearLayout>


Main code, call camera.startFaceDetection() after camera.startPreview(), and call camera.stopFaceDetection() before camera.stopPreview(), also define our FaceDetectionListener to display the result of number of face detected.
package com.exercise.AndroidCamera;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;

import android.app.Activity;
import android.content.ContentValues;
import android.content.pm.ActivityInfo;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Face;
import android.hardware.Camera.FaceDetectionListener;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.ShutterCallback;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images.Media;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

/**
 * Camera preview Activity demonstrating the Android 4 (API level 14) face
 * detection API. For each preview frame the FaceDetectionListener reports
 * the detected faces and the count is shown in the prompt TextView.
 * Tapping the background auto-focuses; the button captures a JPEG and
 * saves it through the MediaStore.
 *
 * Required Camera call ordering: startFaceDetection() only after
 * startPreview(), and both must be called again after takePicture().
 */
public class AndroidCamera extends Activity implements SurfaceHolder.Callback{

    // Non-null only between surfaceCreated() and surfaceDestroyed().
    Camera camera;
    SurfaceView surfaceView;
    SurfaceHolder surfaceHolder;
    // True while preview (and face detection) is running.
    boolean previewing = false;
    LayoutInflater controlInflater = null;

    Button buttonTakePicture;
    TextView prompt;

    final int RESULT_SAVEIMAGE = 0;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

        getWindow().setFormat(PixelFormat.UNKNOWN);
        surfaceView = (SurfaceView)findViewById(R.id.camerapreview);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        // Deprecated, but required for camera preview on pre-API-11 devices.
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

        // Overlay the control layout (take-picture button) on the preview.
        controlInflater = LayoutInflater.from(getBaseContext());
        View viewControl = controlInflater.inflate(R.layout.control, null);
        LayoutParams layoutParamsControl
            = new LayoutParams(LayoutParams.FILL_PARENT,
                LayoutParams.FILL_PARENT);
        this.addContentView(viewControl, layoutParamsControl);

        buttonTakePicture = (Button)findViewById(R.id.takepicture);
        buttonTakePicture.setOnClickListener(new Button.OnClickListener(){

            @Override
            public void onClick(View arg0) {
                // Guard: a tap may arrive after the camera has been released.
                if (camera != null) {
                    camera.takePicture(myShutterCallback,
                        myPictureCallback_RAW, myPictureCallback_JPG);
                }
            }});

        // Tapping anywhere on the background starts an auto-focus cycle.
        LinearLayout layoutBackground = (LinearLayout)findViewById(R.id.background);
        layoutBackground.setOnClickListener(new LinearLayout.OnClickListener(){

            @Override
            public void onClick(View arg0) {
                if (camera != null) {
                    // Disable capture until focus completes (see onAutoFocus()).
                    buttonTakePicture.setEnabled(false);
                    camera.autoFocus(myAutoFocusCallback);
                }
            }});

        prompt = (TextView)findViewById(R.id.prompt);
    }

    // Called for every preview frame with the list of detected faces.
    FaceDetectionListener faceDetectionListener
        = new FaceDetectionListener(){

        @Override
        public void onFaceDetection(Face[] faces, Camera camera) {
            if (faces.length == 0){
                prompt.setText(" No Face Detected! ");
            }else{
                prompt.setText(String.valueOf(faces.length) + " Face Detected :) ");
            }
        }};

    AutoFocusCallback myAutoFocusCallback = new AutoFocusCallback(){

        @Override
        public void onAutoFocus(boolean arg0, Camera arg1) {
            // Focus attempt finished (successfully or not); allow capture again.
            buttonTakePicture.setEnabled(true);
        }};

    ShutterCallback myShutterCallback = new ShutterCallback(){

        @Override
        public void onShutter() {
            // No shutter feedback needed for this exercise.
        }};

    PictureCallback myPictureCallback_RAW = new PictureCallback(){

        @Override
        public void onPictureTaken(byte[] arg0, Camera arg1) {
            // RAW data is not used in this exercise.
        }};

    // Saves the JPEG via the MediaStore, then restarts preview + detection.
    PictureCallback myPictureCallback_JPG = new PictureCallback(){

        @Override
        public void onPictureTaken(byte[] arg0, Camera arg1) {
            Uri uriTarget = getContentResolver().insert(Media.EXTERNAL_CONTENT_URI, new ContentValues());

            // insert() may return null when the MediaStore entry cannot be created.
            if (uriTarget == null) {
                prompt.setText("Image not saved: MediaStore insert failed");
            } else {
                // try/finally so the stream is closed even if write() throws;
                // previously close() was skipped on any I/O failure (leak).
                OutputStream imageFileOS = null;
                try {
                    imageFileOS = getContentResolver().openOutputStream(uriTarget);
                    imageFileOS.write(arg0);
                    imageFileOS.flush();

                    prompt.setText("Image saved: " + uriTarget.toString());
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (imageFileOS != null) {
                        try {
                            imageFileOS.close();
                        } catch (IOException e) {
                            // Best-effort close; data was already flushed above.
                            e.printStackTrace();
                        }
                    }
                }
            }

            // takePicture() stopped the preview; resume it, then face detection
            // (startFaceDetection() must follow startPreview()).
            camera.startPreview();
            camera.startFaceDetection();
        }};

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        // Surface geometry changed: restart preview, stopping it first if running.
        if(previewing){
            camera.stopFaceDetection();
            camera.stopPreview();
            previewing = false;
        }

        if (camera != null){
            try {
                camera.setPreviewDisplay(surfaceHolder);
                camera.startPreview();

                // getMaxNumDetectedFaces() > 0 means face detection is supported.
                prompt.setText(String.valueOf(
                    "Max Face: " + camera.getParameters().getMaxNumDetectedFaces()));
                camera.startFaceDetection();
                previewing = true;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // Open the default (back-facing) camera and attach the face listener.
        camera = Camera.open();
        camera.setFaceDetectionListener(faceDetectionListener);
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Stop everything and release the camera for other applications.
        camera.stopFaceDetection();
        camera.stopPreview();
        camera.release();
        camera = null;
        previewing = false;
    }
}


Modify AndroidManifest.xml, to add permission of "android.permission.CAMERA", and set android:minSdkVersion="14".
<?xml version="1.0" encoding="utf-8"?>
<!-- Camera face detection requires API level 14 (Android 4.0) and the
     CAMERA permission. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
   package="com.exercise.AndroidCamera"
   android:versionCode="1"
   android:versionName="1.0" >

   <uses-sdk android:minSdkVersion="14" />
   <uses-permission android:name="android.permission.CAMERA"/>

   <application
       android:icon="@drawable/ic_launcher"
       android:label="@string/app_name" >
       <activity
           android:name=".AndroidCamera"
           android:label="@string/app_name" >
           <intent-filter>
               <action android:name="android.intent.action.MAIN" />

               <category android:name="android.intent.category.LAUNCHER" />
           </intent-filter>
       </activity>
   </application>

</manifest>


Download the files.

Please note that this is an incomplete exercise. It simply displays the number of faces detected.

Next: - Android 4 Face Detection: Display detected face area

Wednesday, April 18, 2012

Implement ShareActionProvider for Android 4

In Android 4.0 (API Level 14), android.widget.ShareActionProvider is provided for share action. It is responsible for creating views that enable data sharing and also to show a sub menu with sharing activities if the hosting item is placed on the overflow menu.

It's an example of sharing plain text using ShareActionProvider.

Implement ShareActionProvider for Android 4

With share history:

With share history

Create /menu/menu.xml to define our ActionBar, with Share action only.
<!-- Action bar menu with a single Share item backed by the built-in
     ShareActionProvider (wired up in onCreateOptionsMenu()). -->
<menu xmlns:android="http://schemas.android.com/apk/res/android">
<item android:id="@+id/menu_item_share"
android:showAsAction="ifRoom"
android:title="Share"
android:actionProviderClass="android.widget.ShareActionProvider" />
</menu>


Main Java code:
package com.exercise.AndroidShareActionProvider;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ShareActionProvider;

/**
 * Demonstrates android.widget.ShareActionProvider (API 14): the action bar
 * item declared in /menu/menu.xml shares a plain-text URL, and a share
 * history file ranks the most frequently chosen share targets.
 */
public class AndroidShareActionProviderActivity extends Activity {

    private ShareActionProvider myShareActionProvider;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu, menu);
        // Fetch the provider created via android:actionProviderClass and
        // configure its history file and the intent it will offer.
        MenuItem shareItem = menu.findItem(R.id.menu_item_share);
        myShareActionProvider = (ShareActionProvider) shareItem.getActionProvider();
        myShareActionProvider.setShareHistoryFileName(
                ShareActionProvider.DEFAULT_SHARE_HISTORY_FILE_NAME);
        myShareActionProvider.setShareIntent(createShareIntent());
        return true;
    }

    /** Builds the plain-text ACTION_SEND intent offered by the share action. */
    private Intent createShareIntent() {
        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.setType("text/plain");
        intent.putExtra(Intent.EXTRA_TEXT,
                "http://android-er.blogspot.com/");
        return intent;
    }

}


Download the files.