Sunday, August 16, 2015

Google Play services Face Detection, get Landmarks (eyes, nose, etc.)

The getLandmarks() method of com.google.android.gms.vision.face.Face returns a list of Landmarks (eyes, nose, etc.) found on the face. A landmark detector must be specified via setLandmarkType(int) for landmarks to be detected, and the landmark detector may not find all possible landmarks on any given face.


This example is modified from the post "Face Detection with Google Play services, Mobile Vision API", with a fix for "FaceDetector error: Cannot resolve method setTrackingEnabled(boolean)", and adds the feature of detecting landmarks on the detected faces.


Set the detector to find all landmarks by calling setLandmarkType(FaceDetector.ALL_LANDMARKS) when building the FaceDetector with FaceDetector.Builder.

Then call getLandmarks() on each detected face.
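
Each Landmark also reports which facial feature it is via getType(), which returns constants such as Landmark.LEFT_EYE, Landmark.RIGHT_EYE or Landmark.NOSE_BASE. Here is a minimal sketch of telling the landmarks apart; the Face variable thisFace is assumed to come from FaceDetector.detect(), as in the full example below:

    for (Landmark landmark : thisFace.getLandmarks()) {
        PointF pos = landmark.getPosition();
        switch (landmark.getType()) {
            case Landmark.LEFT_EYE:
            case Landmark.RIGHT_EYE:
                //an eye; pos is its center
                break;
            case Landmark.NOSE_BASE:
                //base of the nose
                break;
            default:
                //mouth, cheeks, ears, etc.
                break;
        }
    }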

com.example.androidfacedetection.MainActivity.java
package com.example.androidfacedetection;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.RectF;
import android.graphics.drawable.BitmapDrawable;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.SparseArray;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;

import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;
import com.google.android.gms.vision.face.Landmark;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

public class MainActivity extends AppCompatActivity {

    private static final int RQS_LOADIMAGE = 1;
    private Button btnLoad, btnDetFace;
    private ImageView imgView;
    private Bitmap myBitmap;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        btnLoad = (Button)findViewById(R.id.btnLoad);
        btnDetFace = (Button)findViewById(R.id.btnDetectFace);
        imgView = (ImageView)findViewById(R.id.imgview);

        btnLoad.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v) {
                Intent intent = new Intent();
                intent.setType("image/*");
                intent.setAction(Intent.ACTION_GET_CONTENT);
                intent.addCategory(Intent.CATEGORY_OPENABLE);
                startActivityForResult(intent, RQS_LOADIMAGE);
            }
        });

        btnDetFace.setOnClickListener(new View.OnClickListener(){
            @Override
            public void onClick(View v) {
                if(myBitmap == null){
                    Toast.makeText(MainActivity.this,
                            "myBitmap == null",
                            Toast.LENGTH_LONG).show();
                }else{
                    detectFace();
                    Toast.makeText(MainActivity.this,
                            "Done",
                            Toast.LENGTH_LONG).show();
                }
            }
        });
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == RQS_LOADIMAGE
                && resultCode == RESULT_OK){

            if(myBitmap != null){
                myBitmap.recycle();
            }

            try {
                InputStream inputStream =
                        getContentResolver().openInputStream(data.getData());
                myBitmap = BitmapFactory.decodeStream(inputStream);
                inputStream.close();
                imgView.setImageBitmap(myBitmap);

            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }

        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /*
    reference:
    https://search-codelabs.appspot.com/codelabs/face-detection
     */
    private void detectFace(){

        //Create a Paint object for drawing with
        Paint myRectPaint = new Paint();
        myRectPaint.setStrokeWidth(5);
        myRectPaint.setColor(Color.GREEN);
        myRectPaint.setStyle(Paint.Style.STROKE);

        Paint landmarksPaint = new Paint();
        landmarksPaint.setStrokeWidth(10);
        landmarksPaint.setColor(Color.RED);
        landmarksPaint.setStyle(Paint.Style.STROKE);

        //Create a Canvas object for drawing on
        Bitmap tempBitmap = Bitmap.createBitmap(myBitmap.getWidth(), myBitmap.getHeight(), Bitmap.Config.RGB_565);
        Canvas tempCanvas = new Canvas(tempBitmap);
        tempCanvas.drawBitmap(myBitmap, 0, 0, null);

        //Detect the Faces


        //!!! This was the cause of the error
        //"Cannot resolve method setTrackingEnabled(boolean)":
        //setTrackingEnabled() is a method of FaceDetector.Builder, not of FaceDetector,
        //so it has to be called on the builder before build().
        //FaceDetector faceDetector = new FaceDetector.Builder(getApplicationContext()).build();
        //faceDetector.setTrackingEnabled(false);

        FaceDetector faceDetector =
                new FaceDetector.Builder(getApplicationContext())
                .setTrackingEnabled(false)
                        .setLandmarkType(FaceDetector.ALL_LANDMARKS)
                        .build();
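
        //Optional check: on the first run the native face model may still be
        //downloading; isOperational() reports whether the detector is ready.
        if (!faceDetector.isOperational()) {
            Toast.makeText(MainActivity.this,
                    "FaceDetector is not yet operational",
                    Toast.LENGTH_LONG).show();
            return;
        }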

        Frame frame = new Frame.Builder().setBitmap(myBitmap).build();
        SparseArray<Face> faces = faceDetector.detect(frame);

        //Draw Rectangles on the Faces
        for(int i=0; i<faces.size(); i++) {
            Face thisFace = faces.valueAt(i);
            float x1 = thisFace.getPosition().x;
            float y1 = thisFace.getPosition().y;
            float x2 = x1 + thisFace.getWidth();
            float y2 = y1 + thisFace.getHeight();
            tempCanvas.drawRoundRect(new RectF(x1, y1, x2, y2), 2, 2, myRectPaint);

            //get Landmarks for this face
            List<Landmark> landmarks = thisFace.getLandmarks();
            for(int l=0; l<landmarks.size(); l++){
                PointF pos = landmarks.get(l).getPosition();
                tempCanvas.drawPoint(pos.x, pos.y, landmarksPaint);
            }
        }
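
        //Release the detector's native resources now that detection is done.
        faceDetector.release();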

        imgView.setImageDrawable(new BitmapDrawable(getResources(),tempBitmap));

    }
}


For the other files, AndroidManifest.xml and layout/activity_main.xml, refer to the post "Face Detection with Google Play services, Mobile Vision API".

Next:
Google Play services Face Detection, detect Smiling

1 comment:

Unknown said...

hey ERIC, suppose we try to make it detect the pic we have just taken and say how many faces are in the pic. I wrote some code, but it has a flaw: it detects the previous pic we took, not the current one.

could you show me what part of the code should be modified?

package com.echessa.facedetectiondemo;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.util.SparseArray;

import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;

import java.io.File;
import java.io.IOException;


public class MainActivity extends AppCompatActivity {

    TextView textView;
    Button button;
    ImageView imageView;
    static final int CAM_REQUEST = 1;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        button = (Button) findViewById(R.id.button);

        imageView = (ImageView) findViewById(R.id.imageView);
        textView = (TextView) findViewById(R.id.textView);

        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {

                Intent camera_intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE_SECURE);
                File file = getFile();
                camera_intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(file));
                startActivityForResult(camera_intent, CAM_REQUEST);

                doDetection();
            }
        });

    }

    private File getFile() {
        File folder = new File("sdcard/camera_app");

        if (!folder.exists()) {
            folder.mkdir();
        }

        File image_file = new File(folder, "cam_image.jpg");
        return image_file;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        String path = "sdcard/camera_app/cam_image.jpg";
        imageView.setImageDrawable(Drawable.createFromPath(path));
    }

    public void doDetection() {

        final String imageInSD = "sdcard/camera_app/cam_image.jpg";
        Bitmap bitmap = BitmapFactory.decodeFile(imageInSD);

        FaceDetector detector = new FaceDetector.Builder(getApplicationContext())
                .setTrackingEnabled(false)
                .build();

        // Create a frame from the bitmap and run face detection on the frame.
        Frame frame = new Frame.Builder().setBitmap(bitmap).build();
        SparseArray faces = detector.detect(frame);

        TextView textView = (TextView) findViewById(R.id.textView);
        textView.setText(faces.size() + "faces detected");

        detector.release();
    }
}