Face Detection - Google ML - Android Studio - Compose
Detect Face using Google/Firebase ML Kit
In this tutorial, we will detect the face(s) in an image. We will get the Bitmap from an image in the drawable folder, but we will also show how you can get the Bitmap from a Uri, an ImageView, etc. (see the short sketch below). Using the ML Kit Face Detection API, you can easily identify key facial features and get the contours of detected faces. Note that the API only detects faces; it doesn't recognize people.
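As a reference, here is a minimal sketch of how you might obtain the Bitmap from a content Uri or from an ImageView instead of the drawable folder. The function names and the uri/imageView parameters are illustrative placeholders, not part of the project below:

import android.content.Context
import android.graphics.Bitmap
import android.graphics.ImageDecoder
import android.net.Uri
import android.os.Build
import android.provider.MediaStore
import android.widget.ImageView
import androidx.core.graphics.drawable.toBitmap

// Bitmap from a content Uri; the copy to ARGB_8888 ensures a software bitmap,
// since ImageDecoder may return a hardware bitmap that cannot be scaled/cropped.
fun bitmapFromUri(context: Context, uri: Uri): Bitmap {
    val bitmap = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
        ImageDecoder.decodeBitmap(ImageDecoder.createSource(context.contentResolver, uri))
    } else {
        @Suppress("DEPRECATION")
        MediaStore.Images.Media.getBitmap(context.contentResolver, uri)
    }
    return bitmap.copy(Bitmap.Config.ARGB_8888, false)
}

// Bitmap from an ImageView (toBitmap() comes from androidx.core:core-ktx)
fun bitmapFromImageView(imageView: ImageView): Bitmap {
    return imageView.drawable.toBitmap()
}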
With the ML Kit Face Detection API, you can easily get the information you need to perform tasks like embellishing selfies and portraits or generating avatars from the user's photo. Since the API can perform face detection in real time, you can also use it in applications like video chat or games that respond to the player's expressions.
We will use the Android Studio IDE and the Kotlin language with Jetpack Compose.
Code:
build.gradle
dependencies {
    //---- Other libraries

    implementation("com.google.mlkit:face-detection:16.1.7")

    //--- Other libraries
}
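If your module still uses the Groovy DSL, the equivalent dependency line would be:

implementation 'com.google.mlkit:face-detection:16.1.7'

Note that ML Kit also offers an unbundled variant of this detector (the com.google.android.gms:play-services-mlkit-face-detection artifact), which downloads the model through Google Play services instead of bundling it into the APK; check the official ML Kit documentation for its current version if you prefer that option.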
MainActivity.kt
package com.technifysoft.myapplication

import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.compose.foundation.Image
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.rememberScrollState
import androidx.compose.foundation.verticalScroll
import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.asImageBitmap
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.core.graphics.scale
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.face.Face
import com.google.mlkit.vision.face.FaceDetection
import com.google.mlkit.vision.face.FaceDetector
import com.google.mlkit.vision.face.FaceDetectorOptions
import com.technifysoft.myapplication.ui.theme.MyApplicationTheme

class MainActivityCompose : ComponentActivity() {

    private lateinit var detector: FaceDetector

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)

        // Configure the ML Kit FaceDetector
        val realTimeFdo = FaceDetectorOptions.Builder()
            .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
            .build()

        detector = FaceDetection.getClient(realTimeFdo)

        // Load the Bitmap from an image in the drawable folder
        val bitmap = BitmapFactory.decodeResource(resources, R.drawable.atifpervaiz)

        setContent {
            FaceDetectorScreen(bitmap, detector)
        }
    }
}

@Composable
fun FaceDetectorScreen(
    originalBitmap: Bitmap,
    detector: FaceDetector
) {
    val context = LocalContext.current
    var croppedBitmap by remember { mutableStateOf<Bitmap?>(null) }
    var isProcessing by remember { mutableStateOf(false) }
    val scrollState = rememberScrollState()

    Column(
        modifier = Modifier
            .fillMaxSize()
            .verticalScroll(scrollState)
            .padding(16.dp),
        horizontalAlignment = Alignment.CenterHorizontally
    ) {
        // Original Image
        Image(
            bitmap = originalBitmap.asImageBitmap(),
            contentDescription = "Original Image",
            modifier = Modifier
                .fillMaxWidth()
                .padding(8.dp)
        )

        // Detect Face Button
        Button(
            onClick = {
                if (!isProcessing) {
                    isProcessing = true
                    analyzePhoto(
                        bitmap = originalBitmap,
                        detector = detector,
                        onSuccess = { faces ->
                            Log.d("FACE_DETECT_TAG", "Detected ${faces.size} face(s)")
                            if (faces.isNotEmpty()) {
                                val cropped = cropDetectedFace(originalBitmap, faces)
                                croppedBitmap = cropped
                                Toast.makeText(context, "Face Detected!", Toast.LENGTH_SHORT).show()
                            } else {
                                Toast.makeText(context, "No face detected", Toast.LENGTH_SHORT).show()
                            }
                            isProcessing = false
                        },
                        onFailure = { e ->
                            Toast.makeText(context, "Error: ${e.message}", Toast.LENGTH_SHORT).show()
                            isProcessing = false
                        }
                    )
                }
            },
            modifier = Modifier
                .fillMaxWidth()
                .padding(vertical = 16.dp)
        ) {
            Text(if (isProcessing) "Detecting..." else "Detect Face")
        }

        // Cropped Face Image
        croppedBitmap?.let { bitmap ->
            Image(
                bitmap = bitmap.asImageBitmap(),
                contentDescription = "Cropped Face",
                modifier = Modifier
                    .fillMaxWidth()
                    .padding(8.dp)
            )
        }
    }
}

// Downscaling factor applied before detection to speed up processing
private const val SCALING_FACTOR = 10

private fun analyzePhoto(
    bitmap: Bitmap,
    detector: FaceDetector,
    onSuccess: (List<Face>) -> Unit,
    onFailure: (Exception) -> Unit
) {
    // Scale the bitmap down so detection runs faster
    val smallerBitmap = bitmap.scale(
        bitmap.width / SCALING_FACTOR,
        bitmap.height / SCALING_FACTOR,
        false
    )

    val inputImage = InputImage.fromBitmap(smallerBitmap, 0)

    detector.process(inputImage)
        .addOnSuccessListener { faces ->
            // Rescale bounding boxes back to the original size before cropping
            val scaledFaces = faces.map { face ->
                val rect = face.boundingBox
                rect.set(
                    rect.left * SCALING_FACTOR,
                    rect.top * SCALING_FACTOR,
                    rect.right * SCALING_FACTOR,
                    rect.bottom * SCALING_FACTOR
                )
                face
            }
            onSuccess(scaledFaces)
        }
        .addOnFailureListener { e ->
            Log.e("FACE_DETECT_TAG", "Error detecting face", e)
            onFailure(e)
        }
}

private fun cropDetectedFace(bitmap: Bitmap, faces: List<Face>): Bitmap? {
    if (faces.isEmpty()) return null

    // A face was detected; get the cropped face as a Bitmap.
    // There might be multiple faces; loop over the list if you want all of them.
    // Here only the first face is handled (a multi-face variant is sketched after this code).
    val rect = faces[0].boundingBox
    val x = rect.left.coerceAtLeast(0)
    val y = rect.top.coerceAtLeast(0)
    val width = rect.width()
    val height = rect.height()

    // Clamp the crop rectangle to the bitmap bounds
    val croppedBitmap = Bitmap.createBitmap(
        bitmap,
        x,
        y,
        if (x + width > bitmap.width) bitmap.width - x else width,
        if (y + height > bitmap.height) bitmap.height - y else height
    )
    return croppedBitmap
}

/**
 * GreetingPreview is a composable function for previewing the MainUI in Android Studio.
 * It is annotated with @Preview to enable live preview.
 */
@Preview(showBackground = true)
@Composable
fun GreetingPreview() {
    MyApplicationTheme {
        //FaceDetectorScreen()
    }
}
