Commit b0015371 authored by kamon

Improved the display of the bounding boxes and species names, with a slight remaining bug in the case of unrecognized species
parent 803fcc57
1 merge request: !12 Improvement of the display of the bounding boxes and species names + addition of comments for the Javadoc
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="21" />
<bytecodeTargetLevel target="17" />
</component>
</project>
\ No newline at end of file
@@ -4,10 +4,10 @@
<selectionStates>
<SelectionState runConfigName="app">
<option name="selectionMode" value="DROPDOWN" />
<DropdownSelection timestamp="2025-01-04T02:38:58.283128700Z">
<DropdownSelection timestamp="2025-01-07T00:07:51.508741600Z">
<Target type="DEFAULT_BOOT">
<handle>
<DeviceId pluginId="PhysicalDevice" identifier="serial=R39M30F5B2T" />
<DeviceId pluginId="LocalEmulator" identifier="path=C:\Users\Prosp\.android\avd\Pixel_8a_API_35.avd" />
</handle>
</Target>
</DropdownSelection>
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jbr-21" project-jdk-type="JavaSDK">
<component name="ProjectRootManager" version="2" languageLevel="JDK_17" default="true" project-jdk-name="jbr-17" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
@@ -30,7 +30,6 @@ public class MainActivity extends AppCompatActivity {
setContentView(R.layout.activity_main);
int delayMillis = 3000;
new Handler().postDelayed(new Runnable() {
@@ -46,44 +45,6 @@ public class MainActivity extends AppCompatActivity {
}
public void testPredict(View v){
TextView textView = findViewById(R.id.testInference);
TestModeleTflite testModeleTflite = null;
try {
testModeleTflite = new TestModeleTflite(this);
String currentDir = System.getProperty("user.dir");
String imagePath = "mouette_rieuse.jpg";
Bitmap inputImage = TestModeleTflite.loadImage(imagePath, this);
if (inputImage == null) {
System.out.println("Failed to load the image. Check the file path.");
return;
}
// Perform prediction
TensorBuffer outputBuffer = testModeleTflite.predict(inputImage);
// Process the output to get bounding boxes
List<BoundingBox> boxes = testModeleTflite.processOutput(outputBuffer);
if (boxes == null || boxes.isEmpty()) {
textView.setText("No objects detected in the image.");
} else {
// Output results to the console
for (BoundingBox box : boxes) {
System.out.println("Class: " + box.clsName);
System.out.println("Confidence: " + box.cnf);
System.out.println("----------------------------");
textView.setText("Class: " + box.clsName + " Confidence : " + box.cnf);
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
//textView.setText("bonjour");
}
}
@@ -4,7 +4,6 @@ import static android.graphics.Paint.*;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
@@ -20,8 +19,6 @@ import org.tensorflow.lite.support.image.TensorImage;
import org.tensorflow.lite.support.tensorbuffer.TensorBuffer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -31,6 +28,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A class for performing species recognition using a TensorFlow Lite model.
* Handles model loading, inference, output processing, and annotation of input images.
*/
public class TestModeleTflite {
private static final String MODEL_PATH = "model_float32.tflite";
private static final String LABEL_PATH = "labels.txt";
@@ -50,16 +51,22 @@ public class TestModeleTflite {
private List<String> labels = new ArrayList<>();
private ImageProcessor imageProcessor;
/**
* Initializes the TensorFlow Lite model interpreter and loads associated resources.
*
* @param context The application context for accessing assets.
* @throws IOException If the model or label files cannot be loaded.
*/
public TestModeleTflite(Context context) throws IOException {
// Load the model
// Load the TensorFlow Lite model
MappedByteBuffer model = FileUtil.loadMappedFile(context, MODEL_PATH);
// Set interpreter options
// Configure the interpreter with multithreading options
Interpreter.Options options = new Interpreter.Options();
options.setNumThreads(4);
interpreter = new Interpreter(model, options);
// Get tensor shapes
// Extract tensor shapes from the model
int[] inputShape = interpreter.getInputTensor(0).shape();
int[] outputShape = interpreter.getOutputTensor(0).shape();
@@ -68,7 +75,7 @@ public class TestModeleTflite {
numChannel = outputShape[1];
numElements = outputShape[2];
// Load labels
// Load the label list
try (InputStream inputStream = context.getAssets().open(LABEL_PATH);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
String line;
@@ -77,54 +84,95 @@ public class TestModeleTflite {
}
}
// Initialize image processor
// Configure preprocessing steps for the input image
imageProcessor = new ImageProcessor.Builder()
.add(new NormalizeOp(INPUT_MEAN, INPUT_STANDARD_DEVIATION))
.add(new CastOp(INPUT_IMAGE_TYPE))
.build();
}
/**
* Prepares an input image and performs inference using the TensorFlow Lite model.
*
* @param bitmap The input image to analyze.
* @return A TensorBuffer containing the model's raw output.
*/
public TensorBuffer predict(Bitmap bitmap) {
// Resize bitmap to model's input size
Bitmap resizedBitmap = Bitmap.createScaledBitmap(bitmap, tensorWidth, tensorHeight, false);
// Preprocess input image
TensorImage tensorImage = new TensorImage(DataType.FLOAT32);
tensorImage.load(resizedBitmap);
TensorImage processedImage = imageProcessor.process(tensorImage);
// Prepare output buffer
TensorBuffer outputBuffer = TensorBuffer.createFixedSize(
new int[]{1, numChannel, numElements},
OUTPUT_IMAGE_TYPE
);
// Run inference
interpreter.run(processedImage.getBuffer(), outputBuffer.getBuffer());
return outputBuffer;
}
public List<String> getLabels() {
return labels;
}
public void close() {
if (interpreter != null) {
interpreter.close();
interpreter = null;
}
}
/**
* Processes the raw model output to generate bounding boxes for detected objects.
*
* @param outputBuffer The TensorBuffer containing raw model output.
* @return A list of bounding boxes for detected objects.
*/
public List<BoundingBox> processOutput(TensorBuffer outputBuffer) {
float[] outputArray = outputBuffer.getFloatArray();
return bestBox(outputArray);
}
/**
* Annotates an input image with bounding boxes for detected objects.
*
* @param inputImage The original image to annotate.
* @param boxes The bounding boxes of detected objects.
* @return A new image with bounding boxes drawn.
*/
public Bitmap annotateImage(Bitmap inputImage, List<BoundingBox> boxes) {
return drawBoundingBoxes(inputImage, boxes);
}
/**
* Groups detections by class and constructs a structured output image and metadata.
*
* @param inputImage The input image for recognition.
* @return An OutputPredictedImage containing the annotated image and prediction results.
* @throws NoSpeciesRecognizedException If no objects are detected.
*/
public OutputPredictedImage recognizeSpeciesClass(Bitmap inputImage) throws NoSpeciesRecognizedException {
Map<String, RecognizeSpecie> predictionResults = new HashMap<>();
TensorBuffer outputBuffer = predict(inputImage);
List<BoundingBox> boxes = processOutput(outputBuffer);
if (boxes != null) {
System.out.println("not empty");
for (BoundingBox box : boxes) {
predictionResults.computeIfAbsent(box.clsName, k -> new RecognizeSpecie(k, new ArrayList<>()))
.getSpecieBoxes().add(box);
}
Bitmap annotatedImage = annotateImage(inputImage, boxes);
return new OutputPredictedImage(annotatedImage, predictionResults);
} else {
throw new NoSpeciesRecognizedException();
}
}
/**
* Releases the resources held by the TensorFlow Lite interpreter.
*/
public void close() {
if (interpreter != null) {
interpreter.close();
interpreter = null;
}
}
/**
* Filters bounding boxes based on confidence and non-maximum suppression (NMS).
*/
private List<BoundingBox> bestBox(float[] array) {
List<BoundingBox> boundingBoxes = new ArrayList<>();
@@ -161,26 +209,29 @@ public class TestModeleTflite {
}
}
if (boundingBoxes.isEmpty()) return null;
return applyNMS(boundingBoxes);
}
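Most of the body of bestBox is collapsed in this hunk. As a point of reference only, the sketch below shows one common way to decode a detector output shaped [1, numChannel, numElements] with per-candidate confidence filtering; it assumes a YOLO-style channel layout (cx, cy, w, h followed by one score per class) and a caller-supplied confidence threshold, and it is not the implementation hidden in the collapsed lines.
// Illustrative sketch, not this project's bestBox. Assumes a YOLO-style layout:
// for each of the numElements candidates, channels 0..3 are cx, cy, w, h and the
// remaining channels are class scores, stored channel-major in the flat array.
// Each returned entry is {x1, y1, x2, y2, score, classIndex} in normalized units.
static List<float[]> decodeCandidatesSketch(float[] output, int numChannel,
                                            int numElements, float confThreshold) {
    List<float[]> candidates = new ArrayList<>();
    for (int c = 0; c < numElements; c++) {
        // Pick the highest class score for this candidate
        float bestScore = -Float.MAX_VALUE;
        int bestClass = -1;
        for (int ch = 4; ch < numChannel; ch++) {
            float score = output[ch * numElements + c];
            if (score > bestScore) {
                bestScore = score;
                bestClass = ch - 4;
            }
        }
        if (bestScore < confThreshold) continue;
        // Convert center/size to corner coordinates
        float cx = output[c];
        float cy = output[numElements + c];
        float w = output[2 * numElements + c];
        float h = output[3 * numElements + c];
        candidates.add(new float[]{cx - w / 2f, cy - h / 2f,
                cx + w / 2f, cy + h / 2f, bestScore, bestClass});
    }
    return candidates;
}
The surviving lines above show that the real method returns null when nothing passes the filter and otherwise hands the boxes to applyNMS.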
/**
* Applies non-maximum suppression (NMS) to eliminate overlapping boxes.
*/
private List<BoundingBox> applyNMS(List<BoundingBox> boxes) {
List<BoundingBox> sortedBoxes = new ArrayList<>(boxes);
sortedBoxes.sort((b1, b2) -> Float.compare(b2.cnf, b1.cnf)); // Sort by confidence descending
sortedBoxes.sort((b1, b2) -> Float.compare(b2.cnf, b1.cnf));
List<BoundingBox> selectedBoxes = new ArrayList<>();
while (!sortedBoxes.isEmpty()) {
BoundingBox first = sortedBoxes.remove(0);
selectedBoxes.add(first);
sortedBoxes.removeIf(nextBox -> calculateIoU(first, nextBox) >= IOU_THRESHOLD);
}
return selectedBoxes;
}
/**
* Calculates the Intersection over Union (IoU) between two bounding boxes.
*/
private float calculateIoU(BoundingBox box1, BoundingBox box2) {
float x1 = Math.max(box1.x1, box2.x1);
float y1 = Math.max(box1.y1, box2.y1);
@@ -194,8 +245,10 @@ public class TestModeleTflite {
return intersectionArea / (box1Area + box2Area - intersectionArea);
}
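The middle of calculateIoU is collapsed by the hunk marker above. For reference, a standard IoU computation over corner coordinates (the same x1/y1/x2/y2 fields used throughout this class) looks like the following sketch; the collapsed lines presumably compute the same intersection and per-box areas, but that is an assumption, not a copy of them.
// Standard Intersection-over-Union for two axis-aligned boxes given as corners.
static float iouSketch(float ax1, float ay1, float ax2, float ay2,
                       float bx1, float by1, float bx2, float by2) {
    float ix1 = Math.max(ax1, bx1);
    float iy1 = Math.max(ay1, by1);
    float ix2 = Math.min(ax2, bx2);
    float iy2 = Math.min(ay2, by2);
    // Clamp to zero so disjoint boxes yield an intersection area of 0
    float intersectionArea = Math.max(0f, ix2 - ix1) * Math.max(0f, iy2 - iy1);
    float areaA = (ax2 - ax1) * (ay2 - ay1);
    float areaB = (bx2 - bx1) * (by2 - by1);
    return intersectionArea / (areaA + areaB - intersectionArea);
}
As a quick check, two unit squares offset by half their width overlap in an area of 0.5 against a union of 1.5, giving an IoU of 1/3.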
/**
* Draws bounding boxes on an image.
*/
/*
private Bitmap drawBoundingBoxes(Bitmap bitmap, List<BoundingBox> boxes) {
Bitmap mutableBitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
Canvas canvas = new Canvas(mutableBitmap);
@@ -204,7 +257,7 @@ public class TestModeleTflite {
paint.setColor(Color.RED);
paint.setStyle(Style.STROKE);
paint.setStrokeWidth(8f);
/*
Paint textPaint = new Paint();
textPaint.setColor(Color.WHITE);
textPaint.setTextSize(40f);
@@ -218,89 +271,65 @@ public class TestModeleTflite {
box.y2 * mutableBitmap.getHeight()
);
canvas.drawRect(rect, paint);
canvas.drawText(box.clsName, rect.left, rect.bottom, textPaint);
//canvas.drawText(new StringBuilder().append("prob : ").append(String.format("%.2f", box.cnf)).toString(), rect.left, rect.top, textPaint);
//canvas.drawText(box.clsName, rect.left, rect.bottom, textPaint);
}
return mutableBitmap;
}
}*/
/**
* Loads an image from the specified file path and converts it to a Bitmap.
*
* @param fileName Path to the image file.
* @return Bitmap representation of the image or null if an error occurs.
*/
public static Bitmap loadImage(String fileName, Context context) {
try (InputStream inputStream = context.getAssets().open(fileName)) {
return BitmapFactory.decodeStream(inputStream);
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
Removed in this hunk:

public Bitmap recognizeSpecies(Bitmap inputImage){
    TensorBuffer outputBuffer = predict(inputImage);
    List<BoundingBox> boxes = processOutput(outputBuffer);
    return annotateImage(inputImage, boxes);
}

public OutputPredictedImage recognizeSpeciesClass(Bitmap inputImage) throws NoSpeciesRecognizedException {
    //ArrayList<String> speciesNames = new ArrayList<>();
    List<RecognizeSpecie> specieBoxes = new ArrayList<RecognizeSpecie>();
    Map<String, RecognizeSpecie> predictionResults = new HashMap<>();
    TensorBuffer outputBuffer = predict(inputImage);
    List<BoundingBox> boxes = processOutput(outputBuffer);
    if(boxes != null){
        for(BoundingBox box:boxes){
            if(predictionResults.containsKey(box.clsName)){
                predictionResults.get(box.clsName).getSpecieBoxes().add(box);
            }else{
                predictionResults.put(box.clsName, new RecognizeSpecie(box.clsName, new ArrayList<BoundingBox>()));
                predictionResults.get(box.clsName).getSpecieBoxes().add(box);
            }
        }
        Bitmap annotatedImage = annotateImage(inputImage, boxes);
        return new OutputPredictedImage(annotatedImage, predictionResults);
    }else throw new NoSpeciesRecognizedException();
}

public static void main(String [] args){
    /*
    TestModeleTflite testModeleTflite = new TestModeleTflite(this);
    String imagePath = "../../res/drawable/bernache_cravant.jpg";
    Bitmap inputImage = TestModeleTflite.loadImage(imagePath);
    if (inputImage == null) {
        System.out.println("Failed to load the image. Check the file path.");
        return;
    }
    // Perform prediction
    TensorBuffer outputBuffer = testModeleTflite.predict(inputImage);
    // Process the output to get bounding boxes
    List<BoundingBox> boxes = testModeleTflite.processOutput(outputBuffer);
    if (boxes == null || boxes.isEmpty()) {
        System.out.println("No objects detected in the image.");
    } else {
        // Output results to the console
        for (BoundingBox box : boxes) {
            System.out.println("Class: " + box.clsName);
            System.out.println("Confidence: " + box.cnf);
            System.out.println("----------------------------");
        }
    }*/
}

Added in this hunk:

private Bitmap drawBoundingBoxes(Bitmap bitmap, List<BoundingBox> boxes) {
    Bitmap mutableBitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
    Canvas canvas = new Canvas(mutableBitmap);

    // Scale stroke width dynamically based on image resolution
    float strokeWidth = Math.max(mutableBitmap.getWidth(), mutableBitmap.getHeight()) / 470.0f;

    Paint paint = new Paint();
    paint.setColor(Color.RED);
    paint.setStyle(Style.STROKE);
    paint.setStrokeWidth(strokeWidth); // Dynamically set stroke width

    Paint textPaint = new Paint();
    textPaint.setColor(Color.WHITE);
    textPaint.setStyle(Style.FILL);

    for (BoundingBox box : boxes) {
        RectF rect = new RectF(
                box.x1 * mutableBitmap.getWidth(),
                box.y1 * mutableBitmap.getHeight(),
                box.x2 * mutableBitmap.getWidth(),
                box.y2 * mutableBitmap.getHeight()
        );
        // Draw the bounding box
        canvas.drawRect(rect, paint);

        // Dynamically adjust text size to fit within the box
        float boxWidth = rect.width();
        float boxHeight = rect.height();
        float maxTextSize = boxHeight * 0.2f; // Limit text size to a fraction of the box height
        textPaint.setTextSize(maxTextSize);

        // Measure text width and adjust if necessary
        float textWidth = textPaint.measureText(box.clsName);
        if (textWidth > boxWidth) {
            textPaint.setTextSize(maxTextSize * (boxWidth / textWidth));
        }

        // Draw the text at the bottom of the box
        canvas.drawText(box.clsName, rect.left, rect.bottom - 5, textPaint); // Adjust -5 for padding
    }

    return mutableBitmap;
}

/**
 * Main method for debugging or testing purposes.
 */
public static void main(String[] args) {
    String currentDir = System.getProperty("user.dir");
    System.out.println("Current working directory: " + currentDir);
}
}
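Putting the pieces together, the public surface of TestModeleTflite is exercised in the pattern below. This is a usage sketch based on the testPredict method removed from MainActivity in this commit; it assumes it runs inside an Activity (so that passing this as the Context is valid) and reuses the mouette_rieuse.jpg asset referenced there.
// Usage sketch from inside an Activity, mirroring the removed MainActivity.testPredict()
try {
    TestModeleTflite model = new TestModeleTflite(this);    // loads the model and labels
    Bitmap input = TestModeleTflite.loadImage("mouette_rieuse.jpg", this);
    if (input != null) {
        // Runs predict(), processOutput() and annotateImage() internally; the result
        // holds the annotated bitmap plus the detections grouped by species name
        OutputPredictedImage result = model.recognizeSpeciesClass(input);
    }
    model.close();                                           // releases the interpreter
} catch (IOException e) {
    throw new RuntimeException(e);                           // model or label file could not be read
} catch (NoSpeciesRecognizedException e) {
    e.printStackTrace();                                     // no boxes passed the confidence filter
}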