Skip to content

Commit bf4b76c

Browse files
DPigeon
authored and committed
#10: Use the app offline
1 parent fd9e3ba commit bf4b76c

2 files changed

Lines changed: 28 additions & 24 deletions

File tree

app/src/main/java/com/ctext/MainActivity.java

Lines changed: 27 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -376,16 +376,19 @@ else if (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL)
376376
view.getContext().getDrawable(R.drawable.objects_detection).clearColorFilter();
377377
view.invalidate();
378378
if (currentMode != Mode.ObjectDetection) {
379-
currentMode = Mode.ObjectDetection;
380-
speechDetectionImageView.setImageResource(R.drawable.speech_detection);
381-
objectDetectionImageView.setImageResource(R.drawable.objects_detection_enabled);
382-
previewImageView.setImageDrawable(null);
383-
rebindPreview();
384-
previewImageView.setVisibility(View.VISIBLE);
385-
speechTextView.setVisibility(View.INVISIBLE);
386-
//audioImageView.setVisibility(View.INVISIBLE);
387-
sharedPreferenceHelper.saveProfile(new Profile(getInputLanguage(), getOutputLanguage(), lensFacing, currentMode.ordinal()));
388-
Toast.makeText(this, "Switched to Object Detector Mode!", Toast.LENGTH_LONG).show();
379+
currentMode = Mode.ObjectDetection;
380+
speechDetectionImageView.setImageResource(R.drawable.speech_detection);
381+
objectDetectionImageView.setImageResource(R.drawable.objects_detection_enabled);
382+
previewImageView.setImageDrawable(null);
383+
rebindPreview();
384+
previewImageView.setVisibility(View.VISIBLE);
385+
speechTextView.setVisibility(View.INVISIBLE);
386+
//audioImageView.setVisibility(View.INVISIBLE);
387+
sharedPreferenceHelper.saveProfile(new Profile(getInputLanguage(), getOutputLanguage(), lensFacing, currentMode.ordinal()));
388+
if (connectedToInternet())
389+
Toast.makeText(this, "Switched to Object Detector Mode!", Toast.LENGTH_LONG).show();
390+
else
391+
Toast.makeText(this, "You must be connected to internet to use the Object Detector Mode!", Toast.LENGTH_LONG).show();
389392
} else
390393
Toast.makeText(this, "You are already in this mode!", Toast.LENGTH_LONG).show();
391394
}
@@ -404,15 +407,18 @@ else if (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL)
404407
view.getContext().getDrawable(R.drawable.speech_detection).clearColorFilter();
405408
view.invalidate();
406409
if (currentMode != Mode.SpeechRecognition) {
407-
currentMode = Mode.SpeechRecognition;
408-
faceDetected = false; // Reseted and ready to fire the face check anim
409-
speechDetectionImageView.setImageResource(R.drawable.speech_detection_enabled);
410-
objectDetectionImageView.setImageResource(R.drawable.objects_detection);
411-
previewImageView.setVisibility(View.INVISIBLE);
412-
rebindPreview();
413-
//speechTextView.setVisibility(View.VISIBLE);
414-
sharedPreferenceHelper.saveProfile(new Profile(getInputLanguage(), getOutputLanguage(), lensFacing, currentMode.ordinal()));
415-
Toast.makeText(this, "Switched to Speech Translator Mode!", Toast.LENGTH_LONG).show();
410+
currentMode = Mode.SpeechRecognition;
411+
faceDetected = false; // Reseted and ready to fire the face check anim
412+
speechDetectionImageView.setImageResource(R.drawable.speech_detection_enabled);
413+
objectDetectionImageView.setImageResource(R.drawable.objects_detection);
414+
previewImageView.setVisibility(View.INVISIBLE);
415+
rebindPreview();
416+
//speechTextView.setVisibility(View.VISIBLE);
417+
sharedPreferenceHelper.saveProfile(new Profile(getInputLanguage(), getOutputLanguage(), lensFacing, currentMode.ordinal()));
418+
if (connectedToInternet())
419+
Toast.makeText(this, "Switched to Speech Translator Mode!", Toast.LENGTH_LONG).show();
420+
else
421+
Toast.makeText(this, "You must be connected to internet to use the Speech Detection Mode!", Toast.LENGTH_LONG).show();
416422
} else
417423
Toast.makeText(this, "You are already in this mode!", Toast.LENGTH_LONG).show();
418424
}
@@ -508,15 +514,14 @@ protected void bindPreview(@NonNull ProcessCameraProvider cameraProvider, int le
508514
if (image.getImage() == null)
509515
return;
510516

511-
if (currentMode == Mode.SpeechRecognition) {
512-
// Currently only looks at the first image
517+
if (currentMode == Mode.SpeechRecognition && connectedToInternet()) {
513518
graphicOverlay.clear(); // Always destroy the object graphic overlays
514519
FaceDetection faceDetection = new FaceDetection(graphicOverlay,this);
515520
if (!faceProcessing) { // Throttle the calls
516521
faceProcessing = true;
517522
faceDetection.analyzeImage(image);
518523
}
519-
} else if (currentMode == Mode.ObjectDetection) {
524+
} else if (currentMode == Mode.ObjectDetection && connectedToInternet()) {
520525
ObjectDetection objectDetection = new ObjectDetection(graphicOverlay, this);
521526
// We get the textureView to get the bitmap image every time for better orientation
522527
View surfaceOrTexture = previewView.getChildAt(0);

app/src/main/java/com/ctext/facedetection/FaceDetection.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import android.media.Image;
55
import android.util.Log;
66

7-
import com.ctext.facedetection.FaceOverlay;
87
import com.ctext.utils.GraphicOverlay;
98
import com.ctext.utils.Utils;
109
import com.google.android.gms.tasks.Task;
@@ -56,10 +55,10 @@ public interface Callback {
5655
@SuppressLint("UnsafeExperimentalUsageError")
5756
public void analyzeImage(ImageProxy image) {
5857
Image mediaImage = image.getImage();
58+
assert mediaImage != null;
5959
int width = mediaImage.getWidth();
6060
int height = mediaImage.getHeight();
6161
int rotation = Utils.degreesToFirebaseRotation(image.getImageInfo().getRotationDegrees());
62-
assert mediaImage != null;
6362
FirebaseVisionImage imageVision = FirebaseVisionImage.fromMediaImage(mediaImage, rotation);
6463

6564
Task<List<FirebaseVisionFace>> result = detector.detectInImage(imageVision).addOnSuccessListener(faces -> {

0 commit comments

Comments (0)